diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 11071446bda..b68815e2a9c 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -673,3 +673,9 @@ which you can use to measure the performance impact. It comes with a set of default benchmarks that we also https://elasticsearch-benchmarks.elastic.co/[run every night]. To get started, please see https://esrally.readthedocs.io/en/stable/[Rally's documentation]. + +== Test doc builds + +The Elasticsearch docs are in AsciiDoc format. You can test and build the docs +locally using the Elasticsearch documentation build process. See +https://github.com/elastic/docs. \ No newline at end of file diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/LazyPropertyList.java b/buildSrc/src/main/java/org/elasticsearch/gradle/LazyPropertyList.java index 1d302ded632..9f8bbe1c024 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/LazyPropertyList.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/LazyPropertyList.java @@ -170,13 +170,23 @@ public class LazyPropertyList extends AbstractLazyPropertyCollection implemen } @Override - public List getNormalizedCollection() { + public List> getNormalizedCollection() { return delegate.stream() .peek(this::validate) .filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE) .collect(Collectors.toList()); } + /** + * Return a "flattened" collection. This should be used when the collection type is itself a complex type with properties + * annotated as Gradle inputs rather than a simple type like {@link String}. 
+ * + * @return a flattened collection filtered according to normalization strategy + */ + public List getFlatNormalizedCollection() { + return getNormalizedCollection().stream().map(PropertyListEntry::getValue).collect(Collectors.toList()); + } + private void validate(PropertyListEntry entry) { assertNotNull(entry.getValue(), "entry"); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 9fe8c41004d..bff6cc3fea7 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -1188,7 +1188,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { @Nested public List getCliSetup() { - return cliSetup.getNormalizedCollection(); + return cliSetup.getFlatNormalizedCollection(); } @Nested diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 3e1746ab994..03aaacf8c76 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -54,6 +54,10 @@ import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.client.analytics.ParsedStringStats; +import org.elasticsearch.client.analytics.ParsedTopMetrics; +import org.elasticsearch.client.analytics.StringStatsAggregationBuilder; +import org.elasticsearch.client.analytics.TopMetricsAggregationBuilder; import org.elasticsearch.client.core.CountRequest; import 
org.elasticsearch.client.core.CountResponse; import org.elasticsearch.client.core.GetSourceRequest; @@ -1926,6 +1930,8 @@ public class RestHighLevelClient implements Closeable { map.put(IpRangeAggregationBuilder.NAME, (p, c) -> ParsedBinaryRange.fromXContent(p, (String) c)); map.put(TopHitsAggregationBuilder.NAME, (p, c) -> ParsedTopHits.fromXContent(p, (String) c)); map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c)); + map.put(StringStatsAggregationBuilder.NAME, (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c)); + map.put(TopMetricsAggregationBuilder.NAME, (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c)); List entries = map.entrySet().stream() .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) .collect(Collectors.toList()); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedStringStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedStringStats.java new file mode 100644 index 00000000000..6c11707accd --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedStringStats.java @@ -0,0 +1,172 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.analytics; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.ParsedAggregation; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Results from the {@code string_stats} aggregation. + */ +public class ParsedStringStats extends ParsedAggregation { + private static final ParseField COUNT_FIELD = new ParseField("count"); + private static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length"); + private static final ParseField MAX_LENGTH_FIELD = new ParseField("max_length"); + private static final ParseField AVG_LENGTH_FIELD = new ParseField("avg_length"); + private static final ParseField ENTROPY_FIELD = new ParseField("entropy"); + private static final ParseField DISTRIBUTION_FIELD = new ParseField("distribution"); + + private final long count; + private final int minLength; + private final int maxLength; + private final double avgLength; + private final double entropy; + private final boolean showDistribution; + private final Map distribution; + + private ParsedStringStats(String name, long count, int minLength, int maxLength, double avgLength, double entropy, + boolean showDistribution, Map distribution) { + setName(name); + this.count = count; + this.minLength = minLength; + this.maxLength = maxLength; + this.avgLength = avgLength; + this.entropy = entropy; + this.showDistribution = showDistribution; + 
this.distribution = distribution; + } + + /** + * The number of non-empty fields counted. + */ + public long getCount() { + return count; + } + + /** + * The length of the shortest term. + */ + public int getMinLength() { + return minLength; + } + + /** + * The length of the longest term. + */ + public int getMaxLength() { + return maxLength; + } + + /** + * The average length computed over all terms. + */ + public double getAvgLength() { + return avgLength; + } + + /** + * The Shannon Entropy + * value computed over all terms collected by the aggregation. + * Shannon entropy quantifies the amount of information contained in + * the field. It is a very useful metric for measuring a wide range of + * properties of a data set, such as diversity, similarity, + * randomness etc. + */ + public double getEntropy() { + return entropy; + } + + /** + * The probability distribution for all characters. {@code null} unless + * explicitly requested with {@link StringStatsAggregationBuilder#showDistribution(boolean)}. 
+ */ + public Map getDistribution() { + return distribution; + } + + @Override + public String getType() { + return StringStatsAggregationBuilder.NAME; + } + + private static final Object NULL_DISTRIBUTION_MARKER = new Object(); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + StringStatsAggregationBuilder.NAME, true, (args, name) -> { + long count = (long) args[0]; + boolean disributionWasExplicitNull = args[5] == NULL_DISTRIBUTION_MARKER; + if (count == 0) { + return new ParsedStringStats(name, count, 0, 0, 0, 0, disributionWasExplicitNull, null); + } + int minLength = (int) args[1]; + int maxLength = (int) args[2]; + double averageLength = (double) args[3]; + double entropy = (double) args[4]; + if (disributionWasExplicitNull) { + return new ParsedStringStats(name, count, minLength, maxLength, averageLength, entropy, + disributionWasExplicitNull, null); + } else { + @SuppressWarnings("unchecked") + Map distribution = (Map) args[5]; + return new ParsedStringStats(name, count, minLength, maxLength, averageLength, entropy, + distribution != null, distribution); + } + }); + static { + PARSER.declareLong(constructorArg(), COUNT_FIELD); + PARSER.declareIntOrNull(constructorArg(), 0, MIN_LENGTH_FIELD); + PARSER.declareIntOrNull(constructorArg(), 0, MAX_LENGTH_FIELD); + PARSER.declareDoubleOrNull(constructorArg(), 0, AVG_LENGTH_FIELD); + PARSER.declareDoubleOrNull(constructorArg(), 0, ENTROPY_FIELD); + PARSER.declareObjectOrNull(optionalConstructorArg(), (p, c) -> unmodifiableMap(p.map(HashMap::new, XContentParser::doubleValue)), + NULL_DISTRIBUTION_MARKER, DISTRIBUTION_FIELD); + ParsedAggregation.declareAggregationFields(PARSER); + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.field(COUNT_FIELD.getPreferredName(), count); + if (count == 0) { + builder.nullField(MIN_LENGTH_FIELD.getPreferredName()); + 
builder.nullField(MAX_LENGTH_FIELD.getPreferredName()); + builder.nullField(AVG_LENGTH_FIELD.getPreferredName()); + builder.field(ENTROPY_FIELD.getPreferredName(), 0.0); + } else { + builder.field(MIN_LENGTH_FIELD.getPreferredName(), minLength); + builder.field(MAX_LENGTH_FIELD.getPreferredName(), maxLength); + builder.field(AVG_LENGTH_FIELD.getPreferredName(), avgLength); + builder.field(ENTROPY_FIELD.getPreferredName(), entropy); + } + if (showDistribution) { + builder.field(DISTRIBUTION_FIELD.getPreferredName(), distribution); + } + return builder; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java new file mode 100644 index 00000000000..2509dcb7674 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.analytics; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.search.aggregations.ParsedAggregation; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Results of the {@code top_metrics} aggregation. + */ +public class ParsedTopMetrics extends ParsedAggregation { + private static final ParseField TOP_FIELD = new ParseField("top"); + + private final List topMetrics; + + private ParsedTopMetrics(String name, List topMetrics) { + setName(name); + this.topMetrics = topMetrics; + } + + /** + * The list of top metrics, in sorted order. 
+ */ + public List getTopMetrics() { + return topMetrics; + } + + @Override + public String getType() { + return TopMetricsAggregationBuilder.NAME; + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.startArray(TOP_FIELD.getPreferredName()); + for (TopMetrics top : topMetrics) { + top.toXContent(builder, params); + } + return builder.endArray(); + } + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + TopMetricsAggregationBuilder.NAME, true, (args, name) -> { + @SuppressWarnings("unchecked") + List topMetrics = (List) args[0]; + return new ParsedTopMetrics(name, topMetrics); + }); + static { + PARSER.declareObjectArray(constructorArg(), (p, c) -> TopMetrics.PARSER.parse(p, null), TOP_FIELD); + ParsedAggregation.declareAggregationFields(PARSER); + } + + /** + * The metrics belonging to the document with the "top" sort key. + */ + public static class TopMetrics implements ToXContent { + private static final ParseField SORT_FIELD = new ParseField("sort"); + private static final ParseField METRICS_FIELD = new ParseField("metrics"); + + private final List sort; + private final Map metrics; + + private TopMetrics(List sort, Map metrics) { + this.sort = sort; + this.metrics = metrics; + } + + /** + * The sort key for these top metrics. + */ + public List getSort() { + return sort; + } + + /** + * The top metric values returned by the aggregation. 
+ */ + public Map getMetrics() { + return metrics; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("top", true, + (args, name) -> { + @SuppressWarnings("unchecked") + List sort = (List) args[0]; + @SuppressWarnings("unchecked") + Map metrics = (Map) args[1]; + return new TopMetrics(sort, metrics); + }); + static { + PARSER.declareFieldArray(constructorArg(), (p, c) -> XContentParserUtils.parseFieldsValue(p), + SORT_FIELD, ObjectParser.ValueType.VALUE_ARRAY); + PARSER.declareObject(constructorArg(), (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), METRICS_FIELD); + } + + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(SORT_FIELD.getPreferredName(), sort); + builder.field(METRICS_FIELD.getPreferredName(), metrics); + builder.endObject(); + return builder; + }; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/StringStatsAggregationBuilder.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/StringStatsAggregationBuilder.java new file mode 100644 index 00000000000..cc39bbe8805 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/StringStatsAggregationBuilder.java @@ -0,0 +1,116 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.analytics; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +/** + * Builds the {@code string_stats} aggregation request. + *

+ * NOTE: This extends {@linkplain AbstractAggregationBuilder} for compatibility + * with {@link SearchSourceBuilder#aggregation(AggregationBuilder)} but it + * doesn't support any "server" side things like + * {@linkplain Writeable#writeTo(StreamOutput)}, + * {@linkplain AggregationBuilder#rewrite(QueryRewriteContext)}, or + * {@linkplain AbstractAggregationBuilder#build(QueryShardContext, AggregatorFactory)}. + */ +public class StringStatsAggregationBuilder extends ValuesSourceAggregationBuilder { + public static final String NAME = "string_stats"; + private static final ParseField SHOW_DISTRIBUTION_FIELD = new ParseField("show_distribution"); + + private boolean showDistribution = false; + + public StringStatsAggregationBuilder(String name) { + super(name, CoreValuesSourceType.BYTES, ValueType.STRING); + } + + /** + * Compute the distribution of each character. Disabled by default. + * @return this for chaining + */ + public StringStatsAggregationBuilder showDistribution(boolean showDistribution) { + this.showDistribution = showDistribution; + return this; + } + + @Override + public String getType() { + return NAME; + } + + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + return builder.field(StringStatsAggregationBuilder.SHOW_DISTRIBUTION_FIELD.getPreferredName(), showDistribution); + } + + @Override + protected void innerWriteTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + protected ValuesSourceAggregatorFactory innerBuild(QueryShardContext queryShardContext, ValuesSourceConfig config, + AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) { + throw new UnsupportedOperationException(); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), 
showDistribution); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + if (false == super.equals(obj)) { + return false; + } + StringStatsAggregationBuilder other = (StringStatsAggregationBuilder) obj; + return showDistribution == other.showDistribution; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/TopMetricsAggregationBuilder.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/TopMetricsAggregationBuilder.java new file mode 100644 index 00000000000..dbca07901d0 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/TopMetricsAggregationBuilder.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.analytics; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortBuilder; + +import java.io.IOException; +import java.util.Map; + +/** + * Builds the Top Metrics aggregation request. + *

+ * NOTE: This extends {@linkplain AbstractAggregationBuilder} for compatibility + * with {@link SearchSourceBuilder#aggregation(AggregationBuilder)} but it + * doesn't support any "server" side things like + * {@linkplain Writeable#writeTo(StreamOutput)}, + * {@linkplain AggregationBuilder#rewrite(QueryRewriteContext)}, or + * {@linkplain AbstractAggregationBuilder#build(QueryShardContext, AggregatorFactory)}. + */ +public class TopMetricsAggregationBuilder extends AbstractAggregationBuilder { + public static final String NAME = "top_metrics"; + + private final SortBuilder sort; + private final String metric; + + /** + * Build the request. + * @param name the name of the metric + * @param sort the sort key used to select the top metrics + * @param metric the name of the field to select + */ + public TopMetricsAggregationBuilder(String name, SortBuilder sort, String metric) { + super(name); + this.sort = sort; + this.metric = metric; + } + + @Override + public String getType() { + return NAME; + } + + @Override + protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.startArray("sort"); + sort.toXContent(builder, params); + builder.endArray(); + builder.startObject("metric").field("field", metric).endObject(); + } + return builder.endObject(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + protected AggregatorFactory doBuild(QueryShardContext queryShardContext, AggregatorFactory parent, Builder subfactoriesBuilder) + throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) { + throw new UnsupportedOperationException(); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index da0eb7eebe1..ea4a4790c26 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.client; import com.fasterxml.jackson.core.JsonParseException; + import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -675,6 +676,11 @@ public class RestHighLevelClientTests extends ESTestCase { List namedXContents = RestHighLevelClient.getDefaultNamedXContents(); int expectedInternalAggregations = InternalAggregationTestCase.getDefaultNamedXContents().size(); int expectedSuggestions = 3; + + // Explicitly check for metrics from the analytics module because they aren't in InternalAggregationTestCase + assertTrue(namedXContents.removeIf(e -> e.name.getPreferredName().equals("string_stats"))); + assertTrue(namedXContents.removeIf(e -> e.name.getPreferredName().equals("top_metrics"))); + assertEquals(expectedInternalAggregations + expectedSuggestions, namedXContents.size()); Map, Integer> categories = new HashMap<>(); for (NamedXContentRegistry.Entry namedXContent : namedXContents) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/analytics/AnalyticsAggsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/analytics/AnalyticsAggsIT.java new file mode 100644 index 00000000000..d16801e6e3b --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/analytics/AnalyticsAggsIT.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.analytics; + +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortOrder; + +import java.io.IOException; + +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; + +public class AnalyticsAggsIT extends ESRestHighLevelClientTestCase { + public void testStringStats() throws IOException { + BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE); + bulk.add(new IndexRequest().source(XContentType.JSON, "message", "trying out elasticsearch")); + bulk.add(new IndexRequest().source(XContentType.JSON, "message", "more words")); + 
highLevelClient().bulk(bulk, RequestOptions.DEFAULT); + SearchRequest search = new SearchRequest("test"); + search.source().aggregation(new StringStatsAggregationBuilder("test").field("message.keyword").showDistribution(true)); + SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT); + ParsedStringStats stats = response.getAggregations().get("test"); + assertThat(stats.getCount(), equalTo(2L)); + assertThat(stats.getMinLength(), equalTo(10)); + assertThat(stats.getMaxLength(), equalTo(24)); + assertThat(stats.getAvgLength(), equalTo(17.0)); + assertThat(stats.getEntropy(), closeTo(4, .1)); + assertThat(stats.getDistribution(), aMapWithSize(18)); + assertThat(stats.getDistribution(), hasEntry(equalTo("o"), closeTo(.09, .005))); + assertThat(stats.getDistribution(), hasEntry(equalTo("r"), closeTo(.12, .005))); + assertThat(stats.getDistribution(), hasEntry(equalTo("t"), closeTo(.09, .005))); + } + + public void testBasic() throws IOException { + BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE); + bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", 2)); + bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", 3)); + highLevelClient().bulk(bulk, RequestOptions.DEFAULT); + SearchRequest search = new SearchRequest("test"); + search.source().aggregation(new TopMetricsAggregationBuilder( + "test", new FieldSortBuilder("s").order(SortOrder.DESC), "v")); + SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT); + ParsedTopMetrics top = response.getAggregations().get("test"); + assertThat(top.getTopMetrics(), hasSize(1)); + ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0); + assertThat(metric.getSort(), equalTo(singletonList(2))); + assertThat(metric.getMetrics(), equalTo(singletonMap("v", 3.0))); + } +} diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index ef576f32dc5..9e04010a20b 100644 --- 
a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -67,6 +67,9 @@ ENV PATH /usr/share/elasticsearch/bin:\$PATH COPY --chown=1000:0 bin/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh +# Ensure that there are no files with setuid or setgid, in order to mitigate "stackclash" attacks. +RUN find / -xdev -perm -4000 -exec chmod ug-s {} + + # Openshift overrides USER and uses ones with randomly uid>1024 and gid=0 # Allow ENTRYPOINT (and ES) to run even with a different user RUN chgrp 0 /usr/local/bin/docker-entrypoint.sh && \ diff --git a/docs/java-rest/high-level/aggs-builders.asciidoc b/docs/java-rest/high-level/aggs-builders.asciidoc index 3b15243f5b2..4ac24b7f00d 100644 --- a/docs/java-rest/high-level/aggs-builders.asciidoc +++ b/docs/java-rest/high-level/aggs-builders.asciidoc @@ -25,7 +25,9 @@ This page lists all the available aggregations with their corresponding `Aggrega | {ref}/search-aggregations-metrics-stats-aggregation.html[Stats] | {agg-ref}/metrics/stats/StatsAggregationBuilder.html[StatsAggregationBuilder] | {agg-ref}/AggregationBuilders.html#stats-java.lang.String-[AggregationBuilders.stats()] | {ref}/search-aggregations-metrics-sum-aggregation.html[Sum] | {agg-ref}/metrics/sum/SumAggregationBuilder.html[SumAggregationBuilder] | {agg-ref}/AggregationBuilders.html#sum-java.lang.String-[AggregationBuilders.sum()] | {ref}/search-aggregations-metrics-top-hits-aggregation.html[Top hits] | {agg-ref}/metrics/tophits/TopHitsAggregationBuilder.html[TopHitsAggregationBuilder] | {agg-ref}/AggregationBuilders.html#topHits-java.lang.String-[AggregationBuilders.topHits()] +| {ref}/search-aggregations-metrics-top-metrics.html[Top Metrics] | {javadoc-client}/analytics/TopMetricsAggregationBuilder.html[TopMetricsAggregationBuilder] | None | {ref}/search-aggregations-metrics-valuecount-aggregation.html[Value Count] | {agg-ref}/metrics/valuecount/ValueCountAggregationBuilder.html[ValueCountAggregationBuilder] | 
{agg-ref}/AggregationBuilders.html#count-java.lang.String-[AggregationBuilders.count()] +| {ref}/search-aggregations-metrics-string-stats-aggregation.html[String Stats] | {javadoc-client}/analytics/StringStatsAggregationBuilder.html[StringStatsAggregationBuilder] | None |====== ==== Bucket Aggregations diff --git a/docs/reference/aggregations/metrics.asciidoc b/docs/reference/aggregations/metrics.asciidoc index 5bcc96d9ae8..6c518a2f6cd 100644 --- a/docs/reference/aggregations/metrics.asciidoc +++ b/docs/reference/aggregations/metrics.asciidoc @@ -41,10 +41,13 @@ include::metrics/sum-aggregation.asciidoc[] include::metrics/tophits-aggregation.asciidoc[] +include::metrics/top-metrics-aggregation.asciidoc[] + include::metrics/valuecount-aggregation.asciidoc[] include::metrics/median-absolute-deviation-aggregation.asciidoc[] +include::metrics/boxplot-aggregation.asciidoc[] diff --git a/docs/reference/aggregations/metrics/boxplot-aggregation.asciidoc b/docs/reference/aggregations/metrics/boxplot-aggregation.asciidoc index 74c20e805fb..51317ab125e 100644 --- a/docs/reference/aggregations/metrics/boxplot-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/boxplot-aggregation.asciidoc @@ -4,7 +4,8 @@ === Boxplot Aggregation A `boxplot` metrics aggregation that computes boxplot of numeric values extracted from the aggregated documents. -These values can be extracted either from specific numeric fields in the documents, or be generated by a provided script. +These values can be generated by a provided script or extracted from specific numeric or +<> in the documents. The `boxplot` aggregation returns essential information for making a https://en.wikipedia.org/wiki/Box_plot[box plot]: minimum, maximum median, first quartile (25th percentile) and third quartile (75th percentile) values. 
diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index 7e9869a0039..10439c25a26 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -285,7 +285,7 @@ GET latency/_search <1> Compression controls memory usage and approximation error -// tag::[t-digest] +// tag::t-digest[] The TDigest algorithm uses a number of "nodes" to approximate percentiles -- the more nodes available, the higher the accuracy (and large memory footprint) proportional to the volume of data. The `compression` parameter limits the maximum number of @@ -301,7 +301,7 @@ A "node" uses roughly 32 bytes of memory, so under worst-case scenarios (large a of data which arrives sorted and in-order) the default settings will produce a TDigest roughly 64KB in size. In practice data tends to be more random and the TDigest will use less memory. -// tag::[t-digest] +// end::t-digest[] ==== HDR Histogram diff --git a/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc b/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc new file mode 100644 index 00000000000..c86bfac59a1 --- /dev/null +++ b/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc @@ -0,0 +1,284 @@ +[role="xpack"] +[testenv="basic"] +[[search-aggregations-metrics-top-metrics]] +=== Top Metrics Aggregation + +experimental[We expect to change the response format of this aggregation as we add more features., https://github.com/elastic/elasticsearch/issues/51813] + +The `top_metrics` aggregation selects metrics from the document with the largest or smallest "sort" +value. 
For example, this gets the value of the `v` field on the document with the largest value of `s`: + +[source,console,id=search-aggregations-metrics-top-metrics-simple] +---- +POST /test/_bulk?refresh +{"index": {}} +{"s": 1, "v": 3.1415} +{"index": {}} +{"s": 2, "v": 1} +{"index": {}} +{"s": 3, "v": 2.71828} +POST /test/_search?filter_path=aggregations +{ + "aggs": { + "tm": { + "top_metrics": { + "metric": {"field": "v"}, + "sort": {"s": "desc"} + } + } + } +} +---- + +Which returns: + +[source,js] +---- +{ + "aggregations": { + "tm": { + "top": [ {"sort": [3], "metrics": {"v": 2.718280076980591 } } ] + } + } +} +---- +// TESTRESPONSE + +`top_metrics` is fairly similar to <> +in spirit but because it is more limited it is able to do its job using less memory and is often +faster. + +==== `sort` + +The `sort` field in the metric request functions exactly the same as the `sort` field in the +<> request except: +* It can't be used on <>, <>, <>, +<>, or <> fields. +* It only supports a single sort value. + +The metrics that the aggregation returns is the first hit that would be returned by the search +request. So, + +`"sort": {"s": "desc"}`:: gets metrics from the document with the highest `s` +`"sort": {"s": "asc"}`:: gets the metrics from the document with the lowest `s` +`"sort": {"_geo_distance": {"location": "35.7796, -78.6382"}}`:: + gets metrics from the documents with `location` *closest* to `35.7796, -78.6382` +`"sort": "_score"`:: gets metrics from the document with the highest score + +NOTE: This aggregation doesn't support any sort of "tie breaking". If two documents have +the same sort values then this aggregation could return either document's fields. + +==== `metric` + +At this point `metric` supports only `{"field": "field_name"}` and all metrics +are returned as double precision floating point numbers. Expect more to +come here.
+ +==== Examples + +===== Use with terms + +This aggregation should be quite useful inside of <> +aggregation, to, say, find the last value reported by each server. + +[source,console,id=search-aggregations-metrics-top-metrics-terms] +---- +PUT /node +{ + "mappings": { + "properties": { + "ip": {"type": "ip"}, + "date": {"type": "date"} + } + } +} +POST /node/_bulk?refresh +{"index": {}} +{"ip": "192.168.0.1", "date": "2020-01-01T01:01:01", "v": 1} +{"index": {}} +{"ip": "192.168.0.1", "date": "2020-01-01T02:01:01", "v": 2} +{"index": {}} +{"ip": "192.168.0.2", "date": "2020-01-01T02:01:01", "v": 3} +POST /node/_search?filter_path=aggregations +{ + "aggs": { + "ip": { + "terms": { + "field": "ip" + }, + "aggs": { + "tm": { + "top_metrics": { + "metric": {"field": "v"}, + "sort": {"date": "desc"} + } + } + } + } + } +} +---- + +Which returns: + +[source,js] +---- +{ + "aggregations": { + "ip": { + "buckets": [ + { + "key": "192.168.0.1", + "doc_count": 2, + "tm": { + "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2.0 } } ] + } + }, + { + "key": "192.168.0.2", + "doc_count": 1, + "tm": { + "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3.0 } } ] + } + } + ], + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + } + } +} +---- +// TESTRESPONSE + +Unlike `top_hits`, you can sort buckets by the results of this metric: + +[source,console] +---- +POST /node/_search?filter_path=aggregations +{ + "aggs": { + "ip": { + "terms": { + "field": "ip", + "order": {"tm.v": "desc"} + }, + "aggs": { + "tm": { + "top_metrics": { + "metric": {"field": "v"}, + "sort": {"date": "desc"} + } + } + } + } + } +} +---- +// TEST[continued] + +Which returns: + +[source,js] +---- +{ + "aggregations": { + "ip": { + "buckets": [ + { + "key": "192.168.0.2", + "doc_count": 1, + "tm": { + "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3.0 } } ] + } + }, + { + "key": "192.168.0.1", + "doc_count": 2, + "tm": { + "top": [ {"sort": 
["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2.0 } } ] + } + } + ], + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + } + } +} +---- +// TESTRESPONSE + +===== Mixed sort types + +Sorting `top_metrics` by a field that has different types across different +indices producs somewhat suprising results: floating point fields are +always sorted independantly of whole numbered fields. + +[source,console,id=search-aggregations-metrics-top-metrics-mixed-sort] +---- +POST /test/_bulk?refresh +{"index": {"_index": "test1"}} +{"s": 1, "v": 3.1415} +{"index": {"_index": "test1"}} +{"s": 2, "v": 1} +{"index": {"_index": "test2"}} +{"s": 3.1, "v": 2.71828} +POST /test*/_search?filter_path=aggregations +{ + "aggs": { + "tm": { + "top_metrics": { + "metric": {"field": "v"}, + "sort": {"s": "asc"} + } + } + } +} +---- + +Which returns: + +[source,js] +---- +{ + "aggregations": { + "tm": { + "top": [ {"sort": [3.0999999046325684], "metrics": {"v": 2.718280076980591 } } ] + } + } +} +---- +// TESTRESPONSE + +While this is better than an error it *probably* isn't what you were going for. +While it does lose some precision, you can explictly cast the whole number +fields to floating points with something like: + +[source,console] +---- +POST /test*/_search?filter_path=aggregations +{ + "aggs": { + "tm": { + "top_metrics": { + "metric": {"field": "v"}, + "sort": {"s": {"order": "asc", "numeric_type": "double"}} + } + } + } +} +---- +// TEST[continued] + +Which returns the much more expected: + +[source,js] +---- +{ + "aggregations": { + "tm": { + "top": [ {"sort": [1.0], "metrics": {"v": 3.1414999961853027 } } ] + } + } +} +---- +// TESTRESPONSE diff --git a/docs/reference/analysis/configure-text-analysis.asciidoc b/docs/reference/analysis/configure-text-analysis.asciidoc index 2c91725c347..ddafc257e94 100644 --- a/docs/reference/analysis/configure-text-analysis.asciidoc +++ b/docs/reference/analysis/configure-text-analysis.asciidoc @@ -20,7 +20,7 @@ the process. 
* <> * <> * <> -* > +* <> include::testing.asciidoc[] diff --git a/docs/reference/eql/index.asciidoc b/docs/reference/eql/index.asciidoc index 8de75449607..328f40a36a5 100644 --- a/docs/reference/eql/index.asciidoc +++ b/docs/reference/eql/index.asciidoc @@ -30,7 +30,11 @@ Consider using EQL if you: === In this section * <> +* <> * <> +* <> include::requirements.asciidoc[] +include::search.asciidoc[] include::syntax.asciidoc[] +include::limitations.asciidoc[] diff --git a/docs/reference/eql/limitations.asciidoc b/docs/reference/eql/limitations.asciidoc new file mode 100644 index 00000000000..5e6cc74c319 --- /dev/null +++ b/docs/reference/eql/limitations.asciidoc @@ -0,0 +1,29 @@ +[role="xpack"] +[testenv="basic"] +[[eql-limitations]] +== EQL limitations +++++ +Limitations +++++ + +experimental::[] + +[discrete] +[[eql-unsupported-syntax]] +=== Unsupported syntax + +{es} supports a subset of {eql-ref}/index.html[EQL syntax]. {es} cannot run EQL +queries that contain: + +* {eql-ref}/functions.html[Functions] + +* {eql-ref}/joins.html[Joins] + +* {eql-ref}/basic-syntax.html#event-relationships[Lineage-related keywords]: +** `child of` +** `descendant of` +** `event of` + +* {eql-ref}/pipes.html[Pipes] + +* {eql-ref}/sequences.html[Sequences] \ No newline at end of file diff --git a/docs/reference/eql/requirements.asciidoc b/docs/reference/eql/requirements.asciidoc index 1791b547d50..233a29d661f 100644 --- a/docs/reference/eql/requirements.asciidoc +++ b/docs/reference/eql/requirements.asciidoc @@ -6,6 +6,8 @@ Requirements ++++ +experimental::[] + EQL is schemaless and works out-of-the-box with most common log formats. If you use a standard log format and already know what fields in your index contain event type and timestamp information, you can skip this page. 
diff --git a/docs/reference/eql/search.asciidoc b/docs/reference/eql/search.asciidoc new file mode 100644 index 00000000000..10dc96a5b9e --- /dev/null +++ b/docs/reference/eql/search.asciidoc @@ -0,0 +1,46 @@ +[role="xpack"] +[testenv="basic"] +[[eql-search]] +== Run an EQL search + +experimental::[] + +To start using EQL in {es}, first ensure your event data meets +<>. Then ingest or add the data to an {es} +index. + +The following <> request adds some example log data to the +`sec_logs` index. This log data follows the {ecs-ref}[Elastic Common Schema +(ECS)]. + +[source,console] +---- +PUT sec_logs/_bulk?refresh +{"index":{"_index" : "sec_logs"}} +{ "@timestamp": "2020-12-07T11:06:07.000Z", "agent": { "id": "8a4f500d" }, "event": { "category": "process" }, "process": { "name": "cmd.exe", "path": "C:\\Windows\\System32\\cmd.exe" } } +{"index":{"_index" : "sec_logs"}} +{ "@timestamp": "2020-12-07T11:07:08.000Z", "agent": { "id": "8a4f500d" }, "event": { "category": "image_load" }, "file": { "name": "cmd.exe", "path": "C:\\Windows\\System32\\cmd.exe" }, "process": { "name": "cmd.exe", "path": "C:\\Windows\\System32\\cmd.exe" } } +{"index":{"_index" : "sec_logs"}} +{ "@timestamp": "2020-12-07T11:07:09.000Z", "agent": { "id": "8a4f500d" }, "event": { "category": "process" }, "process": { "name": "regsvr32.exe", "path": "C:\\Windows\\System32\\regsvr32.exe" } } +---- + +You can now use the EQL search API to search this index using an EQL query. + +The following request searches the `sec_logs` index using the EQL query +specified in the `rule` parameter. The EQL query matches events with an +`event.category` of `process` that have a `process.name` of `cmd.exe`. + +[source,console] +---- +GET sec_logs/_eql/search +{ + "rule": """ + process where process.name == "cmd.exe" + """ +} +---- +// TEST[continued] + +Because the `sec_log` index follows the ECS, you don't need to specify the +event type or timestamp fields. 
The request uses the `event.category` and +`@timestamp` fields by default. diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 1172f9cf4d1..6899fb2581c 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -135,7 +135,6 @@ for the alias's indexing operations. See <> for an example. `search_routing`:: -`index_routing`:: (Optional, string) Custom <> used for the alias's search operations. diff --git a/docs/reference/mapping/types/date_nanos.asciidoc b/docs/reference/mapping/types/date_nanos.asciidoc index cf7082571ec..4267d029368 100644 --- a/docs/reference/mapping/types/date_nanos.asciidoc +++ b/docs/reference/mapping/types/date_nanos.asciidoc @@ -63,7 +63,7 @@ GET my_index/_search "my_field" : { "script" : { "lang" : "painless", - "source" : "doc['date'].date.nanos" <6> + "source" : "doc['date'].value.nano" <6> } } } diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 5ff464da9b9..d2aaa146512 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -252,6 +252,10 @@ between index size and a reasonable level of precision of 50m at the equator. This allows for indexing tens of millions of shapes without overly bloating the resulting index too much relative to the input size. +[NOTE] +Geo-shape queries on geo-shapes implemented with PrefixTrees will not be executed if +<> is set to false. 
+ [[input-structure]] [float] ==== Input Structure diff --git a/docs/reference/mapping/types/histogram.asciidoc b/docs/reference/mapping/types/histogram.asciidoc index fe4209c52b7..440530b1102 100644 --- a/docs/reference/mapping/types/histogram.asciidoc +++ b/docs/reference/mapping/types/histogram.asciidoc @@ -37,6 +37,7 @@ following aggregations and queries: * <> aggregation * <> aggregation +* <> aggregation * <> query [[mapping-types-histogram-building-histogram]] diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index 893bf0c9e48..0ec04e8087e 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -642,23 +642,9 @@ to `false`. When `true`, only a single model must match the ID patterns provided, otherwise a bad request is returned. end::include-model-definition[] -tag::tags[] -A comma delimited string of tags. A {infer} model can have many tags, or none. -When supplied, only {infer} models that contain all the supplied tags are -returned. -end::tags[] - tag::indices[] An array of index names. Wildcards are supported. For example: `["it_ops_metrics", "server*"]`. - -tag::num-top-feature-importance-values[] -Advanced configuration option. If set, feature importance for the top -most important features will be computed. Importance is calculated -using the SHAP (SHapley Additive exPlanations) method as described in -https://papers.nips.cc/paper/7062-a-unified-approach-to-interpreting-model-predictions.pdf[Lundberg, S. M., & Lee, S.-I. A Unified Approach to Interpreting Model Predictions. In NeurIPS 2017.]. -end::num-top-feature-importance-values[] - + -- NOTE: If any indices are in remote clusters then `cluster.remote.connect` must @@ -918,6 +904,13 @@ total number of categories (in the {version} version of the {stack}, it's two) to predict then we will report all category probabilities. Defaults to 2. 
end::num-top-classes[] +tag::num-top-feature-importance-values[] +Advanced configuration option. If set, feature importance for the top +most important features will be computed. Importance is calculated +using the SHAP (SHapley Additive exPlanations) method as described in +https://papers.nips.cc/paper/7062-a-unified-approach-to-interpreting-model-predictions.pdf[Lundberg, S. M., & Lee, S.-I. A Unified Approach to Interpreting Model Predictions. In NeurIPS 2017.]. +end::num-top-feature-importance-values[] + tag::over-field-name[] The field used to split the data. In particular, this property is used for analyzing the splits with respect to the history of all splits. It is used for @@ -1062,6 +1055,12 @@ function. -- end::summary-count-field-name[] +tag::tags[] +A comma delimited string of tags. A {infer} model can have many tags, or none. +When supplied, only {infer} models that contain all the supplied tags are +returned. +end::tags[] + tag::time-format[] The time format, which can be `epoch`, `epoch_ms`, or a custom pattern. The default value is `epoch`, which refers to UNIX or Epoch time (the number of diff --git a/docs/reference/query-dsl.asciidoc b/docs/reference/query-dsl.asciidoc index 58ebe3190a3..51889a5f2c1 100644 --- a/docs/reference/query-dsl.asciidoc +++ b/docs/reference/query-dsl.asciidoc @@ -25,6 +25,27 @@ or to alter their behaviour (such as the Query clauses behave differently depending on whether they are used in <>. + +[[query-dsl-allow-expensive-queries]] +Allow expensive queries:: +Certain types of queries will generally execute slowly due to the way they are implemented, which can affect +the stability of the cluster. 
Those queries can be categorised as follows: +* Queries that need to do linear scans to identify matches: +** <> +* Queries that have a high up-front cost: +** <> +** <> +** <> without <> +** <> +** <> on <> and <> fields +* <> +* Queries on <> +* Queries that may have a high per-document cost: +** <> +** <> + +The execution of such queries can be prevented by setting the value of the `search.allow_expensive_queries` +setting to `false` (defaults to `true`). -- include::query-dsl/query_filter_context.asciidoc[] @@ -51,4 +72,4 @@ include::query-dsl/minimum-should-match.asciidoc[] include::query-dsl/multi-term-rewrite.asciidoc[] -include::query-dsl/regexp-syntax.asciidoc[] \ No newline at end of file +include::query-dsl/regexp-syntax.asciidoc[] diff --git a/docs/reference/query-dsl/fuzzy-query.asciidoc b/docs/reference/query-dsl/fuzzy-query.asciidoc index bb20e0bd7e7..3a6d4bf7866 100644 --- a/docs/reference/query-dsl/fuzzy-query.asciidoc +++ b/docs/reference/query-dsl/fuzzy-query.asciidoc @@ -97,4 +97,8 @@ adjacent characters (ab → ba). Defaults to `true`. `rewrite`:: (Optional, string) Method used to rewrite the query. For valid values and more -information, see the <>. \ No newline at end of file +information, see the <>. + +==== Notes +Fuzzy queries will not be executed if <> +is set to false. diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index 19a22ee103d..9706b90d828 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -161,3 +161,7 @@ and will not match any documents for this query. This can be useful when querying multiple indexes which might have different mappings. When set to `false` (the default value) the query will throw an exception if the field is not mapped. + +==== Notes +Geo-shape queries on geo-shapes implemented with <> will not be executed if +<> is set to false. 
diff --git a/docs/reference/query-dsl/joining-queries.asciidoc b/docs/reference/query-dsl/joining-queries.asciidoc index 69fcca86900..140fe111fed 100644 --- a/docs/reference/query-dsl/joining-queries.asciidoc +++ b/docs/reference/query-dsl/joining-queries.asciidoc @@ -29,4 +29,7 @@ include::has-parent-query.asciidoc[] include::parent-id-query.asciidoc[] - +=== Notes +==== Allow expensive queries +Joining queries will not be executed if <> +is set to false. diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index a1b2cf7ff5d..cfd9501060d 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -693,3 +693,8 @@ being percolated, as opposed to a single index as we do in examples. There are a allows for fields to be stored in a denser, more efficient way. - Percolate queries do not scale in the same way as other queries, so percolation performance may benefit from using a different index configuration, like the number of primary shards. + +=== Notes +==== Allow expensive queries +Percolate queries will not be executed if <> +is set to false. diff --git a/docs/reference/query-dsl/prefix-query.asciidoc b/docs/reference/query-dsl/prefix-query.asciidoc index 780de433aab..8501f0f7d03 100644 --- a/docs/reference/query-dsl/prefix-query.asciidoc +++ b/docs/reference/query-dsl/prefix-query.asciidoc @@ -64,4 +64,10 @@ GET /_search You can speed up prefix queries using the <> mapping parameter. If enabled, {es} indexes prefixes between 2 and 5 characters in a separate field. This lets {es} run prefix queries more -efficiently at the cost of a larger index. \ No newline at end of file +efficiently at the cost of a larger index. + +[[prefix-query-allow-expensive-queries]] +===== Allow expensive queries +Prefix queries will not be executed if <> +is set to false. 
However, if <> are enabled, an optimised query is built which +is not considered slow, and will be executed in spite of this setting. diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc index 56eb3b6efb5..47523a07934 100644 --- a/docs/reference/query-dsl/query-string-query.asciidoc +++ b/docs/reference/query-dsl/query-string-query.asciidoc @@ -537,3 +537,9 @@ The example above creates a boolean query: `(blended(terms:[field2:this, field1:this]) blended(terms:[field2:that, field1:that]) blended(terms:[field2:thus, field1:thus]))~2` that matches documents with at least two of the three per-term blended queries. + +==== Notes +===== Allow expensive queries +Query string query can internally be transformed to a <> which means +that if the prefix queries are disabled as explained <> the query will not be +executed and an exception will be thrown. diff --git a/docs/reference/query-dsl/range-query.asciidoc b/docs/reference/query-dsl/range-query.asciidoc index 73fda308a0b..ea7e7e95294 100644 --- a/docs/reference/query-dsl/range-query.asciidoc +++ b/docs/reference/query-dsl/range-query.asciidoc @@ -134,6 +134,11 @@ increases the relevance score. [[range-query-notes]] ==== Notes +[[ranges-on-text-and-keyword]] +===== Using the `range` query with `text` and `keyword` fields +Range queries on <> or <> fields will not be executed if +<> is set to false. + [[ranges-on-dates]] ===== Using the `range` query with `date` fields diff --git a/docs/reference/query-dsl/regexp-query.asciidoc b/docs/reference/query-dsl/regexp-query.asciidoc index e92424afbc2..522bc68adf8 100644 --- a/docs/reference/query-dsl/regexp-query.asciidoc +++ b/docs/reference/query-dsl/regexp-query.asciidoc @@ -86,3 +86,8 @@ regular expressions. `rewrite`:: (Optional, string) Method used to rewrite the query. For valid values and more information, see the <>.
+ +==== Notes +===== Allow expensive queries +Regexp queries will not be executed if <> +is set to false. diff --git a/docs/reference/query-dsl/script-query.asciidoc b/docs/reference/query-dsl/script-query.asciidoc index 5ed6d4e91f6..cadf3c080df 100644 --- a/docs/reference/query-dsl/script-query.asciidoc +++ b/docs/reference/query-dsl/script-query.asciidoc @@ -69,3 +69,7 @@ GET /_search } } ---- + +===== Allow expensive queries +Script queries will not be executed if <> +is set to false. diff --git a/docs/reference/query-dsl/script-score-query.asciidoc b/docs/reference/query-dsl/script-score-query.asciidoc index 029cc7469e0..d38021119d9 100644 --- a/docs/reference/query-dsl/script-score-query.asciidoc +++ b/docs/reference/query-dsl/script-score-query.asciidoc @@ -221,6 +221,10 @@ and default time zone. Also calculations with `now` are not supported. <> are accessible through `script_score` query. +===== Allow expensive queries +Script score queries will not be executed if <> +is set to false. + [[script-score-faster-alt]] ===== Faster alternatives The `script_score` query calculates the score for diff --git a/docs/reference/query-dsl/wildcard-query.asciidoc b/docs/reference/query-dsl/wildcard-query.asciidoc index 5cc1dacfb6e..3df6570897d 100644 --- a/docs/reference/query-dsl/wildcard-query.asciidoc +++ b/docs/reference/query-dsl/wildcard-query.asciidoc @@ -67,4 +67,9 @@ increases the relevance score. `rewrite`:: (Optional, string) Method used to rewrite the query. For valid values and more information, see the -<>. \ No newline at end of file +<>. + +==== Notes +===== Allow expensive queries +Wildcard queries will not be executed if <> +is set to false. diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index f8cd699a756..1f21b99f11f 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -330,6 +330,11 @@ See <>. See <>. 
+[role="exclude",id="slm-api-execute-policy"] +=== {slm-init} execute lifecycle API + +See <>. + [role="exclude",id="slm-api-get"] === {slm-init} get policy API @@ -340,7 +345,22 @@ See <>. See <>. +[role="exclude",id="slm-get-status"] +=== {slm-init} status API + +See <>. + [role="exclude",id="slm-api-put"] === {slm-init} put policy API See <>. + +[role="exclude",id="slm-start"] +=== Start {slm} API + +See <>. + +[role="exclude",id="slm-stop"] +=== Stop {slm} API + +See <>. diff --git a/docs/reference/release-notes/highlights-7.6.0.asciidoc b/docs/reference/release-notes/highlights-7.6.0.asciidoc index 9714541cb77..3a38256c8b9 100644 --- a/docs/reference/release-notes/highlights-7.6.0.asciidoc +++ b/docs/reference/release-notes/highlights-7.6.0.asciidoc @@ -30,3 +30,43 @@ This lets {es} skip non-competitive hits, which often improves query speed. In benchmarking tests, this sped up sorts on `long` fields by 10x. // end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== Simplifying and operationalizing machine learning + +With the release of 7.6 the {stack} delivers an end-to-end {ml} pipeline +providing the path from raw data to building, testing, and deploying {ml} models +in production. Up to this point {ml} in the {stack} had primarily focused on +unsupervised techniques by using sophisticated pattern recognition that builds +time series models used for {anomaly-detect}. With the new {dfanalytics}, you +can now use labelled data to train and test your own models, store those models +as {es} indices, and use {ml-docs}/ml-inference.html[inference] to add predicted +values to the indices based on your trained models. + +One packaged model that we are releasing in 7.6 is +{ml-docs}/ml-lang-ident.html[{lang-ident}]. If you have documents or sources +that come in a variety of languages, {lang-ident} can be used to determine the +language of text so you can improve the overall search relevance. 
+{lang-ident-cap} is a trained model that can provide a prediction of the +language of any text field. +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== {ccs-cap} in {transforms} + +{ref}/transforms.html[{transforms-cap}] can now use {ccs} (CCS) for the source +index. Now you can have separate clusters (for example, project clusters) build +entity-centric or feature indices against a primary cluster. + +// end::notable-highlights[] + +[float] +=== Learn more + +Get more details on these features in the +https://www.elastic.co/blog/elasticsearch-7-6-0-released[{es} 7.6 release blog]. +For a complete list of enhancements and other changes, check out the +<>. + diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc index 328c11e1ae2..5966be4fcf6 100644 --- a/docs/reference/sql/limitations.asciidoc +++ b/docs/reference/sql/limitations.asciidoc @@ -118,6 +118,17 @@ SELECT * FROM test GROUP BY age ORDER BY COUNT(*) LIMIT 100; It is possible to run the same queries without a `LIMIT` however in that case if the maximum size (*10000*) is passed, an exception will be returned as {es-sql} is unable to track (and sort) all the results returned. +Moreover, the aggregation(s) used in the `ORDER BY` must be only plain aggregate functions. No scalar +functions or operators can be used, and therefore no complex columns that combine two or more aggregate +functions can be used for ordering.
Here are some examples of queries that are *not allowed*: + +[source, sql] +-------------------------------------------------- +SELECT age, ROUND(AVG(salary)) AS avg FROM test GROUP BY age ORDER BY avg; + +SELECT age, MAX(salary) - MIN(salary) AS diff FROM test GROUP BY age ORDER BY diff; +-------------------------------------------------- + [float] === Using aggregation functions on top of scalar functions diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java index b5b4dcd00ca..b041e6aa8a7 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java @@ -148,6 +148,15 @@ public abstract class AbstractObjectParser declareField(consumer, (p, c) -> objectParser.parse(p, c), field, ValueType.OBJECT); } + /** + * Declare an object field that parses explicit {@code null}s in the json to a default value. + */ + public void declareObjectOrNull(BiConsumer consumer, ContextParser objectParser, T nullValue, + ParseField field) { + declareField(consumer, (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? nullValue : objectParser.parse(p, c), + field, ValueType.OBJECT_OR_NULL); + } + public void declareFloat(BiConsumer consumer, ParseField field) { // Using a method reference here angers some compilers declareField(consumer, p -> p.floatValue(), field, ValueType.FLOAT); @@ -158,16 +167,33 @@ public abstract class AbstractObjectParser declareField(consumer, p -> p.doubleValue(), field, ValueType.DOUBLE); } + /** + * Declare a double field that parses explicit {@code null}s in the json to a default value. + */ + public void declareDoubleOrNull(BiConsumer consumer, double nullValue, ParseField field) { + declareField(consumer, p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
nullValue : p.doubleValue(), + field, ValueType.DOUBLE_OR_NULL); + } + public void declareLong(BiConsumer consumer, ParseField field) { // Using a method reference here angers some compilers declareField(consumer, p -> p.longValue(), field, ValueType.LONG); } public void declareInt(BiConsumer consumer, ParseField field) { - // Using a method reference here angers some compilers + // Using a method reference here angers some compilers declareField(consumer, p -> p.intValue(), field, ValueType.INT); } + /** + * Declare a double field that parses explicit {@code null}s in the json to a default value. + */ + public void declareIntOrNull(BiConsumer consumer, int nullValue, ParseField field) { + declareField(consumer, p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? nullValue : p.intValue(), + field, ValueType.INT_OR_NULL); + } + + public void declareString(BiConsumer consumer, ParseField field) { declareField(consumer, XContentParser::text, field, ValueType.STRING); } diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 25094d257ba..7c44b29b259 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -43,6 +43,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class ObjectParserTests extends ESTestCase { @@ -275,6 +276,24 @@ public class ObjectParserTests extends ESTestCase { assertNotNull(s.object); } + public void testObjectOrNullWhenNull() throws IOException { + StaticTestStruct nullMarker = new StaticTestStruct(); + XContentParser parser = 
createParser(JsonXContent.jsonXContent, "{\"object\" : null}"); + ObjectParser objectParser = new ObjectParser<>("foo", StaticTestStruct::new); + objectParser.declareObjectOrNull(StaticTestStruct::setObject, objectParser, nullMarker, new ParseField("object")); + StaticTestStruct s = objectParser.parse(parser, null); + assertThat(s.object, equalTo(nullMarker)); + } + + public void testObjectOrNullWhenNonNull() throws IOException { + StaticTestStruct nullMarker = new StaticTestStruct(); + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"object\" : {}}"); + ObjectParser objectParser = new ObjectParser<>("foo", StaticTestStruct::new); + objectParser.declareObjectOrNull(StaticTestStruct::setObject, objectParser, nullMarker, new ParseField("object")); + StaticTestStruct s = objectParser.parse(parser, null); + assertThat(s.object, not(nullValue())); + } + public void testEmptyObjectInArray() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"object_array\" : [{}]}"); ObjectParser objectParser = new ObjectParser<>("foo", StaticTestStruct::new); @@ -321,15 +340,32 @@ public class ObjectParserTests extends ESTestCase { } public void testAllVariants() throws IOException { + double expectedNullableDouble; + int expectedNullableInt; + XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); builder.startObject(); builder.field("int_field", randomBoolean() ? "1" : 1); + if (randomBoolean()) { + builder.nullField("nullable_int_field"); + expectedNullableInt = -1; + } else { + expectedNullableInt = randomInt(); + builder.field("nullable_int_field", expectedNullableInt); + } if (randomBoolean()) { builder.array("int_array_field", randomBoolean() ? "1" : 1); } else { builder.field("int_array_field", randomBoolean() ? "1" : 1); } builder.field("double_field", randomBoolean() ? 
"2.1" : 2.1d); + if (randomBoolean()) { + builder.nullField("nullable_double_field"); + expectedNullableDouble = Double.NaN; + } else { + expectedNullableDouble = randomDouble(); + builder.field("nullable_double_field", expectedNullableDouble); + } if (randomBoolean()) { builder.array("double_array_field", randomBoolean() ? "2.1" : 2.1d); } else { @@ -364,9 +400,11 @@ public class ObjectParserTests extends ESTestCase { XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder)); class TestStruct { int int_field; + int nullableIntField; long long_field; float float_field; double double_field; + double nullableDoubleField; String string_field; List int_array_field; List long_array_field; @@ -378,6 +416,9 @@ public class ObjectParserTests extends ESTestCase { public void setInt_field(int int_field) { this.int_field = int_field; } + public void setNullableIntField(int nullableIntField) { + this.nullableIntField = nullableIntField; + } public void setLong_field(long long_field) { this.long_field = long_field; } @@ -387,6 +428,9 @@ public class ObjectParserTests extends ESTestCase { public void setDouble_field(double double_field) { this.double_field = double_field; } + public void setNullableDoubleField(double nullableDoubleField) { + this.nullableDoubleField = nullableDoubleField; + } public void setString_field(String string_field) { this.string_field = string_field; } @@ -416,10 +460,12 @@ public class ObjectParserTests extends ESTestCase { } ObjectParser objectParser = new ObjectParser<>("foo"); objectParser.declareInt(TestStruct::setInt_field, new ParseField("int_field")); + objectParser.declareIntOrNull(TestStruct::setNullableIntField, -1, new ParseField("nullable_int_field")); objectParser.declareIntArray(TestStruct::setInt_array_field, new ParseField("int_array_field")); objectParser.declareLong(TestStruct::setLong_field, new ParseField("long_field")); objectParser.declareLongArray(TestStruct::setLong_array_field, new 
ParseField("long_array_field")); objectParser.declareDouble(TestStruct::setDouble_field, new ParseField("double_field")); + objectParser.declareDoubleOrNull(TestStruct::setNullableDoubleField, Double.NaN, new ParseField("nullable_double_field")); objectParser.declareDoubleArray(TestStruct::setDouble_array_field, new ParseField("double_array_field")); objectParser.declareFloat(TestStruct::setFloat_field, new ParseField("float_field")); objectParser.declareFloatArray(TestStruct::setFloat_array_field, new ParseField("float_array_field")); @@ -431,6 +477,7 @@ public class ObjectParserTests extends ESTestCase { TestStruct parse = objectParser.parse(parser, new TestStruct(), null); assertArrayEquals(parse.double_array_field.toArray(), Collections.singletonList(2.1d).toArray()); assertEquals(parse.double_field, 2.1d, 0.0d); + assertThat(parse.nullableDoubleField, equalTo(expectedNullableDouble)); assertArrayEquals(parse.long_array_field.toArray(), Collections.singletonList(4L).toArray()); assertEquals(parse.long_field, 4L); @@ -440,6 +487,7 @@ public class ObjectParserTests extends ESTestCase { assertArrayEquals(parse.int_array_field.toArray(), Collections.singletonList(1).toArray()); assertEquals(parse.int_field, 1); + assertThat(parse.nullableIntField, equalTo(expectedNullableInt)); assertArrayEquals(parse.float_array_field.toArray(), Collections.singletonList(3.1f).toArray()); assertEquals(parse.float_field, 3.1f, 0.0f); diff --git a/modules/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml b/modules/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml index 7ca15165b00..845547a97d0 100644 --- a/modules/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml +++ b/modules/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml @@ -81,6 +81,8 @@ --- "Test user agent processor with non-ECS 
schema": - skip: + version : "all" + reason : "tracked at https://github.com/elastic/elasticsearch/issues/52266" features: warnings - do: diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml index d1f0e6ff125..d96b0ce42ec 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml @@ -2,8 +2,8 @@ setup: - skip: - version: " - 7.09.99" - reason: "random score function of script score was added in 7.1" + version: " - 7.1.99" + reason: "random score function of script score was added in 7.2" --- "Random score function with _seq_no field": diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 38d635ab393..d71992b8ed1 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; @@ -59,6 +60,8 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import 
org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.math.BigDecimal; @@ -522,6 +525,12 @@ public class ScaledFloatFieldMapper extends FieldMapper { return new SortField(getFieldName(), source, reverse); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested).newBucketedSort(bigArrays, sortOrder, format); + } + @Override public void clear() { scaledFieldData.clear(); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldTypeTests.java index 523de918091..b9cdae13ec1 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldTypeTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.Defaults; import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.PrefixFieldType; import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.SearchAsYouTypeFieldType; @@ -100,14 +101,19 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase { // this term should be a length that can be rewriteable to a term query on the prefix field final String withinBoundsTerm = "foo"; - assertThat(fieldType.prefixQuery(withinBoundsTerm, CONSTANT_SCORE_REWRITE, null), + assertThat(fieldType.prefixQuery(withinBoundsTerm, 
CONSTANT_SCORE_REWRITE, randomMockShardContext()), equalTo(new ConstantScoreQuery(new TermQuery(new Term(PREFIX_NAME, withinBoundsTerm))))); // our defaults don't allow a situation where a term can be too small // this term should be too long to be rewriteable to a term query on the prefix field final String longTerm = "toolongforourprefixfieldthistermis"; - assertThat(fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, null), + assertThat(fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_QSC), equalTo(new PrefixQuery(new Term(NAME, longTerm)))); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. " + + "For optimised prefix queries on text fields please enable [index_prefixes].", ee.getMessage()); } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index 56bf0112044..9c05891d4af 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.similarities.Similarity; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -55,6 +56,8 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; + /** * A query builder for {@code has_child} 
query. */ @@ -302,6 +305,11 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "parent_id"; @@ -153,6 +156,11 @@ public final class ParentIdQueryBuilder extends AbstractQueryBuilder { @@ -371,5 +374,18 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase InnerHitContextBuilder.extractInnerHits(queryBuilder, Collections.singletonMap("some_name", null))); + assertEquals("[inner_hits] already contains an entry for key [some_name]", e.getMessage()); + } + + public void testDisallowExpensiveQueries() { + QueryShardContext queryShardContext = mock(QueryShardContext.class); + when(queryShardContext.allowExpensiveQueries()).thenReturn(false); + + HasChildQueryBuilder queryBuilder = + hasChildQuery(CHILD_DOC, new TermQueryBuilder("custom_string", "value"), ScoreMode.None); + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> queryBuilder.toQuery(queryShardContext)); + assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getMessage()); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java index bdb08fd66f9..8709d5555cb 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.join.query; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; @@ -57,6 +58,8 @@ import static org.hamcrest.CoreMatchers.containsString; 
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class HasParentQueryBuilderTests extends AbstractQueryTestCase { private static final String TYPE = "_doc"; @@ -265,5 +268,18 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase InnerHitContextBuilder.extractInnerHits(queryBuilder, Collections.singletonMap("some_name", null))); + assertEquals("[inner_hits] already contains an entry for key [some_name]", e.getMessage()); + } + + public void testDisallowExpensiveQueries() { + QueryShardContext queryShardContext = mock(QueryShardContext.class); + when(queryShardContext.allowExpensiveQueries()).thenReturn(false); + + HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder( + CHILD_DOC, new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), false); + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> queryBuilder.toQuery(queryShardContext)); + assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getMessage()); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java index f43214515be..27fbce48d9f 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; @@ -48,6 +49,8 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class ParentIdQueryBuilderTests extends AbstractQueryTestCase { @@ -154,4 +157,14 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); + assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getMessage()); + } } diff --git a/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml b/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml index d85f89e768d..a3c2d78690a 100644 --- a/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml +++ b/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml @@ -26,6 +26,18 @@ setup: - do: indices.refresh: {} +--- +teardown: + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: null + --- "Parent/child inner hits": - do: @@ -53,3 +65,24 @@ setup: - is_false: hits.hits.0.inner_hits.child.hits.hits.0._nested - gte: { hits.hits.0.inner_hits.child.hits.hits.0._seq_no: 0 } - gte: { hits.hits.0.inner_hits.child.hits.hits.0._primary_term: 1 } + +--- +"HasChild disallow expensive queries": + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + ### Update setting to false + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: "false" + flat_settings: true + + - match: {transient: {search.allow_expensive_queries: "false"}} + + - do: + catch: /\[joining\] queries cannot be executed when 
\'search.allow_expensive_queries\' is set to false./ + search: + body: { "query": { "has_child": { "type": "child", "query": { "match_all": {} }, "inner_hits": {} } } } diff --git a/modules/parent-join/src/test/resources/rest-api-spec/test/20_parent_join.yml b/modules/parent-join/src/test/resources/rest-api-spec/test/20_parent_join.yml index 9e4b1639d2b..aa8dc5d5865 100644 --- a/modules/parent-join/src/test/resources/rest-api-spec/test/20_parent_join.yml +++ b/modules/parent-join/src/test/resources/rest-api-spec/test/20_parent_join.yml @@ -51,6 +51,18 @@ setup: - do: indices.refresh: {} +--- +teardown: + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: null + --- "Test basic": - do: @@ -116,3 +128,29 @@ setup: - match: { hits.hits.1._id: "4" } - match: { hits.hits.1._source.join_field.name: "child" } - match: { hits.hits.1._source.join_field.parent: "1" } + +--- +"HasChild disallow expensive queries": + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + ### Update setting to false + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: "false" + flat_settings: true + + - match: {transient: {search.allow_expensive_queries: "false"}} + + - do: + catch: /\[joining\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + body: + sort: [ "id" ] + query: + parent_id: + type: child + id: 1 diff --git a/modules/parent-join/src/test/resources/rest-api-spec/test/30_inner_hits.yml b/modules/parent-join/src/test/resources/rest-api-spec/test/30_inner_hits.yml index b0bed8e1d11..9f9bbbacf12 100644 --- a/modules/parent-join/src/test/resources/rest-api-spec/test/30_inner_hits.yml +++ b/modules/parent-join/src/test/resources/rest-api-spec/test/30_inner_hits.yml @@ -1,3 +1,61 @@ +--- +setup: + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + - do: + indices.create: + index: 
test + body: + mappings: + properties: + entity_type: { "type": "keyword" } + join_field: { "type": "join", "relations": { "question": "answer", "person": "address" } } + settings: + number_of_shards: 1 + + - do: + index: + index: test + id: 1 + body: { "join_field": { "name": "question" }, "entity_type": "question" } + + - do: + index: + index: test + id: 2 + routing: 1 + body: { "join_field": { "name": "answer", "parent": 1} , "entity_type": "answer" } + + - do: + index: + index: test + id: 3 + body: { "join_field": { "name": "person" }, "entity_type": "person" } + + - do: + index: + index: test + routing: 3 + id: 4 + body: { "join_field": { "name": "address", "parent": 3 }, "entity_type": "address" } + + - do: + indices.refresh: {} + +--- +teardown: + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: null + --- "Test two sub-queries with only one having inner_hits": - skip: @@ -66,3 +124,35 @@ - match: { hits.hits.1._id: "2" } - match: { hits.hits.1.inner_hits.question.hits.total.value: 1} - match: { hits.hits.1.inner_hits.question.hits.hits.0._id: "1"} + +--- +"HasParent disallow expensive queries": + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + ### Update setting to false + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: "false" + flat_settings: true + + - match: {transient: {search.allow_expensive_queries: "false"}} + + - do: + catch: /\[joining\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + bool: + should: + - term: + entity_type: person + - has_parent: + parent_type: question + query: + match_all: {} + inner_hits: {} diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 
67db4ba9c0e..882ff082d37 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -91,6 +91,7 @@ import java.util.Objects; import java.util.function.Supplier; import static org.elasticsearch.percolator.PercolatorFieldMapper.parseQuery; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; public class PercolateQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "percolate"; @@ -569,6 +570,11 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { @@ -364,4 +367,14 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); + assertEquals("[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getMessage()); + } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index ba0fb8a85f2..a463ca84b79 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -19,11 +19,14 @@ package org.elasticsearch.percolator; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -37,6 +40,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; @@ -886,4 +890,55 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase { assertThat(item.getFailureMessage(), containsString("[test/type/6] couldn't be found")); } + public void testDisallowExpensiveQueries() throws IOException { + try { + assertAcked(client().admin().indices().prepareCreate("test") + .addMapping("_doc", "id", "type=keyword", "field1", "type=keyword", "query", "type=percolator") + ); + + client().prepareIndex("test", "_doc").setId("1") + .setSource(jsonBuilder().startObject() + .field("id", "1") + .field("query", matchQuery("field1", "value")).endObject()) + .get(); + refresh(); + + // Execute with search.allow_expensive_queries = null => default value = true => success + BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); + SearchResponse response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); + + // Set search.allow_expensive_queries to "false" => assert failure + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () ->
client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + .get()); + assertEquals("[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getCause().getMessage()); + + // Set search.allow_expensive_queries setting to "true" ==> success + updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); + } finally { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + } + } } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index 4b29d314356..883468941a5 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -158,7 +158,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper { @Override public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, - boolean transpositions) { + boolean transpositions, QueryShardContext context) { throw 
new UnsupportedOperationException("[fuzzy] queries are not supported on [" + CONTENT_TYPE + "] fields."); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java index a261e8b3b7e..c44ead42557 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.ICUCollationKeywordFieldMapper.CollationFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; @@ -101,32 +102,36 @@ public class CollationFieldTypeTests extends FieldTypeTestCase { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); - expectThrows(UnsupportedOperationException.class, - () -> ft.regexpQuery("foo.*", 0, 10, null, null)); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, + () -> ft.regexpQuery("foo.*", 0, 10, null, randomMockShardContext())); + assertEquals("[regexp] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } public void testFuzzyQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); - expectThrows(UnsupportedOperationException.class, - () -> ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true)); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, + () -> ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, 
randomMockShardContext())); + assertEquals("[fuzzy] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } public void testPrefixQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); - expectThrows(UnsupportedOperationException.class, - () -> ft.prefixQuery("prefix", null, null)); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, + () -> ft.prefixQuery("prefix", null, randomMockShardContext())); + assertEquals("[prefix] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } public void testWildcardQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); - expectThrows(UnsupportedOperationException.class, - () -> ft.wildcardQuery("foo*", null, null)); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, + () -> ft.wildcardQuery("foo*", null, randomMockShardContext())); + assertEquals("[wildcard] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } public void testRangeQuery() { @@ -143,11 +148,16 @@ public class CollationFieldTypeTests extends FieldTypeTestCase { TermRangeQuery expected = new TermRangeQuery("field", new BytesRef(aKey.bytes, 0, aKey.size), new BytesRef(bKey.bytes, 0, bKey.size), false, false); - assertEquals(expected, ft.rangeQuery("a", "b", false, false, null, null, null, null)); + assertEquals(expected, ft.rangeQuery("a", "b", false, false, null, null, null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.rangeQuery("a", "b", true, true, null, null, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[range] queries on [text] or [keyword] fields cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", ee.getMessage()); ft.setIndexOptions(IndexOptions.NONE); IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, - () -> ft.rangeQuery("a", "b", false, false, null, null, null, null)); + () -> ft.rangeQuery("a", "b", false, false, null, null, null, MOCK_QSC)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java index 546634c88cf..9c7207ef6b2 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java @@ -55,8 +55,7 @@ class AwsEc2ServiceImpl implements AwsEc2Service { // proxy for testing AmazonEC2 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { - final AmazonEC2 client = new AmazonEC2Client(credentials, configuration); - return client; + return new AmazonEC2Client(credentials, configuration); } // pkg private for tests diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AbstractEC2MockAPITestCase.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AbstractEC2MockAPITestCase.java new file mode 100644 index 00000000000..8f1e73f388f --- /dev/null +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AbstractEC2MockAPITestCase.java @@ -0,0 +1,214 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.discovery.ec2; + +import com.amazonaws.services.ec2.model.Instance; +import com.amazonaws.services.ec2.model.Tag; +import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.mocksocket.MockHttpServer; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import javax.xml.XMLConstants; +import javax.xml.stream.XMLOutputFactory; +import javax.xml.stream.XMLStreamWriter; + +import java.io.StringWriter; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.Collections; +import java.util.List; +import java.util.UUID; + +import static java.nio.charset.StandardCharsets.UTF_8; + +@SuppressForbidden(reason = "use a http server") +public abstract class AbstractEC2MockAPITestCase extends ESTestCase { + + protected HttpServer httpServer; + + protected ThreadPool threadPool; + + protected MockTransportService transportService; + + protected NetworkService networkService = new NetworkService(Collections.emptyList()); + + @Before + public void setUp() throws 
Exception { + httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpServer.start(); + threadPool = new TestThreadPool(EC2RetriesTests.class.getName()); + transportService = createTransportService(); + super.setUp(); + } + + protected abstract MockTransportService createTransportService(); + + protected Settings buildSettings(String accessKey) { + final InetSocketAddress address = httpServer.getAddress(); + final String endpoint = "http://" + InetAddresses.toUriString(address.getAddress()) + ":" + address.getPort(); + final MockSecureSettings mockSecure = new MockSecureSettings(); + mockSecure.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), accessKey); + mockSecure.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret"); + return Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), endpoint).setSecureSettings(mockSecure).build(); + } + + @After + public void tearDown() throws Exception { + try { + IOUtils.close(transportService, () -> terminate(threadPool), () -> httpServer.stop(0)); + } finally { + super.tearDown(); + } + } + + /** + * Generates a XML response that describe the EC2 instances + * TODO: org.elasticsearch.discovery.ec2.AmazonEC2Fixture uses pretty much the same code. We should dry up that test fixture. 
+ */ + static byte[] generateDescribeInstancesResponse(List instances) { + final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory(); + xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true); + + final StringWriter out = new StringWriter(); + XMLStreamWriter sw; + try { + sw = xmlOutputFactory.createXMLStreamWriter(out); + sw.writeStartDocument(); + + String namespace = "http://ec2.amazonaws.com/doc/2013-02-01/"; + sw.setDefaultNamespace(namespace); + sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "DescribeInstancesResponse", namespace); + { + sw.writeStartElement("requestId"); + sw.writeCharacters(UUID.randomUUID().toString()); + sw.writeEndElement(); + + sw.writeStartElement("reservationSet"); + { + for (Instance instance : instances) { + sw.writeStartElement("item"); + { + sw.writeStartElement("reservationId"); + sw.writeCharacters(UUID.randomUUID().toString()); + sw.writeEndElement(); + + sw.writeStartElement("instancesSet"); + { + sw.writeStartElement("item"); + { + sw.writeStartElement("instanceId"); + sw.writeCharacters(instance.getInstanceId()); + sw.writeEndElement(); + + sw.writeStartElement("imageId"); + sw.writeCharacters(instance.getImageId()); + sw.writeEndElement(); + + sw.writeStartElement("instanceState"); + { + sw.writeStartElement("code"); + sw.writeCharacters("16"); + sw.writeEndElement(); + + sw.writeStartElement("name"); + sw.writeCharacters("running"); + sw.writeEndElement(); + } + sw.writeEndElement(); + + sw.writeStartElement("privateDnsName"); + sw.writeCharacters(instance.getPrivateDnsName()); + sw.writeEndElement(); + + sw.writeStartElement("dnsName"); + sw.writeCharacters(instance.getPublicDnsName()); + sw.writeEndElement(); + + sw.writeStartElement("instanceType"); + sw.writeCharacters("m1.medium"); + sw.writeEndElement(); + + sw.writeStartElement("placement"); + { + sw.writeStartElement("availabilityZone"); + sw.writeCharacters("use-east-1e"); + sw.writeEndElement(); + + 
sw.writeEmptyElement("groupName"); + + sw.writeStartElement("tenancy"); + sw.writeCharacters("default"); + sw.writeEndElement(); + } + sw.writeEndElement(); + + sw.writeStartElement("privateIpAddress"); + sw.writeCharacters(instance.getPrivateIpAddress()); + sw.writeEndElement(); + + sw.writeStartElement("ipAddress"); + sw.writeCharacters(instance.getPublicIpAddress()); + sw.writeEndElement(); + + sw.writeStartElement("tagSet"); + for (Tag tag : instance.getTags()) { + sw.writeStartElement("item"); + { + sw.writeStartElement("key"); + sw.writeCharacters(tag.getKey()); + sw.writeEndElement(); + + sw.writeStartElement("value"); + sw.writeCharacters(tag.getValue()); + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + + sw.writeEndDocument(); + sw.flush(); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + return out.toString().getBytes(UTF_8); + } +} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java deleted file mode 100644 index 040472723fb..00000000000 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.discovery.ec2; - -import com.amazonaws.AmazonClientException; -import com.amazonaws.ClientConfiguration; -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.services.ec2.AbstractAmazonEC2; -import com.amazonaws.services.ec2.model.DescribeInstancesRequest; -import com.amazonaws.services.ec2.model.DescribeInstancesResult; -import com.amazonaws.services.ec2.model.Filter; -import com.amazonaws.services.ec2.model.Instance; -import com.amazonaws.services.ec2.model.InstanceState; -import com.amazonaws.services.ec2.model.InstanceStateName; -import com.amazonaws.services.ec2.model.Reservation; -import com.amazonaws.services.ec2.model.Tag; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class AmazonEC2Mock extends AbstractAmazonEC2 { - - private static final Logger logger = LogManager.getLogger(AmazonEC2Mock.class); - - public static final String PREFIX_PRIVATE_IP = "10.0.0."; - public static final String PREFIX_PUBLIC_IP = "8.8.8."; - public static final String PREFIX_PUBLIC_DNS = "mock-ec2-"; - public static final String SUFFIX_PUBLIC_DNS = ".amazon.com"; - public static final String PREFIX_PRIVATE_DNS = "mock-ip-"; - public static final String SUFFIX_PRIVATE_DNS = ".ec2.internal"; - - final List instances = new ArrayList<>(); - String endpoint; - final 
AWSCredentialsProvider credentials; - final ClientConfiguration configuration; - - public AmazonEC2Mock(int nodes, List> tagsList, AWSCredentialsProvider credentials, ClientConfiguration configuration) { - if (tagsList != null) { - assert tagsList.size() == nodes; - } - - for (int node = 1; node < nodes + 1; node++) { - String instanceId = "node" + node; - - Instance instance = new Instance() - .withInstanceId(instanceId) - .withState(new InstanceState().withName(InstanceStateName.Running)) - .withPrivateDnsName(PREFIX_PRIVATE_DNS + instanceId + SUFFIX_PRIVATE_DNS) - .withPublicDnsName(PREFIX_PUBLIC_DNS + instanceId + SUFFIX_PUBLIC_DNS) - .withPrivateIpAddress(PREFIX_PRIVATE_IP + node) - .withPublicIpAddress(PREFIX_PUBLIC_IP + node); - - if (tagsList != null) { - instance.setTags(tagsList.get(node-1)); - } - - instances.add(instance); - } - this.credentials = credentials; - this.configuration = configuration; - } - - @Override - public DescribeInstancesResult describeInstances(DescribeInstancesRequest describeInstancesRequest) - throws AmazonClientException { - Collection filteredInstances = new ArrayList<>(); - - logger.debug("--> mocking describeInstances"); - - for (Instance instance : instances) { - boolean tagFiltered = false; - boolean instanceFound = false; - - Map> expectedTags = new HashMap<>(); - Map> instanceTags = new HashMap<>(); - - for (Tag tag : instance.getTags()) { - List tags = instanceTags.get(tag.getKey()); - if (tags == null) { - tags = new ArrayList<>(); - instanceTags.put(tag.getKey(), tags); - } - tags.add(tag.getValue()); - } - - for (Filter filter : describeInstancesRequest.getFilters()) { - // If we have the same tag name and one of the values, we add the instance - if (filter.getName().startsWith("tag:")) { - tagFiltered = true; - String tagName = filter.getName().substring(4); - // if we have more than one value for the same key, then the key is appended with .x - Pattern p = Pattern.compile("\\.\\d+", Pattern.DOTALL); - Matcher m = 
p.matcher(tagName); - if (m.find()) { - int i = tagName.lastIndexOf("."); - tagName = tagName.substring(0, i); - } - - List tags = expectedTags.get(tagName); - if (tags == null) { - tags = new ArrayList<>(); - expectedTags.put(tagName, tags); - } - tags.addAll(filter.getValues()); - } - } - - if (tagFiltered) { - logger.debug("--> expected tags: [{}]", expectedTags); - logger.debug("--> instance tags: [{}]", instanceTags); - - instanceFound = true; - for (Map.Entry> expectedTagsEntry : expectedTags.entrySet()) { - List instanceTagValues = instanceTags.get(expectedTagsEntry.getKey()); - if (instanceTagValues == null) { - instanceFound = false; - break; - } - - for (String expectedValue : expectedTagsEntry.getValue()) { - boolean valueFound = false; - for (String instanceTagValue : instanceTagValues) { - if (instanceTagValue.equals(expectedValue)) { - valueFound = true; - } - } - if (valueFound == false) { - instanceFound = false; - } - } - } - } - - if (tagFiltered == false || instanceFound) { - logger.debug("--> instance added"); - filteredInstances.add(instance); - } else { - logger.debug("--> instance filtered"); - } - } - - return new DescribeInstancesResult().withReservations( - new Reservation().withInstances(filteredInstances) - ); - } - - @Override - public void setEndpoint(String endpoint) throws IllegalArgumentException { - this.endpoint = endpoint; - } - - @Override - public void shutdown() { - } -} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java deleted file mode 100644 index e44087f9413..00000000000 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.discovery.ec2; - -import com.amazonaws.ClientConfiguration; -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.services.ec2.AmazonEC2; -import com.amazonaws.services.ec2.model.Tag; - -import java.util.List; - -public class AwsEc2ServiceMock extends AwsEc2ServiceImpl { - - private final int nodes; - private final List> tagsList; - - public AwsEc2ServiceMock(int nodes, List> tagsList) { - this.nodes = nodes; - this.tagsList = tagsList; - } - - @Override - AmazonEC2 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { - return new AmazonEC2Mock(nodes, tagsList, credentials, configuration); - } - -} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java index 8a890f77c2e..453f165a8a1 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.http.HttpMethodName; -import com.sun.net.httpserver.HttpServer; +import 
com.amazonaws.services.ec2.model.Instance; import org.apache.http.HttpStatus; import org.apache.http.NameValuePair; import org.apache.http.client.utils.URLEncodedUtils; @@ -28,82 +28,41 @@ import org.elasticsearch.Version; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.discovery.SeedHostsProvider; import org.elasticsearch.discovery.SeedHostsResolver; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.mocksocket.MockHttpServer; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.nio.MockNioTransport; import org.hamcrest.Matchers; -import org.junit.After; -import org.junit.Before; - -import javax.xml.XMLConstants; -import javax.xml.stream.XMLOutputFactory; -import javax.xml.stream.XMLStreamWriter; import java.io.IOException; -import java.io.StringWriter; -import java.net.InetAddress; -import java.net.InetSocketAddress; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import static java.nio.charset.StandardCharsets.UTF_8; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.is; 
@SuppressForbidden(reason = "use a http server") -public class EC2RetriesTests extends ESTestCase { +public class EC2RetriesTests extends AbstractEC2MockAPITestCase { - private HttpServer httpServer; - - private ThreadPool threadPool; - - private MockTransportService transportService; - - private NetworkService networkService = new NetworkService(Collections.emptyList()); - - @Before - public void setUp() throws Exception { - httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); - httpServer.start(); - threadPool = new TestThreadPool(EC2RetriesTests.class.getName()); - final MockNioTransport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, networkService, + @Override + protected MockTransportService createTransportService() { + return new MockTransportService(Settings.EMPTY, new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, networkService, PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), - new NoneCircuitBreakerService()); - transportService = - new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); - super.setUp(); - } - - @After - public void tearDown() throws Exception { - try { - IOUtils.close(transportService, () -> terminate(threadPool), () -> httpServer.stop(0)); - } finally { - super.tearDown(); - } + new NoneCircuitBreakerService()), threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); } public void testEC2DiscoveryRetriesOnRateLimiting() throws IOException { - assumeFalse("https://github.com/elastic/elasticsearch/issues/51685", inFipsJvm()); final String accessKey = "ec2_access"; - final List hosts = Collections.singletonList("127.0.0.1:9000"); + final List hosts = Collections.singletonList("127.0.0.1:9300"); final Map failedRequests = new ConcurrentHashMap<>(); // retry the same request 5 times at most final int maxRetries = randomIntBetween(1, 
5); @@ -125,7 +84,8 @@ public class EC2RetriesTests extends ESTestCase { byte[] responseBody = null; for (NameValuePair parse : URLEncodedUtils.parse(request, UTF_8)) { if ("Action".equals(parse.getName())) { - responseBody = generateDescribeInstancesResponse(hosts); + responseBody = generateDescribeInstancesResponse(hosts.stream().map( + address -> new Instance().withPublicIpAddress(address)).collect(Collectors.toList())); break; } } @@ -138,14 +98,7 @@ public class EC2RetriesTests extends ESTestCase { } fail("did not send response"); }); - - final InetSocketAddress address = httpServer.getAddress(); - final String endpoint = "http://" + InetAddresses.toUriString(address.getAddress()) + ":" + address.getPort(); - final MockSecureSettings mockSecure = new MockSecureSettings(); - mockSecure.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), accessKey); - mockSecure.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret"); - try (Ec2DiscoveryPlugin plugin = new Ec2DiscoveryPlugin( - Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), endpoint).setSecureSettings(mockSecure).build())) { + try (Ec2DiscoveryPlugin plugin = new Ec2DiscoveryPlugin(buildSettings(accessKey))) { final SeedHostsProvider seedHostsProvider = plugin.getSeedHostProviders(transportService, networkService).get("ec2").get(); final SeedHostsResolver resolver = new SeedHostsResolver("test", Settings.EMPTY, transportService, seedHostsProvider); resolver.start(); @@ -156,112 +109,4 @@ public class EC2RetriesTests extends ESTestCase { assertThat(failedRequests.values().iterator().next(), is(maxRetries)); } } - - /** - * Generates a XML response that describe the EC2 instances - * TODO: org.elasticsearch.discovery.ec2.AmazonEC2Fixture uses pretty much the same code. We should dry up that test fixture. 
- */ - private byte[] generateDescribeInstancesResponse(List nodes) { - final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory(); - xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true); - - final StringWriter out = new StringWriter(); - XMLStreamWriter sw; - try { - sw = xmlOutputFactory.createXMLStreamWriter(out); - sw.writeStartDocument(); - - String namespace = "http://ec2.amazonaws.com/doc/2013-02-01/"; - sw.setDefaultNamespace(namespace); - sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "DescribeInstancesResponse", namespace); - { - sw.writeStartElement("requestId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("reservationSet"); - { - for (String address : nodes) { - sw.writeStartElement("item"); - { - sw.writeStartElement("reservationId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("instancesSet"); - { - sw.writeStartElement("item"); - { - sw.writeStartElement("instanceId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("imageId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("instanceState"); - { - sw.writeStartElement("code"); - sw.writeCharacters("16"); - sw.writeEndElement(); - - sw.writeStartElement("name"); - sw.writeCharacters("running"); - sw.writeEndElement(); - } - sw.writeEndElement(); - - sw.writeStartElement("privateDnsName"); - sw.writeCharacters(address); - sw.writeEndElement(); - - sw.writeStartElement("dnsName"); - sw.writeCharacters(address); - sw.writeEndElement(); - - sw.writeStartElement("instanceType"); - sw.writeCharacters("m1.medium"); - sw.writeEndElement(); - - sw.writeStartElement("placement"); - { - sw.writeStartElement("availabilityZone"); - sw.writeCharacters("use-east-1e"); - sw.writeEndElement(); - - sw.writeEmptyElement("groupName"); - - 
sw.writeStartElement("tenancy"); - sw.writeCharacters("default"); - sw.writeEndElement(); - } - sw.writeEndElement(); - - sw.writeStartElement("privateIpAddress"); - sw.writeCharacters(address); - sw.writeEndElement(); - - sw.writeStartElement("ipAddress"); - sw.writeCharacters(address); - sw.writeEndElement(); - } - sw.writeEndElement(); - } - sw.writeEndElement(); - } - sw.writeEndElement(); - } - sw.writeEndElement(); - } - sw.writeEndElement(); - - sw.writeEndDocument(); - sw.flush(); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - return out.toString().getBytes(UTF_8); - } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java deleted file mode 100644 index bc45a95c2f3..00000000000 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.discovery.ec2; - -import com.amazonaws.services.ec2.model.Tag; - -import org.elasticsearch.common.settings.Settings; - -import java.util.List; - -public class Ec2DiscoveryPluginMock extends Ec2DiscoveryPlugin { - - Ec2DiscoveryPluginMock(Settings settings) { - this(settings, 1, null); - } - - public Ec2DiscoveryPluginMock(Settings settings, int nodes, List> tagsList) { - super(settings, new AwsEc2ServiceMock(nodes, tagsList)); - } - -} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java index 720ffaddd74..661b5815b40 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -19,9 +19,13 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.BasicSessionCredentials; +import com.amazonaws.services.ec2.AbstractAmazonEC2; +import com.amazonaws.services.ec2.AmazonEC2; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; @@ -189,4 +193,37 @@ public class Ec2DiscoveryPluginTests extends ESTestCase { } } } + + private static class Ec2DiscoveryPluginMock extends Ec2DiscoveryPlugin { + + Ec2DiscoveryPluginMock(Settings settings) { + super(settings, new AwsEc2ServiceImpl() { + @Override + AmazonEC2 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + return new AmazonEC2Mock(credentials, configuration); + } + }); + } + } + + private static class AmazonEC2Mock extends AbstractAmazonEC2 { + + String endpoint; + 
final AWSCredentialsProvider credentials; + final ClientConfiguration configuration; + + AmazonEC2Mock(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + this.credentials = credentials; + this.configuration = configuration; + } + + @Override + public void setEndpoint(String endpoint) throws IllegalArgumentException { + this.endpoint = endpoint; + } + + @Override + public void shutdown() { + } + } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index ba318484643..a8508a68dea 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -19,71 +19,68 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.http.HttpMethodName; +import com.amazonaws.services.ec2.model.Instance; +import com.amazonaws.services.ec2.model.InstanceState; +import com.amazonaws.services.ec2.model.InstanceStateName; import com.amazonaws.services.ec2.model.Tag; +import org.apache.http.HttpStatus; +import org.apache.http.NameValuePair; +import org.apache.http.client.utils.URLEncodedUtils; import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; 
import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.nio.MockNioTransport; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -public class Ec2DiscoveryTests extends ESTestCase { +@SuppressForbidden(reason = "use a http server") +public class Ec2DiscoveryTests extends AbstractEC2MockAPITestCase { + + private static final String SUFFIX_PRIVATE_DNS = ".ec2.internal"; + private static final String PREFIX_PRIVATE_DNS = "mock-ip-"; + private static final String SUFFIX_PUBLIC_DNS = ".amazon.com"; + private static final String PREFIX_PUBLIC_DNS = "mock-ec2-"; + private static final String PREFIX_PUBLIC_IP = "8.8.8."; + private static final String PREFIX_PRIVATE_IP = "10.0.0."; - protected static ThreadPool threadPool; - protected MockTransportService transportService; private Map poorMansDNS = new ConcurrentHashMap<>(); - @BeforeClass - public static void createThreadPool() { - threadPool = new TestThreadPool(Ec2DiscoveryTests.class.getName()); - } - - @AfterClass - public static void stopThreadPool() throws InterruptedException { - if (threadPool !=null) { - terminate(threadPool); - threadPool = null; - } - } - - @Before - public void createTransportService() { - NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList()); + protected MockTransportService 
createTransportService() { final Transport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, - new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, namedWriteableRegistry, + new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, writableRegistry(), new NoneCircuitBreakerService()) { @Override - public TransportAddress[] addressesFromString(String address) throws UnknownHostException { + public TransportAddress[] addressesFromString(String address) { // we just need to ensure we don't resolve DNS here return new TransportAddress[] {poorMansDNS.getOrDefault(address, buildNewFakeTransportAddress())}; } }; - transportService = new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, - null); + return new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); } protected List buildDynamicHosts(Settings nodeSettings, int nodes) { @@ -91,8 +88,65 @@ public class Ec2DiscoveryTests extends ESTestCase { } protected List buildDynamicHosts(Settings nodeSettings, int nodes, List> tagsList) { - try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { + final String accessKey = "ec2_key"; + try (Ec2DiscoveryPlugin plugin = new Ec2DiscoveryPlugin(buildSettings(accessKey))) { AwsEc2SeedHostsProvider provider = new AwsEc2SeedHostsProvider(nodeSettings, transportService, plugin.ec2Service); + httpServer.createContext("/", exchange -> { + if (exchange.getRequestMethod().equals(HttpMethodName.POST.name())) { + final String request = Streams.readFully(exchange.getRequestBody()).toBytesRef().utf8ToString(); + final String userAgent = exchange.getRequestHeaders().getFirst("User-Agent"); + if (userAgent != null && userAgent.startsWith("aws-sdk-java")) { + final String auth = exchange.getRequestHeaders().getFirst("Authorization"); + if (auth == null 
|| auth.contains(accessKey) == false) { + throw new IllegalArgumentException("wrong access key: " + auth); + } + // Simulate an EC2 DescribeInstancesResponse + final Map> tagsIncluded = new HashMap<>(); + final String[] params = request.split("&"); + Arrays.stream(params).filter(entry -> entry.startsWith("Filter.") && entry.contains("=tag%3A")) + .forEach(entry -> { + final int startIndex = "Filter.".length(); + final int filterId = Integer.parseInt(entry.substring(startIndex, entry.indexOf(".", startIndex))); + tagsIncluded.put(entry.substring(entry.indexOf("=tag%3A") + "=tag%3A".length()), + Arrays.stream(params) + .filter(param -> param.startsWith("Filter." + filterId + ".Value.")) + .map(param -> param.substring(param.indexOf("=") + 1)) + .collect(Collectors.toList())); + } + ); + final List instances = IntStream.range(1, nodes + 1).mapToObj(node -> { + final String instanceId = "node" + node; + final Instance instance = new Instance() + .withInstanceId(instanceId) + .withState(new InstanceState().withName(InstanceStateName.Running)) + .withPrivateDnsName(PREFIX_PRIVATE_DNS + instanceId + SUFFIX_PRIVATE_DNS) + .withPublicDnsName(PREFIX_PUBLIC_DNS + instanceId + SUFFIX_PUBLIC_DNS) + .withPrivateIpAddress(PREFIX_PRIVATE_IP + node) + .withPublicIpAddress(PREFIX_PUBLIC_IP + node); + if (tagsList != null) { + instance.setTags(tagsList.get(node - 1)); + } + return instance; + }).filter(instance -> + tagsIncluded.entrySet().stream().allMatch(entry -> instance.getTags().stream() + .filter(t -> t.getKey().equals(entry.getKey())) + .map(Tag::getValue) + .collect(Collectors.toList()) + .containsAll(entry.getValue()))) + .collect(Collectors.toList()); + for (NameValuePair parse : URLEncodedUtils.parse(request, UTF_8)) { + if ("Action".equals(parse.getName())) { + final byte[] responseBody = generateDescribeInstancesResponse(instances); + exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, 
responseBody.length); + exchange.getResponseBody().write(responseBody); + return; + } + } + } + } + fail("did not send response"); + }); List dynamicHosts = provider.getSeedAddresses(null); logger.debug("--> addresses found: {}", dynamicHosts); return dynamicHosts; @@ -113,7 +167,7 @@ public class Ec2DiscoveryTests extends ESTestCase { public void testPrivateIp() throws InterruptedException { int nodes = randomInt(10); for (int i = 0; i < nodes; i++) { - poorMansDNS.put(AmazonEC2Mock.PREFIX_PRIVATE_IP + (i+1), buildNewFakeTransportAddress()); + poorMansDNS.put(PREFIX_PRIVATE_IP + (i+1), buildNewFakeTransportAddress()); } Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "private_ip") @@ -123,7 +177,7 @@ public class Ec2DiscoveryTests extends ESTestCase { // We check that we are using here expected address int node = 1; for (TransportAddress address : transportAddresses) { - TransportAddress expected = poorMansDNS.get(AmazonEC2Mock.PREFIX_PRIVATE_IP + node++); + TransportAddress expected = poorMansDNS.get(PREFIX_PRIVATE_IP + node++); assertEquals(address, expected); } } @@ -131,7 +185,7 @@ public class Ec2DiscoveryTests extends ESTestCase { public void testPublicIp() throws InterruptedException { int nodes = randomInt(10); for (int i = 0; i < nodes; i++) { - poorMansDNS.put(AmazonEC2Mock.PREFIX_PUBLIC_IP + (i+1), buildNewFakeTransportAddress()); + poorMansDNS.put(PREFIX_PUBLIC_IP + (i+1), buildNewFakeTransportAddress()); } Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "public_ip") @@ -141,7 +195,7 @@ public class Ec2DiscoveryTests extends ESTestCase { // We check that we are using here expected address int node = 1; for (TransportAddress address : dynamicHosts) { - TransportAddress expected = poorMansDNS.get(AmazonEC2Mock.PREFIX_PUBLIC_IP + node++); + TransportAddress expected = poorMansDNS.get(PREFIX_PUBLIC_IP + node++); assertEquals(address, expected); } } @@ -150,8 +204,8 @@ 
public class Ec2DiscoveryTests extends ESTestCase { int nodes = randomInt(10); for (int i = 0; i < nodes; i++) { String instanceId = "node" + (i+1); - poorMansDNS.put(AmazonEC2Mock.PREFIX_PRIVATE_DNS + instanceId + - AmazonEC2Mock.SUFFIX_PRIVATE_DNS, buildNewFakeTransportAddress()); + poorMansDNS.put(PREFIX_PRIVATE_DNS + instanceId + + SUFFIX_PRIVATE_DNS, buildNewFakeTransportAddress()); } Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "private_dns") @@ -163,7 +217,7 @@ public class Ec2DiscoveryTests extends ESTestCase { for (TransportAddress address : dynamicHosts) { String instanceId = "node" + node++; TransportAddress expected = poorMansDNS.get( - AmazonEC2Mock.PREFIX_PRIVATE_DNS + instanceId + AmazonEC2Mock.SUFFIX_PRIVATE_DNS); + PREFIX_PRIVATE_DNS + instanceId + SUFFIX_PRIVATE_DNS); assertEquals(address, expected); } } @@ -172,8 +226,8 @@ public class Ec2DiscoveryTests extends ESTestCase { int nodes = randomInt(10); for (int i = 0; i < nodes; i++) { String instanceId = "node" + (i+1); - poorMansDNS.put(AmazonEC2Mock.PREFIX_PUBLIC_DNS + instanceId - + AmazonEC2Mock.SUFFIX_PUBLIC_DNS, buildNewFakeTransportAddress()); + poorMansDNS.put(PREFIX_PUBLIC_DNS + instanceId + + SUFFIX_PUBLIC_DNS, buildNewFakeTransportAddress()); } Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "public_dns") @@ -185,7 +239,7 @@ public class Ec2DiscoveryTests extends ESTestCase { for (TransportAddress address : dynamicHosts) { String instanceId = "node" + node++; TransportAddress expected = poorMansDNS.get( - AmazonEC2Mock.PREFIX_PUBLIC_DNS + instanceId + AmazonEC2Mock.SUFFIX_PUBLIC_DNS); + PREFIX_PUBLIC_DNS + instanceId + SUFFIX_PUBLIC_DNS); assertEquals(address, expected); } } @@ -289,8 +343,7 @@ public class Ec2DiscoveryTests extends ESTestCase { } } - - abstract class DummyEc2SeedHostsProvider extends AwsEc2SeedHostsProvider { + abstract static class DummyEc2SeedHostsProvider extends 
AwsEc2SeedHostsProvider { public int fetchCount = 0; DummyEc2SeedHostsProvider(Settings settings, TransportService transportService, AwsEc2Service service) { super(settings, transportService, service); @@ -298,7 +351,7 @@ public class Ec2DiscoveryTests extends ESTestCase { } public void testGetNodeListEmptyCache() { - AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(1, null); + AwsEc2Service awsEc2Service = new AwsEc2ServiceImpl(); DummyEc2SeedHostsProvider provider = new DummyEc2SeedHostsProvider(Settings.EMPTY, transportService, awsEc2Service) { @Override protected List fetchDynamicNodes() { @@ -311,27 +364,4 @@ public class Ec2DiscoveryTests extends ESTestCase { } assertThat(provider.fetchCount, is(1)); } - - public void testGetNodeListCached() throws Exception { - Settings.Builder builder = Settings.builder() - .put(AwsEc2Service.NODE_CACHE_TIME_SETTING.getKey(), "500ms"); - try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { - DummyEc2SeedHostsProvider provider = new DummyEc2SeedHostsProvider(builder.build(), transportService, plugin.ec2Service) { - @Override - protected List fetchDynamicNodes() { - fetchCount++; - return Ec2DiscoveryTests.this.buildDynamicHosts(Settings.EMPTY, 1); - } - }; - for (int i=0; i<3; i++) { - provider.getSeedAddresses(null); - } - assertThat(provider.fetchCount, is(1)); - Thread.sleep(1_000L); // wait for cache to expire - for (int i=0; i<3; i++) { - provider.getSeedAddresses(null); - } - assertThat(provider.fetchCount, is(2)); - } - } } diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Docker.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Docker.java index d43c2219a03..08f230b2c38 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Docker.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Docker.java @@ -31,6 +31,7 @@ import java.nio.file.Paths; import java.nio.file.attribute.PosixFileAttributes; import 
java.nio.file.attribute.PosixFilePermission; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -76,7 +77,12 @@ public class Docker { * @param distribution details about the docker image to potentially load. */ public static void ensureImageIsLoaded(Distribution distribution) { - final long count = sh.run("docker image ls --format '{{.Repository}}' " + distribution.flavor.name).stdout.split("\n").length; + Shell.Result result = sh.run("docker image ls --format '{{.Repository}}' " + distribution.flavor.name); + + final long count = Arrays.stream(result.stdout.split("\n")) + .map(String::trim) + .filter(s -> s.isEmpty() == false) + .count(); if (count != 0) { return; diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java index 6c96175488e..c2055e060a5 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java @@ -84,7 +84,7 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler { @Override public List deprecatedRoutes() { return singletonList( - new DeprecatedRoute(GET, "/_test_cluster/deprecated_settings", DEPRECATED_ENDPOINT, deprecationLogger)); + new DeprecatedRoute(GET, "/_test_cluster/deprecated_settings", DEPRECATED_ENDPOINT)); } @Override diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml index 2ac37f6f49c..3d30f126cf6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml @@ -85,8 +85,8 @@ --- "Close index response with result per index": - 
skip: - version: " - 7.99.99" - reason: "close index response reports result per index starting version 8.0.0" + version: " - 7.2.99" + reason: "close index response reports result per index starting version 7.3.0" - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yml index 88187aa201a..2221d08c0b7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yml @@ -13,8 +13,8 @@ setup: --- "Validate query api": - skip: - version: ' - 7.99.99' - reason: message changed in 8.0.0 + version: ' - 7.6.99' + reason: message changed in 7.7.0 - do: indices.validate_query: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml index 9a7a5f8ed3d..694335b6677 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml @@ -6,11 +6,11 @@ setup: settings: number_of_replicas: 0 mappings: - "properties": - "number": - "type" : "integer" - "date": - "type" : "date" + properties: + number: + type: integer + date: + type: date - do: cluster.health: wait_for_status: green @@ -214,7 +214,10 @@ setup: mappings: properties: date: - type : date + type: date + fields: + nanos: + type: date_nanos - do: bulk: @@ -239,7 +242,24 @@ setup: date_histogram: field: date calendar_interval: month + - match: { hits.total.value: 4 } + - length: { aggregations.histo.buckets: 3 } + - match: { aggregations.histo.buckets.0.key_as_string: "2016-01-01T00:00:00.000Z" } + - match: { aggregations.histo.buckets.0.doc_count: 2 } + - match: { 
aggregations.histo.buckets.1.key_as_string: "2016-02-01T00:00:00.000Z" } + - match: { aggregations.histo.buckets.1.doc_count: 1 } + - match: { aggregations.histo.buckets.2.key_as_string: "2016-03-01T00:00:00.000Z" } + - match: { aggregations.histo.buckets.2.doc_count: 1 } + - do: + search: + body: + size: 0 + aggs: + histo: + date_histogram: + field: date.nanos + calendar_interval: month - match: { hits.total.value: 4 } - length: { aggregations.histo.buckets: 3 } - match: { aggregations.histo.buckets.0.key_as_string: "2016-01-01T00:00:00.000Z" } @@ -410,3 +430,63 @@ setup: - match: { aggregations.histo.buckets.1.doc_count: 2 } - match: { aggregations.histo.buckets.2.key_as_string: "2016-02-02T00:00:00.000Z" } - match: { aggregations.histo.buckets.2.doc_count: 1 } + +--- +"date_histogram with pre-epoch daylight savings time transition": + - skip: + version: " - 7.6.1" + reason: bug fixed in 7.6.1. + # Add date_nanos to the mapping. We couldn't do it during setup because that + # is run against 6.8 which doesn't have date_nanos + - do: + indices.put_mapping: + index: test_1 + body: + properties: + number: + type: integer + date: + type: date + fields: + nanos: + type: date_nanos + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"date": "2016-01-01"}' + + - do: + search: + body: + size: 0 + aggs: + histo: + date_histogram: + field: date + fixed_interval: 1ms + time_zone: America/Phoenix + + - match: { hits.total.value: 1 } + - length: { aggregations.histo.buckets: 1 } + - match: { aggregations.histo.buckets.0.key_as_string: "2015-12-31T17:00:00.000-07:00" } + - match: { aggregations.histo.buckets.0.doc_count: 1 } + + - do: + search: + body: + size: 0 + aggs: + histo: + date_histogram: + field: date.nanos + fixed_interval: 1ms + time_zone: America/Phoenix + + - match: { hits.total.value: 1 } + - length: { aggregations.histo.buckets: 1 } + - match: { aggregations.histo.buckets.0.key_as_string: "2015-12-31T17:00:00.000-07:00" } + - 
match: { aggregations.histo.buckets.0.doc_count: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 6787629789a..d2378084f3b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -775,7 +775,7 @@ setup: --- "Mixed ip and unmapped fields": - skip: - version: " - 7.99.99" + version: " - 7.5.99" reason: This will fail against 7.x until the fix is backported there # It is important that the index *without* the ip field be sorted *before* # the index *with* the ip field because that has caused bugs in the past. @@ -821,8 +821,8 @@ setup: --- "date_histogram with time_zone": - skip: - version: " - 7.99.99" - reason: This will fail against 7.whatever until we backport the fix + version: " - 7.6.0" + reason: Fixed in 7.6.0 - do: index: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml index 04d76a98780..6d1e2d606a4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml @@ -316,8 +316,8 @@ setup: --- "sub aggs": - skip: - version: " - 7.99.99" - reason: Sub aggs fixed in 8.0 (to be backported to 7.6.1) + version: " - 7.6.1" + reason: Sub aggs fixed in 7.6.1 - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml index 0016c9f9894..2e3d796383a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml 
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 7.99.99" #TODO change this after backport - reason: These new error messages were added in 7.1 + version: " - 7.1.99" + reason: These new error messages were added in 7.2 - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml index 5204d65d333..dabd4aebf81 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml @@ -141,7 +141,7 @@ --- 'Misspelled fields get "did you mean"': - skip: - version: " - 7.99.99" + version: " - 7.6.99" reason: Implemented in 8.0 (to be backported to 7.7) - do: catch: /\[significant_terms\] unknown field \[jlp\] did you mean \[jlh\]\?/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries.yml new file mode 100644 index 00000000000..e0b6827aa79 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries.yml @@ -0,0 +1,330 @@ +--- +setup: + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + - do: + indices.create: + index: test + body: + mappings: + properties: + text: + type: text + analyzer: standard + fields: + raw: + type: keyword + nested1: + type: nested + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test", "_id": "1"}}' + - '{"text" : "Some like it hot, some like it cold", "nested1": [{"foo": "bar1"}]}' + - '{"index": {"_index": "test", "_id": "2"}}' + - '{"text" : "Its cold outside, theres no kind of atmosphere", "nested1": [{"foo": "bar2"}]}' + - '{"index": {"_index": "test", "_id": "3"}}' + - '{"text" : 
"Baby its cold there outside", "nested1": [{"foo": "bar3"}]}' + - '{"index": {"_index": "test", "_id": "4"}}' + - '{"text" : "Outside it is cold and wet", "nested1": [{"foo": "bar4"}]}' + +--- +teardown: + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: null + +--- +"Test disallow expensive queries": + - skip: + version: " - 7.6.99" + reason: "implemented in 7.7.0" + + ### Check for initial setting = null -> false + - do: + cluster.get_settings: + flat_settings: true + + - match: {search.allow_expensive_queries: null} + + ### Prefix + - do: + search: + index: test + body: + query: + prefix: + text: + value: out + + - match: { hits.total.value: 3 } + + ### Fuzzy + - do: + search: + index: test + body: + query: + fuzzy: + text: + value: outwide + + - match: { hits.total.value: 3 } + + + ### Regexp + - do: + search: + index: test + body: + query: + regexp: + text: + value: .*ou.*id.* + + - match: { hits.total.value: 3 } + + ### Wildcard + - do: + search: + index: test + body: + query: + wildcard: + text: + value: out?ide + + - match: { hits.total.value: 3 } + + ### Range on text + - do: + search: + index: test + body: + query: + range: + text: + gte: "theres" + + - match: { hits.total.value: 2 } + + ### Range on keyword + - do: + search: + index: test + body: + query: + range: + text.raw: + gte : "Outside it is cold and wet" + + - match: { hits.total.value: 2 } + + ### Nested + - do: + search: + index: test + body: + query: + nested: + path: "nested1" + query: + bool: + must: [{"match": {"nested1.foo": "bar2"}}] + + - match: { hits.total.value: 1 } + + ### Update setting to false + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: "false" + flat_settings: true + + - match: {transient: {search.allow_expensive_queries: "false"}} + + ### Prefix + - do: + catch: /\[prefix\] queries cannot be executed when 
\'search.allow_expensive_queries\' is set to false. For optimised prefix queries on text fields please enable \[index_prefixes\]./ + search: + index: test + body: + query: + prefix: + text: + value: out + + ### Fuzzy + - do: + catch: /\[fuzzy\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + fuzzy: + text: + value: outwide + + ### Regexp + - do: + catch: /\[regexp\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + regexp: + text: + value: .*ou.*id.* + + ### Wildcard + - do: + catch: /\[wildcard\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + wildcard: + text: + value: out?ide + + ### Range on text + - do: + catch: /\[range\] queries on \[text\] or \[keyword\] fields cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + range: + text: + gte: "theres" + + ### Range on keyword + - do: + catch: /\[range\] queries on \[text\] or \[keyword\] fields cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + range: + text.raw: + gte : "Outside it is cold and wet" + + ### Nested + - do: + catch: /\[joining\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + nested: + path: "nested1" + query: + bool: + must: [{"match" : {"nested1.foo" : "bar2"}}] + + ### Revert setting to true + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: "true" + flat_settings: true + + - match: {transient: {search.allow_expensive_queries: "true"}} + + ### Prefix + - do: + search: + index: test + body: + query: + prefix: + text: + value: out + + - match: { hits.total.value: 3 } + + ### Fuzzy + - do: + search: + index: test + 
body: + query: + fuzzy: + text: + value: outwide + + - match: { hits.total.value: 3 } + + ### Regexp + - do: + search: + index: test + body: + query: + regexp: + text: + value: .*ou.*id.* + + - match: { hits.total.value: 3 } + + ### Wildcard + - do: + search: + index: test + body: + query: + wildcard: + text: + value: out?ide + + - match: { hits.total.value: 3 } + + ### Range on text + - do: + search: + index: test + body: + query: + range: + text: + gte: "theres" + + - match: { hits.total.value: 2 } + + ### Range on keyword + - do: + search: + index: test + body: + query: + range: + text.raw: + gte: "Outside it is cold and wet" + + - match: { hits.total.value: 2 } + + ### Nested + - do: + search: + index: test + body: + query: + nested: + path: "nested1" + query: + bool: + must: [{"match": {"nested1.foo": "bar2"}}] + + - match: { hits.total.value: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml index 6168c211383..fe70620c6ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml @@ -41,8 +41,8 @@ setup: --- "Create a snapshot and clean up repository": - skip: - version: " - 7.99.99" - reason: cleanup introduced in 8.0 + version: " - 7.3.99" + reason: cleanup introduced in 7.4 - do: snapshot.cleanup_repository: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml index 00656be2b59..874dda3606c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml @@ -157,8 +157,8 @@ setup: --- "Get snapshot info with metadata": - skip: - version: " - 7.9.99" - reason: 
"https://github.com/elastic/elasticsearch/pull/41281 not yet backported to 7.x" + version: " - 7.2.99" + reason: "Introduced with 7.3" - do: indices.create: diff --git a/server/src/main/java/org/elasticsearch/common/logging/LoggingOutputStream.java b/server/src/main/java/org/elasticsearch/common/logging/LoggingOutputStream.java index 54af0a1ccfd..e9fa9f73714 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/LoggingOutputStream.java +++ b/server/src/main/java/org/elasticsearch/common/logging/LoggingOutputStream.java @@ -98,6 +98,7 @@ class LoggingOutputStream extends OutputStream { } if (used == 0) { // only windows \r was in the buffer + buffer.used = 0; return; } log(new String(buffer.bytes, 0, used, StandardCharsets.UTF_8)); diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 04556c34179..e792473a93c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -450,6 +450,7 @@ public final class ClusterSettings extends AbstractScopedSettings { SearchService.DEFAULT_KEEPALIVE_SETTING, SearchService.KEEPALIVE_INTERVAL_SETTING, SearchService.MAX_KEEPALIVE_SETTING, + SearchService.ALLOW_EXPENSIVE_QUERIES, MultiBucketConsumerService.MAX_BUCKET_SETTING, SearchService.LOW_LEVEL_CANCELLATION_SETTING, SearchService.MAX_OPEN_SCROLL_CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java index e6bf6a65105..7b3a06bc665 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java @@ -208,7 +208,7 @@ public class DateUtils { return ZoneId.of(zoneId).normalized(); } - private static final Instant MAX_NANOSECOND_INSTANT = 
Instant.parse("2262-04-11T23:47:16.854775807Z"); + static final Instant MAX_NANOSECOND_INSTANT = Instant.parse("2262-04-11T23:47:16.854775807Z"); static final long MAX_NANOSECOND_IN_MILLIS = MAX_NANOSECOND_INSTANT.toEpochMilli(); @@ -231,6 +231,26 @@ public class DateUtils { return instant.getEpochSecond() * 1_000_000_000 + instant.getNano(); } + /** + * Returns an instant that is with valid nanosecond resolution. If + * the parameter is before the valid nanosecond range then this returns + * the minimum {@linkplain Instant} valid for nanosecond resultion. If + * the parameter is after the valid nanosecond range then this returns + * the maximum {@linkplain Instant} valid for nanosecond resolution. + *

+ * Useful for checking if all values for the field are within some range, + * even if the range's endpoints are not valid nanosecond resolution. + */ + public static Instant clampToNanosRange(Instant instant) { + if (instant.isBefore(Instant.EPOCH)) { + return Instant.EPOCH; + } + if (instant.isAfter(MAX_NANOSECOND_INSTANT)) { + return MAX_NANOSECOND_INSTANT; + } + return instant; + } + /** * convert a long value to a java time instant * the long value resembles the nanoseconds since the epoch diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index c70e4c8f441..3660d2097c1 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -130,6 +130,7 @@ public final class IndexModule { private final List searchOperationListeners = new ArrayList<>(); private final List indexOperationListeners = new ArrayList<>(); private final AtomicBoolean frozen = new AtomicBoolean(false); + private final BooleanSupplier allowExpensiveQueries; /** * Construct the index module for the index with the specified index settings. 
The index module contains extension points for plugins @@ -144,13 +145,15 @@ public final class IndexModule { final IndexSettings indexSettings, final AnalysisRegistry analysisRegistry, final EngineFactory engineFactory, - final Map directoryFactories) { + final Map directoryFactories, + final BooleanSupplier allowExpensiveQueries) { this.indexSettings = indexSettings; this.analysisRegistry = analysisRegistry; this.engineFactory = Objects.requireNonNull(engineFactory); this.searchOperationListeners.add(new SearchSlowLog(indexSettings)); this.indexOperationListeners.add(new IndexingSlowLog(indexSettings)); this.directoryFactories = Collections.unmodifiableMap(directoryFactories); + this.allowExpensiveQueries = allowExpensiveQueries; } /** @@ -424,7 +427,7 @@ public final class IndexModule { new SimilarityService(indexSettings, scriptService, similarities), shardStoreDeleter, indexAnalyzers, engineFactory, circuitBreakerService, bigArrays, threadPool, scriptService, clusterService, client, queryCache, directoryFactory, eventListener, readerWrapperFactory, mapperRegistry, indicesFieldDataCache, searchOperationListeners, - indexOperationListeners, namedWriteableRegistry, idFieldDataEnabled); + indexOperationListeners, namedWriteableRegistry, idFieldDataEnabled, allowExpensiveQueries); success = true; return indexService; } finally { diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index e88f95d826a..9dbccdcda61 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -59,8 +59,8 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.query.SearchIndexNameMatcher; import 
org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.SearchIndexNameMatcher; import org.elasticsearch.index.seqno.RetentionLeaseSyncer; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; @@ -127,6 +127,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final IndexSettings indexSettings; private final List searchOperationListeners; private final List indexingOperationListeners; + private final BooleanSupplier allowExpensiveQueries; private volatile AsyncRefreshTask refreshTask; private volatile AsyncTranslogFSync fsyncTask; private volatile AsyncGlobalCheckpointTask globalCheckpointTask; @@ -167,8 +168,10 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust List searchOperationListeners, List indexingOperationListeners, NamedWriteableRegistry namedWriteableRegistry, - BooleanSupplier idFieldDataEnabled) { + BooleanSupplier idFieldDataEnabled, + BooleanSupplier allowExpensiveQueries) { super(indexSettings); + this.allowExpensiveQueries = allowExpensiveQueries; this.indexSettings = indexSettings; this.xContentRegistry = xContentRegistry; this.similarityService = similarityService; @@ -570,7 +573,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust return new QueryShardContext( shardId, indexSettings, bigArrays, indexCache.bitsetFilterCache(), indexFieldData::getForField, mapperService(), similarityService(), scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, clusterAlias, - indexNameMatcher); + indexNameMatcher, allowExpensiveQueries); } /** diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index 98a79f066b2..be524946b0e 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ 
b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -35,14 +35,18 @@ import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; import org.elasticsearch.search.sort.NestedSortBuilder; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -72,6 +76,12 @@ public interface IndexFieldData extends IndexCompone */ SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse); + /** + * Build a sort implementation specialized for aggregations. + */ + BucketedSort newBucketedSort(BigArrays bigArrays, @Nullable Object missingValue, MultiValueMode sortMode, + Nested nested, SortOrder sortOrder, DocValueFormat format); + /** * Clears any resources associated with this field data. */ @@ -227,6 +237,11 @@ public interface IndexFieldData extends IndexCompone public Object missingValue(boolean reversed) { return null; } + + /** + * Create a {@linkplain BucketedSort} which is useful for sorting inside of aggregations. 
+ */ + public abstract BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format); } interface Builder { @@ -242,5 +257,4 @@ public interface IndexFieldData extends IndexCompone IndexFieldData localGlobalDirect(DirectoryReader indexReader) throws Exception; } - } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 859848df49e..b170c1aeae4 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -29,11 +29,15 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.fielddata.AbstractSortedDocValues; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -135,6 +139,11 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat }; } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + /** * A view of a SortedDocValues where missing values * are replaced with the specified term diff --git 
a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index c414944801f..8aff4a057f0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -27,12 +27,16 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -58,6 +62,18 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato return indexFieldData.load(context).getDoubleValues(); } + private NumericDoubleValues getNumericDocValues(LeafReaderContext context, double missingValue) throws IOException { + final SortedNumericDoubleValues values = getValues(context); + if (nested == null) { + return FieldData.replaceMissing(sortMode.select(values), missingValue); + } else { + final BitSet rootDocs = nested.rootDocs(context); + final DocIdSetIterator innerDocs = nested.innerDocs(context); + final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + return sortMode.select(values, missingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); + } + } + protected void setScorer(Scorable scorer) {} @Override @@ -70,17 +86,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato return new FieldComparator.DoubleComparator(numHits, null, null) { @Override protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException { - final SortedNumericDoubleValues values = getValues(context); - final NumericDoubleValues selectedValues; - if (nested == null) { - selectedValues = FieldData.replaceMissing(sortMode.select(values), dMissingValue); - } else { - final BitSet rootDocs = nested.rootDocs(context); - final DocIdSetIterator innerDocs = nested.innerDocs(context); - final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); - } - return selectedValues.getRawDoubleValues(); + return DoubleValuesComparatorSource.this.getNumericDocValues(context, dMissingValue).getRawDoubleValues(); } @Override public void setScorer(Scorable scorer) { @@ -88,4 +94,28 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato } }; } + + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + return new BucketedSort.ForDoubles(bigArrays, sortOrder, format) { + private final double dMissingValue = (Double) missingObject(missingValue, sortOrder == SortOrder.DESC); + + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + private final NumericDoubleValues values = getNumericDocValues(ctx, dMissingValue); + + @Override + protected boolean advanceExact(int doc) throws IOException { + return 
values.advanceExact(doc); + } + + @Override + protected double docValue() throws IOException { + return values.doubleValue(); + } + }; + } + }; + } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 4621c7fd287..f0d9b303c18 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -22,15 +22,20 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -52,27 +57,59 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator return SortField.Type.FLOAT; } + private NumericDoubleValues getNumericDocValues(LeafReaderContext context, float missingValue) throws IOException { + final SortedNumericDoubleValues values = indexFieldData.load(context).getDoubleValues(); + if (nested == null) { + return 
FieldData.replaceMissing(sortMode.select(values), missingValue); + } else { + final BitSet rootDocs = nested.rootDocs(context); + final DocIdSetIterator innerDocs = nested.innerDocs(context); + final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + return sortMode.select(values, missingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); + } + } + @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); - final float dMissingValue = (Float) missingObject(missingValue, reversed); + final float fMissingValue = (Float) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() return new FieldComparator.FloatComparator(numHits, null, null) { @Override protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException { - final SortedNumericDoubleValues values = indexFieldData.load(context).getDoubleValues(); - final NumericDoubleValues selectedValues; - if (nested == null) { - selectedValues = FieldData.replaceMissing(sortMode.select(values), dMissingValue); - } else { - final BitSet rootDocs = nested.rootDocs(context); - final DocIdSetIterator innerDocs = nested.innerDocs(context); - final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); - } - return selectedValues.getRawFloatValues(); + return FloatValuesComparatorSource.this.getNumericDocValues(context, fMissingValue).getRawFloatValues(); + } + }; + } + + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + return new BucketedSort.ForFloats(bigArrays, sortOrder, format) { + private final float fMissingValue = (Float) missingObject(missingValue, sortOrder == SortOrder.DESC); + + @Override + public boolean needsScores() { return false; } + + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + private final NumericDoubleValues values = getNumericDocValues(ctx, fMissingValue); + + @Override + public void setScorer(Scorable scorer) {} + + @Override + protected boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + + @Override + protected float docValue() throws IOException { + return (float) values.doubleValue(); + } + }; } }; } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 157c11e74cb..a990b3b861b 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -26,12 +26,16 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import
org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.function.Function; @@ -72,30 +76,54 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS } return converter != null ? converter.apply(values) : values; } + + private NumericDocValues getNumericDocValues(LeafReaderContext context, long missingValue) throws IOException { + final SortedNumericDocValues values = loadDocValues(context); + if (nested == null) { + return FieldData.replaceMissing(sortMode.select(values), missingValue); + } + final BitSet rootDocs = nested.rootDocs(context); + final DocIdSetIterator innerDocs = nested.innerDocs(context); + final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + return sortMode.select(values, missingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); + } @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); - final Long dMissingValue = (Long) missingObject(missingValue, reversed); + final long lMissingValue = (Long) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() return new FieldComparator.LongComparator(numHits, null, null) { @Override protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException { - final SortedNumericDocValues values = loadDocValues(context); - final NumericDocValues selectedValues; - if (nested == null) { - selectedValues = FieldData.replaceMissing(sortMode.select(values), dMissingValue); - } else { - final BitSet rootDocs = nested.rootDocs(context); - final DocIdSetIterator innerDocs = nested.innerDocs(context); - final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); - } - return selectedValues; + return LongValuesComparatorSource.this.getNumericDocValues(context, lMissingValue); } + }; + } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + return new BucketedSort.ForLongs(bigArrays, sortOrder, format) { + private final long lMissingValue = (Long) missingObject(missingValue, sortOrder == SortOrder.DESC); + + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + private final NumericDocValues values = getNumericDocValues(ctx, lMissingValue); + + @Override + protected boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + + @Override + protected long docValue() throws IOException { + return values.longValue(); + } + }; + } }; } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java index 04fa72cfdd8..df9be3eeb7a 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.SortField; import org.apache.lucene.util.Accountable; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; @@ -33,7 +34,10 @@ import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.N import 
org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.io.UncheckedIOException; @@ -102,6 +106,12 @@ public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent i throw new UnsupportedOperationException("no global ordinals sorting yet"); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + @Override public void clear() {} @@ -186,6 +196,12 @@ public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent i throw new UnsupportedOperationException("no global ordinals sorting yet"); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + @Override public void clear() {} diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java index ed77d3d5f8b..6bf6aa741d5 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java @@ -25,16 +25,21 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import 
org.apache.lucene.search.SortField; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData { @@ -48,6 +53,12 @@ public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndex throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance"); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance"); + } + public static class LatLonPointDVIndexFieldData extends AbstractLatLonPointDVIndexFieldData { public LatLonPointDVIndexFieldData(Index index, String fieldName) { super(index, fieldName); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java index 13d90ea36e7..5584dae053c 
100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java @@ -24,10 +24,15 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.SortedSetSelector; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData { @@ -64,4 +69,10 @@ public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements I SortedSetSortField.STRING_LAST : SortedSetSortField.STRING_FIRST); return sortField; } + + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java index 398093c034b..e58ff11cb17 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.DocValues; import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.SortField; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -31,7 +32,10 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -46,6 +50,12 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme throw new IllegalArgumentException("can't sort on binary field"); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("can't sort on binary field"); + } + @Override public BytesBinaryDVAtomicFieldData load(LeafReaderContext context) { try { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/ConstantIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/ConstantIndexFieldData.java index 29e74bf818a..9fb7e20e283 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/ConstantIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/ConstantIndexFieldData.java @@ -28,18 +28,23 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import 
org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AbstractSortedDocValues; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.Collection; @@ -158,6 +163,12 @@ public class ConstantIndexFieldData extends AbstractIndexOrdinalsFieldData { return new SortField(getFieldName(), source, reverse); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + @Override public IndexOrdinalsFieldData loadGlobal(DirectoryReader indexReader) { return this; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java index 66a44a95afc..3648595e690 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -33,19 +33,24 @@ import 
org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.RamAccountingTermsEnum; +import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -84,6 +89,12 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { return new SortField(getFieldName(), source, reverse); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + @Override public AtomicOrdinalsFieldData loadDirect(LeafReaderContext context) throws Exception { LeafReader reader = context.reader(); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java 
b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java index c4a9c01f3c4..847fe3bb764 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java @@ -31,7 +31,9 @@ import org.apache.lucene.search.SortedNumericSelector; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; @@ -43,7 +45,10 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.Collection; @@ -72,42 +77,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple */ public SortField sortField(NumericType targetNumericType, Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { - final XFieldComparatorSource source; - switch (targetNumericType) { - case HALF_FLOAT: - case FLOAT: - source = new FloatValuesComparatorSource(this, missingValue, sortMode, nested); - break; - - case DOUBLE: - source = new DoubleValuesComparatorSource(this, missingValue, sortMode, 
nested); - break; - - case DATE: - if (numericType == NumericType.DATE_NANOSECONDS) { - // converts date values to nanosecond resolution - source = new LongValuesComparatorSource(this, missingValue, - sortMode, nested, dvs -> convertNanosToMillis(dvs)); - } else { - source = new LongValuesComparatorSource(this, missingValue, sortMode, nested); - } - break; - - case DATE_NANOSECONDS: - if (numericType == NumericType.DATE) { - // converts date_nanos values to millisecond resolution - source = new LongValuesComparatorSource(this, missingValue, - sortMode, nested, dvs -> convertMillisToNanos(dvs)); - } else { - source = new LongValuesComparatorSource(this, missingValue, sortMode, nested); - } - break; - - default: - assert !targetNumericType.isFloatingPoint(); - source = new LongValuesComparatorSource(this, missingValue, sortMode, nested); - break; - } + final XFieldComparatorSource source = comparatorSource(targetNumericType, missingValue, sortMode, nested); /** * Check if we can use a simple {@link SortedNumericSortField} compatible with index sorting and @@ -146,6 +116,49 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return sortField(numericType, missingValue, sortMode, nested, reverse); } + /** + * Builds a {@linkplain BucketedSort} for the {@code targetNumericType}, + * casting the values if their native type doesn't match. 
+ */ + public BucketedSort newBucketedSort(NumericType targetNumericType, BigArrays bigArrays, @Nullable Object missingValue, + MultiValueMode sortMode, Nested nested, SortOrder sortOrder, DocValueFormat format) { + return comparatorSource(targetNumericType, missingValue, sortMode, nested).newBucketedSort(bigArrays, sortOrder, format); + } + + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, @Nullable Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + return newBucketedSort(numericType, bigArrays, missingValue, sortMode, nested, sortOrder, format); + } + + private XFieldComparatorSource comparatorSource(NumericType targetNumericType, @Nullable Object missingValue, MultiValueMode sortMode, + Nested nested) { + switch (targetNumericType) { + case HALF_FLOAT: + case FLOAT: + return new FloatValuesComparatorSource(this, missingValue, sortMode, nested); + case DOUBLE: + return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested); + case DATE: + if (numericType == NumericType.DATE_NANOSECONDS) { + // converts date values to nanosecond resolution + return new LongValuesComparatorSource(this, missingValue, + sortMode, nested, dvs -> convertNanosToMillis(dvs)); + } + return new LongValuesComparatorSource(this, missingValue, sortMode, nested); + case DATE_NANOSECONDS: + if (numericType == NumericType.DATE) { + // converts date_nanos values to millisecond resolution + return new LongValuesComparatorSource(this, missingValue, + sortMode, nested, dvs -> convertMillisToNanos(dvs)); + } + return new LongValuesComparatorSource(this, missingValue, sortMode, nested); + default: + assert !targetNumericType.isFloatingPoint(); + return new LongValuesComparatorSource(this, missingValue, sortMode, nested); + } + } + @Override public NumericType getNumericType() { return numericType; diff --git 
a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index 4aaddc7b8dc..203e802024a 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; @@ -40,7 +41,10 @@ import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparator import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.function.Function; @@ -81,6 +85,12 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i return sortField; } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + @Override public AtomicOrdinalsFieldData load(LeafReaderContext context) { return new 
SortedSetDVBytesAtomicFieldData(context.reader(), fieldName, scriptFunction); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index b6881390daf..2feb5213c45 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -88,6 +88,11 @@ public final class DateFieldMapper extends FieldMapper { public Instant toInstant(long value) { return Instant.ofEpochMilli(value); } + + @Override + public Instant clampToValidRange(Instant instant) { + return instant; + } }, NANOSECONDS("date_nanos", NumericType.DATE_NANOSECONDS) { @Override @@ -99,6 +104,11 @@ public final class DateFieldMapper extends FieldMapper { public Instant toInstant(long value) { return DateUtils.toInstant(value); } + + @Override + public Instant clampToValidRange(Instant instant) { + return DateUtils.clampToNanosRange(instant); + } }; private final String type; @@ -117,10 +127,18 @@ public final class DateFieldMapper extends FieldMapper { return numericType; } + /** + * Convert an {@linkplain Instant} into a long value in this resolution. + */ public abstract long convert(Instant instant); + /** + * Convert a long value in this resolution into an instant. 
+ */ public abstract Instant toInstant(long value); + public abstract Instant clampToValidRange(Instant instant); + public static Resolution ofOrdinal(int ord) { for (Resolution resolution : values()) { if (ord == resolution.ordinal()) { @@ -440,9 +458,30 @@ public final class DateFieldMapper extends FieldMapper { } } - // This check needs to be done after fromInclusive and toInclusive - // are resolved so we can throw an exception if they are invalid - // even if there are no points in the shard + return isFieldWithinRange(reader, fromInclusive, toInclusive); + } + + /** + * Return whether all values of the given {@link IndexReader} are within the range, + * outside the range or cross the range. Unlike {@link #isFieldWithinQuery} this + * accepts values that are out of the range of the {@link #resolution} of this field. + * @param fromInclusive start date, inclusive + * @param toInclusive end date, inclusive + */ + public Relation isFieldWithinRange(IndexReader reader, Instant fromInclusive, Instant toInclusive) + throws IOException { + return isFieldWithinRange(reader, + resolution.convert(resolution.clampToValidRange(fromInclusive)), + resolution.convert(resolution.clampToValidRange(toInclusive))); + } + + /** + * Return whether all values of the given {@link IndexReader} are within the range, + * outside the range or cross the range. 
+ * @param fromInclusive start date, inclusive, {@link Resolution#convert(Instant) converted} to the appropriate scale + * @param toInclusive end date, inclusive, {@link Resolution#convert(Instant) converted} to the appropriate scale + */ + private Relation isFieldWithinRange(IndexReader reader, long fromInclusive, long toInclusive) throws IOException { if (PointValues.size(reader, name()) == 0) { // no points, so nothing matches return Relation.DISJOINT; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index 521521915ca..0198f75ef8c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -31,6 +31,7 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -45,7 +46,10 @@ import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.Arrays; @@ -203,6 +207,12 @@ public class IdFieldMapper extends MetadataFieldMapper { return new SortField(getFieldName(), source, reverse); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, + Nested nested, 
SortOrder sortOrder, DocValueFormat format) { + throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field"); + } + @Override public void clear() { fieldData.clear(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 82ea883008d..e1d38d0af74 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.PrefixCodedTerms.TermIterator; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -34,7 +35,6 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; @@ -351,7 +351,8 @@ public abstract class MappedFieldType extends FieldType { throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries"); } - public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions, + QueryShardContext context) { throw new IllegalArgumentException("Can only use fuzzy queries on keyword and text fields - not on [" + name + "] which is of type [" + 
typeName() + "]"); } @@ -420,10 +421,10 @@ public abstract class MappedFieldType extends FieldType { * {@link Relation#INTERSECTS}, which is always fine to return when there is * no way to check whether values are actually within bounds. */ public Relation isFieldWithinQuery( - IndexReader reader, - Object from, Object to, - boolean includeLower, boolean includeUpper, - ZoneId timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException { + IndexReader reader, + Object from, Object to, + boolean includeLower, boolean includeUpper, + ZoneId timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException { return Relation.INTERSECTS; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index cde8e392dab..4ddda3df0af 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -31,6 +31,7 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryShardContext; @@ -38,6 +39,8 @@ import org.elasticsearch.index.query.support.QueryParsers; import java.util.List; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; + /** Base class for {@link MappedFieldType} implementations that use the same * representation for internal index terms as the external representation so * that partial matching queries such as prefix, wildcard and fuzzy queries @@ -62,7 +65,11 @@ public abstract class StringFieldType extends TermBasedFieldType { @Override public Query fuzzyQuery(Object value, 
Fuzziness fuzziness, int prefixLength, int maxExpansions, - boolean transpositions) { + boolean transpositions, QueryShardContext context) { + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[fuzzy] queries cannot be executed when '" + + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."); + } failIfNotIndexed(); return new FuzzyQuery(new Term(name(), indexedValueForSearch(value)), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions); @@ -70,6 +77,11 @@ public abstract class StringFieldType extends TermBasedFieldType { @Override public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) { + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[prefix] queries cannot be executed when '" + + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false. For optimised prefix queries on text " + + "fields please enable [index_prefixes]."); + } failIfNotIndexed(); PrefixQuery query = new PrefixQuery(new Term(name(), indexedValueForSearch(value))); if (method != null) { @@ -84,6 +96,11 @@ public abstract class StringFieldType extends TermBasedFieldType { if (termQuery instanceof MatchNoDocsQuery || termQuery instanceof MatchAllDocsQuery) { return termQuery; } + + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[wildcard] queries cannot be executed when '" + + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."); + } Term term = MappedFieldType.extractTerm(termQuery); WildcardQuery query = new WildcardQuery(term); @@ -94,6 +111,10 @@ public abstract class StringFieldType extends TermBasedFieldType { @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, MultiTermQuery.RewriteMethod method, QueryShardContext context) { + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[regexp] queries cannot be executed when '" + + 
ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."); + } failIfNotIndexed(); RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates); if (method != null) { @@ -104,6 +125,10 @@ public abstract class StringFieldType extends TermBasedFieldType { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[range] queries on [text] or [keyword] fields cannot be executed when '" + + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."); + } failIfNotIndexed(); return new TermRangeQuery(name(), lowerTerm == null ? null : indexedValueForSearch(lowerTerm), diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 954107c6560..8df0fec0441 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -328,7 +328,7 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder i String rewrite = this.rewrite; MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType != null) { - query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions); + query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context); } if (query == null) { int maxEdits = fuzziness.asDistance(BytesRefs.toString(value)); diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index ef2ae2bda80..224d9d9121c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -34,7 
+34,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java index 7b05b0cf731..c55d6f1662c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.query; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.script.FieldScript; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortAndFormats; @@ -88,7 +88,7 @@ public abstract class InnerHitContextBuilder { innerHitsContext.storedFieldsContext(innerHitBuilder.getStoredFieldsContext()); } if (innerHitBuilder.getDocValueFields() != null) { - innerHitsContext.docValueFieldsContext(new DocValueFieldsContext(innerHitBuilder.getDocValueFields())); + innerHitsContext.docValuesContext(new FetchDocValuesContext(innerHitBuilder.getDocValueFields())); } if (innerHitBuilder.getScriptFields() != null) { for 
(SearchSourceBuilder.ScriptField field : innerHitBuilder.getScriptFields()) { diff --git a/server/src/main/java/org/elasticsearch/index/query/LegacyGeoShapeQueryProcessor.java b/server/src/main/java/org/elasticsearch/index/query/LegacyGeoShapeQueryProcessor.java index b58ecea3efb..5f728ffe0e7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/LegacyGeoShapeQueryProcessor.java +++ b/server/src/main/java/org/elasticsearch/index/query/LegacyGeoShapeQueryProcessor.java @@ -26,6 +26,7 @@ import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.query.SpatialArgs; import org.apache.lucene.spatial.query.SpatialOperation; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.CircleBuilder; @@ -59,6 +60,8 @@ import org.locationtech.spatial4j.shape.Shape; import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; + public class LegacyGeoShapeQueryProcessor implements AbstractGeometryFieldMapper.QueryProcessor { private AbstractGeometryFieldMapper.AbstractGeometryFieldType ft; @@ -74,6 +77,11 @@ public class LegacyGeoShapeQueryProcessor implements AbstractGeometryFieldMapper @Override public Query process(Geometry shape, String fieldName, SpatialStrategy strategy, ShapeRelation relation, QueryShardContext context) { + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when '" + + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."); + } + LegacyGeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (LegacyGeoShapeFieldMapper.GeoShapeFieldType) ft; SpatialStrategy spatialStrategy = shapeFieldType.strategy(); if (strategy != null) { diff --git 
a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index ee8062308ac..699e535f460 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ParentChildrenBlockJoinQuery; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -57,6 +58,7 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; import static org.elasticsearch.search.fetch.subphase.InnerHitsContext.intersect; public class NestedQueryBuilder extends AbstractQueryBuilder { @@ -266,6 +268,11 @@ public class NestedQueryBuilder extends AbstractQueryBuilder @Override protected Query doToQuery(QueryShardContext context) throws IOException { + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[joining] queries cannot be executed when '" + + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."); + } + ObjectMapper nestedObjectMapper = context.getObjectMapper(path); if (nestedObjectMapper == null) { if (ignoreUnmapped) { diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 911d0d44228..6fbaedd0546 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -71,6 +71,7 @@ import java.util.Map; import 
java.util.Set; import java.util.function.BiConsumer; import java.util.function.BiFunction; +import java.util.function.BooleanSupplier; import java.util.function.LongSupplier; import java.util.function.Predicate; @@ -100,6 +101,7 @@ public class QueryShardContext extends QueryRewriteContext { private final Index fullyQualifiedIndex; private final Predicate indexNameMatcher; + private final BooleanSupplier allowExpensiveQueries; public void setTypes(String... types) { this.types = types; @@ -128,18 +130,19 @@ public class QueryShardContext extends QueryRewriteContext { IndexSearcher searcher, LongSupplier nowInMillis, String clusterAlias, - Predicate indexNameMatcher) { + Predicate indexNameMatcher, + BooleanSupplier allowExpensiveQueries) { this(shardId, indexSettings, bigArrays, bitsetFilterCache, indexFieldDataLookup, mapperService, similarityService, - scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, indexNameMatcher, - new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()), - indexSettings.getIndex().getUUID())); + scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, indexNameMatcher, + new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()), + indexSettings.getIndex().getUUID()), allowExpensiveQueries); } public QueryShardContext(QueryShardContext source) { this(source.shardId, source.indexSettings, source.bigArrays, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.getXContentRegistry(), source.getWriteableRegistry(), source.client, source.searcher, source.nowInMillis, source.indexNameMatcher, - source.fullyQualifiedIndex); + source.fullyQualifiedIndex, source.allowExpensiveQueries); } private QueryShardContext(int shardId, @@ -156,7 +159,8 @@ public class QueryShardContext extends QueryRewriteContext { IndexSearcher 
searcher, LongSupplier nowInMillis, Predicate indexNameMatcher, - Index fullyQualifiedIndex) { + Index fullyQualifiedIndex, + BooleanSupplier allowExpensiveQueries) { super(xContentRegistry, namedWriteableRegistry, client, nowInMillis); this.shardId = shardId; this.similarityService = similarityService; @@ -171,6 +175,7 @@ public class QueryShardContext extends QueryRewriteContext { this.searcher = searcher; this.indexNameMatcher = indexNameMatcher; this.fullyQualifiedIndex = fullyQualifiedIndex; + this.allowExpensiveQueries = allowExpensiveQueries; } private void reset() { @@ -208,6 +213,10 @@ public class QueryShardContext extends QueryRewriteContext { return bitsetFilterCache.getBitSetProducer(filter); } + public boolean allowExpensiveQueries() { + return allowExpensiveQueries.getAsBoolean(); + } + public > IFD getForField(MappedFieldType fieldType) { return (IFD) indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName()); } diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index e9b18bd0aa1..da3f7850c0e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -40,6 +41,8 @@ import org.elasticsearch.script.Script; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; + public class ScriptQueryBuilder extends AbstractQueryBuilder { public static final 
String NAME = "script"; @@ -130,6 +133,10 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder @Override protected Query doToQuery(QueryShardContext context) throws IOException { + if (context.allowExpensiveQueries() == false) { + throw new ElasticsearchException("[script] queries cannot be executed when '" + + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."); + } FilterScript.Factory factory = context.compile(script, FilterScript.CONTEXT); FilterScript.LeafFactory filterScript = factory.newFactory(script.getParams(), context.lookup()); return new ScriptQuery(script, filterScript); diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java index 74e51ff09a6..ff1db8a9e8c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query.functionscore; import org.apache.lucene.search.Query; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -42,6 +43,7 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; /** * A query that computes a document score based on the provided script @@ -170,6 +172,10 @@ public class ScriptScoreQueryBuilder extends AbstractQueryBuilder querySupplier; if (fuzziness != null) { querySupplier = () -> { - Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions); + Query 
query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, + transpositions, context); if (query instanceof FuzzyQuery) { QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index b69e2dc6b20..115430a13c3 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -463,7 +463,7 @@ public class QueryStringQueryParser extends XQueryParser { Analyzer normalizer = forceAnalyzer == null ? queryBuilder.context.getSearchAnalyzer(currentFieldType) : forceAnalyzer; BytesRef term = termStr == null ? null : normalizer.normalize(field, termStr); return currentFieldType.fuzzyQuery(term, Fuzziness.fromEdits((int) minSimilarity), - getFuzzyPrefixLength(), fuzzyMaxExpansions, fuzzyTranspositions); + getFuzzyPrefixLength(), fuzzyMaxExpansions, fuzzyTranspositions, context); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java index 912e03ca799..b8509ca2c11 100644 --- a/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java @@ -42,10 +42,10 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.SimpleQueryStringBuilder; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.List; -import java.util.ArrayList; import static 
org.elasticsearch.common.lucene.search.Queries.newUnmappedFieldQuery; @@ -134,7 +134,7 @@ public class SimpleQueryStringQueryParser extends SimpleQueryParser { try { final BytesRef term = getAnalyzer(ft).normalize(fieldName, text); Query query = ft.fuzzyQuery(term, Fuzziness.fromEdits(fuzziness), settings.fuzzyPrefixLength, - settings.fuzzyMaxExpansions, settings.fuzzyTranspositions); + settings.fuzzyMaxExpansions, settings.fuzzyTranspositions, context); disjuncts.add(wrapWithBoost(query, entry.getValue())); } catch (RuntimeException e) { disjuncts.add(rethrowUnlessLenient(e)); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index b2f5e75d85d..4112e037226 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -169,6 +169,7 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF import static org.elasticsearch.index.IndexService.IndexCreationContext.CREATE_INDEX; import static org.elasticsearch.index.IndexService.IndexCreationContext.META_DATA_VERIFICATION; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; +import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; public class IndicesService extends AbstractLifecycleComponent implements IndicesClusterStateService.AllocatedIndices, IndexService.ShardStoreDeleter { @@ -221,6 +222,7 @@ public class IndicesService extends AbstractLifecycleComponent final AbstractRefCounted indicesRefCount; // pkg-private for testing private final CountDownLatch closeLatch = new CountDownLatch(1); private volatile boolean idFieldDataEnabled; + private volatile boolean allowExpensiveQueries; @Nullable private final EsThreadPoolExecutor danglingIndicesThreadPoolExecutor; @@ -317,6 +319,9 @@ public class IndicesService extends AbstractLifecycleComponent 0, 
TimeUnit.MILLISECONDS, daemonThreadFactory(nodeName, DANGLING_INDICES_UPDATE_THREAD_NAME), threadPool.getThreadContext()) : null; + + this.allowExpensiveQueries = ALLOW_EXPENSIVE_QUERIES.get(clusterService.getSettings()); + clusterService.getClusterSettings().addSettingsUpdateConsumer(ALLOW_EXPENSIVE_QUERIES, this::setAllowExpensiveQueries); } private static final String DANGLING_INDICES_UPDATE_THREAD_NAME = "DanglingIndices#updateTask"; @@ -592,7 +597,8 @@ public class IndicesService extends AbstractLifecycleComponent idxSettings.getNumberOfReplicas(), indexCreationContext); - final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), directoryFactories); + final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), + directoryFactories, () -> allowExpensiveQueries); for (IndexingOperationListener operationListener : indexingOperationListeners) { indexModule.addIndexOperationListener(operationListener); } @@ -661,7 +667,8 @@ public class IndicesService extends AbstractLifecycleComponent */ public synchronized MapperService createIndexMapperService(IndexMetaData indexMetaData) throws IOException { final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexScopedSettings); - final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), directoryFactories); + final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), + directoryFactories, () -> allowExpensiveQueries); pluginsService.onIndexModule(indexModule); return indexModule.newIndexMapperService(xContentRegistry, mapperRegistry, scriptService); } @@ -1574,6 +1581,10 @@ public class IndicesService extends AbstractLifecycleComponent } } + private void setAllowExpensiveQueries(Boolean allowExpensiveQueries) { + this.allowExpensiveQueries = allowExpensiveQueries; + } + // visible for testing 
public boolean allPendingDanglingIndicesWritten() { return nodeWriteDanglingIndicesInfo == false || diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index f517c302827..d75e1cfb5d5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -64,6 +64,7 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestController implements HttpServerTransport.Dispatcher { private static final Logger logger = LogManager.getLogger(RestController.class); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); private final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER); @@ -96,13 +97,11 @@ public class RestController implements HttpServerTransport.Dispatcher { * @param path Path to handle (e.g., "/{index}/{type}/_bulk") * @param handler The handler to actually execute * @param deprecationMessage The message to log and send as a header in the response - * @param logger The existing deprecation logger to use */ - protected void registerAsDeprecatedHandler(RestRequest.Method method, String path, RestHandler handler, - String deprecationMessage, DeprecationLogger logger) { + protected void registerAsDeprecatedHandler(RestRequest.Method method, String path, RestHandler handler, String deprecationMessage) { assert (handler instanceof DeprecationRestHandler) == false; - registerHandler(method, path, new DeprecationRestHandler(handler, deprecationMessage, logger)); + registerHandler(method, path, new DeprecationRestHandler(handler, deprecationMessage, deprecationLogger)); } /** @@ -128,17 +127,15 @@ public class RestController implements HttpServerTransport.Dispatcher { * @param handler The handler to actually execute * @param deprecatedMethod GET, POST, etc. 
* @param deprecatedPath Deprecated path to handle (e.g., "/_optimize") - * @param logger The existing deprecation logger to use */ protected void registerWithDeprecatedHandler(RestRequest.Method method, String path, RestHandler handler, - RestRequest.Method deprecatedMethod, String deprecatedPath, - DeprecationLogger logger) { + RestRequest.Method deprecatedMethod, String deprecatedPath) { // e.g., [POST /_optimize] is deprecated! Use [POST /_forcemerge] instead. final String deprecationMessage = "[" + deprecatedMethod.name() + " " + deprecatedPath + "] is deprecated! Use [" + method.name() + " " + path + "] instead."; registerHandler(method, path, handler); - registerAsDeprecatedHandler(deprecatedMethod, deprecatedPath, handler, deprecationMessage, logger); + registerAsDeprecatedHandler(deprecatedMethod, deprecatedPath, handler, deprecationMessage); } /** @@ -164,9 +161,9 @@ public class RestController implements HttpServerTransport.Dispatcher { public void registerHandler(final RestHandler restHandler) { restHandler.routes().forEach(route -> registerHandler(route.getMethod(), route.getPath(), restHandler)); restHandler.deprecatedRoutes().forEach(route -> - registerAsDeprecatedHandler(route.getMethod(), route.getPath(), restHandler, route.getDeprecationMessage(), route.getLogger())); + registerAsDeprecatedHandler(route.getMethod(), route.getPath(), restHandler, route.getDeprecationMessage())); restHandler.replacedRoutes().forEach(route -> registerWithDeprecatedHandler(route.getMethod(), route.getPath(), - restHandler, route.getDeprecatedMethod(), route.getDeprecatedPath(), route.getLogger())); + restHandler, route.getDeprecatedMethod(), route.getDeprecatedPath())); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index ab7b468f757..0c06a84df62 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ 
b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.rest.RestRequest.Method; @@ -115,21 +114,15 @@ public interface RestHandler { class DeprecatedRoute extends Route { private final String deprecationMessage; - private final DeprecationLogger logger; - public DeprecatedRoute(Method method, String path, String deprecationMessage, DeprecationLogger logger) { + public DeprecatedRoute(Method method, String path, String deprecationMessage) { super(method, path); this.deprecationMessage = deprecationMessage; - this.logger = logger; } public String getDeprecationMessage() { return deprecationMessage; } - - public DeprecationLogger getLogger() { - return logger; - } } /** @@ -140,13 +133,11 @@ public interface RestHandler { private final String deprecatedPath; private final Method deprecatedMethod; - private final DeprecationLogger logger; - public ReplacedRoute(Method method, String path, Method deprecatedMethod, String deprecatedPath, DeprecationLogger logger) { + public ReplacedRoute(Method method, String path, Method deprecatedMethod, String deprecatedPath) { super(method, path); this.deprecatedMethod = deprecatedMethod; this.deprecatedPath = deprecatedPath; - this.logger = logger; } public String getDeprecatedPath() { @@ -156,9 +147,5 @@ public interface RestHandler { public Method getDeprecatedMethod() { return deprecatedMethod; } - - public DeprecationLogger getLogger() { - return logger; - } } } diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 390a485b305..a096364d9ba 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ 
b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -58,7 +58,7 @@ import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; @@ -112,7 +112,7 @@ final class DefaultSearchContext extends SearchContext { private StoredFieldsContext storedFields; private ScriptFieldsContext scriptFields; private FetchSourceContext fetchSourceContext; - private DocValueFieldsContext docValueFieldsContext; + private FetchDocValuesContext docValuesContext; private int from = -1; private int size = -1; private SortAndFormats sort; @@ -465,13 +465,13 @@ final class DefaultSearchContext extends SearchContext { } @Override - public DocValueFieldsContext docValueFieldsContext() { - return docValueFieldsContext; + public FetchDocValuesContext docValuesContext() { + return docValuesContext; } @Override - public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) { - this.docValueFieldsContext = docValueFieldsContext; + public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { + this.docValuesContext = docValuesContext; return this; } diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 0d5de295301..c26c061d781 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -243,14 +243,14 @@ import 
org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregati import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; -import org.elasticsearch.search.fetch.subphase.ExplainFetchSubPhase; -import org.elasticsearch.search.fetch.subphase.FetchSourceSubPhase; -import org.elasticsearch.search.fetch.subphase.MatchedQueriesFetchSubPhase; -import org.elasticsearch.search.fetch.subphase.ScoreFetchSubPhase; -import org.elasticsearch.search.fetch.subphase.ScriptFieldsFetchSubPhase; -import org.elasticsearch.search.fetch.subphase.SeqNoPrimaryTermFetchSubPhase; -import org.elasticsearch.search.fetch.subphase.VersionFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase; +import org.elasticsearch.search.fetch.subphase.ExplainPhase; +import org.elasticsearch.search.fetch.subphase.FetchSourcePhase; +import org.elasticsearch.search.fetch.subphase.MatchedQueriesPhase; +import org.elasticsearch.search.fetch.subphase.FetchScorePhase; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsPhase; +import org.elasticsearch.search.fetch.subphase.SeqNoPrimaryTermPhase; +import org.elasticsearch.search.fetch.subphase.FetchVersionPhase; import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; @@ -263,6 +263,7 @@ import org.elasticsearch.search.sort.GeoDistanceSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.ScriptSortBuilder; import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.sort.SortValue; import org.elasticsearch.search.suggest.Suggest; import 
org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; @@ -334,6 +335,7 @@ public class SearchModule { registerSearchExts(plugins); registerShapes(); registerIntervalsSourceProviders(); + namedWriteables.addAll(SortValue.namedWriteables()); } public List getNamedWriteables() { @@ -745,15 +747,15 @@ public class SearchModule { } private void registerFetchSubPhases(List plugins) { - registerFetchSubPhase(new ExplainFetchSubPhase()); - registerFetchSubPhase(new DocValueFieldsFetchSubPhase()); - registerFetchSubPhase(new ScriptFieldsFetchSubPhase()); - registerFetchSubPhase(new FetchSourceSubPhase()); - registerFetchSubPhase(new VersionFetchSubPhase()); - registerFetchSubPhase(new SeqNoPrimaryTermFetchSubPhase()); - registerFetchSubPhase(new MatchedQueriesFetchSubPhase()); + registerFetchSubPhase(new ExplainPhase()); + registerFetchSubPhase(new FetchDocValuesPhase()); + registerFetchSubPhase(new ScriptFieldsPhase()); + registerFetchSubPhase(new FetchSourcePhase()); + registerFetchSubPhase(new FetchVersionPhase()); + registerFetchSubPhase(new SeqNoPrimaryTermPhase()); + registerFetchSubPhase(new MatchedQueriesPhase()); registerFetchSubPhase(new HighlightPhase(highlighters)); - registerFetchSubPhase(new ScoreFetchSubPhase()); + registerFetchSubPhase(new FetchScorePhase()); FetchPhaseConstructionContext context = new FetchPhaseConstructionContext(highlighters); registerFromPlugin(plugins, p -> p.getFetchSubPhases(context), this::registerFetchSubPhase); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 8ea7e1e34b5..e4cf2d764d1 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -79,7 +79,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import 
org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.AliasFilter; @@ -136,6 +136,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv Setting.positiveTimeSetting("search.max_keep_alive", timeValueHours(24), Property.NodeScope, Property.Dynamic); public static final Setting KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope); + public static final Setting ALLOW_EXPENSIVE_QUERIES = + Setting.boolSetting("search.allow_expensive_queries", true, Property.NodeScope, Property.Dynamic); /** * Enables low-level, frequent search cancellation checks. 
Enabling low-level checks will make long running searches to react @@ -882,11 +884,11 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv context.fetchSourceContext(source.fetchSource()); } if (source.docValueFields() != null) { - List docValueFields = new ArrayList<>(); - for (DocValueFieldsContext.FieldAndFormat format : source.docValueFields()) { + List docValueFields = new ArrayList<>(); + for (FetchDocValuesContext.FieldAndFormat format : source.docValueFields()) { Collection fieldNames = context.mapperService().simpleMatchToFullName(format.field); for (String fieldName: fieldNames) { - docValueFields.add(new DocValueFieldsContext.FieldAndFormat(fieldName, format.format)); + docValueFields.add(new FetchDocValuesContext.FieldAndFormat(fieldName, format.format)); } } int maxAllowedDocvalueFields = context.mapperService().getIndexSettings().getMaxDocvalueFields(); @@ -896,7 +898,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv + "] but was [" + docValueFields.size() + "]. 
This limit can be set by changing the [" + IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING.getKey() + "] index level setting."); } - context.docValueFieldsContext(new DocValueFieldsContext(docValueFields)); + context.docValuesContext(new FetchDocValuesContext(docValueFields)); } if (source.highlighter() != null) { HighlightBuilder highlightBuilder = source.highlighter(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java index ba1d847f23b..52836721876 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.aggregations; -import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.AbstractObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,7 +36,7 @@ import java.util.Map; */ public abstract class ParsedAggregation implements Aggregation, ToXContentFragment { - protected static void declareAggregationFields(ObjectParser objectParser) { + protected static void declareAggregationFields(AbstractObjectParser objectParser) { objectParser.declareObject((parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata), (parser, context) -> parser.map(), InternalAggregation.CommonFields.META); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index 3a0171d4eea..df9627760b0 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.query.QueryShardContext; @@ -410,86 +411,118 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil return NAME; } - /* + /** + * Returns a {@linkplain ZoneId} that functions the same as + * {@link #timeZone()} on the data in the shard referred to by + * {@code context}. It attempts to convert zones that + * have non-fixed offsets into fixed offset zones that produce the + * same results on all data in the shard. + *

+ * We go about this in several phases: + *

    + *
  1. A bunch of preflight checks to see if we *can* optimize it + *
  2. Find any Instant in the shard + *
  3. Find the DST transition before and after that Instant + *
  4. Round those into the interval + *
  5. Check if the rounded values include all values within the shard + *
  6. If they do then return a fixed offset time zone because it + * will return the same values for all time in the shard as the + * original time zone, but faster + *
  7. Otherwise return the original time zone. It'll be slower, but + * correct. + *
+ *

* NOTE: this can't be done in rewrite() because the timezone is then also used on the * coordinating node in order to generate missing buckets, which may cross a transition * even though data on the shards doesn't. */ ZoneId rewriteTimeZone(QueryShardContext context) throws IOException { final ZoneId tz = timeZone(); - if (field() != null && - tz != null && - tz.getRules().isFixedOffset() == false && - field() != null && - script() == null) { - final MappedFieldType ft = context.fieldMapper(field()); - final IndexReader reader = context.getIndexReader(); - if (ft != null && reader != null) { - Long anyInstant = null; - final IndexNumericFieldData fieldData = context.getForField(ft); - for (LeafReaderContext ctx : reader.leaves()) { - AtomicNumericFieldData leafFD = fieldData.load(ctx); - SortedNumericDocValues values = leafFD.getLongValues(); - if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - anyInstant = values.nextValue(); - break; - } - } + if (tz == null || tz.getRules().isFixedOffset()) { + // This time zone is already as fast as it is going to get. + return tz; + } + if (script() != null) { + // We can't be sure what dates the script will return so we don't attempt to optimize anything + return tz; + } + if (field() == null) { + // Without a field we're not going to be able to look anything up. + return tz; + } + MappedFieldType ft = context.fieldMapper(field()); + if (ft == null || false == ft instanceof DateFieldMapper.DateFieldType) { + // If the field is unmapped or not a date then we can't get its range. 
+ return tz; + } + DateFieldMapper.DateFieldType dft = (DateFieldMapper.DateFieldType) ft; + final IndexReader reader = context.getIndexReader(); + if (reader == null) { + return tz; + } - if (anyInstant != null) { - Instant instant = Instant.ofEpochMilli(anyInstant); - ZoneOffsetTransition prevOffsetTransition = tz.getRules().previousTransition(instant); - final long prevTransition; - if (prevOffsetTransition != null) { - prevTransition = prevOffsetTransition.getInstant().toEpochMilli(); - } else { - prevTransition = instant.toEpochMilli(); - } - ZoneOffsetTransition nextOffsetTransition = tz.getRules().nextTransition(instant); - final long nextTransition; - if (nextOffsetTransition != null) { - nextTransition = nextOffsetTransition.getInstant().toEpochMilli(); - } else { - nextTransition = instant.toEpochMilli(); - } - - // We need all not only values but also rounded values to be within - // [prevTransition, nextTransition]. - final long low; - - - DateIntervalWrapper.IntervalTypeEnum intervalType = dateHistogramInterval.getIntervalType(); - if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.FIXED)) { - low = Math.addExact(prevTransition, dateHistogramInterval.tryIntervalAsFixedUnit().millis()); - } else if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)) { - final Rounding.DateTimeUnit intervalAsUnit = dateHistogramInterval.tryIntervalAsCalendarUnit(); - final Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); - low = rounding.nextRoundingValue(prevTransition); - } else { - // We're not sure what the interval was originally (legacy) so use old behavior of assuming - // calendar first, then fixed. 
Required because fixed/cal overlap in places ("1h") - Rounding.DateTimeUnit intervalAsUnit = dateHistogramInterval.tryIntervalAsCalendarUnit(); - if (intervalAsUnit != null) { - final Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); - low = rounding.nextRoundingValue(prevTransition); - } else { - final TimeValue intervalAsMillis = dateHistogramInterval.tryIntervalAsFixedUnit(); - low = Math.addExact(prevTransition, intervalAsMillis.millis()); - } - } - // rounding rounds down, so 'nextTransition' is a good upper bound - final long high = nextTransition; - - if (ft.isFieldWithinQuery(reader, low, high, true, false, ZoneOffset.UTC, EPOCH_MILLIS_PARSER, - context) == Relation.WITHIN) { - // All values in this reader have the same offset despite daylight saving times. - // This is very common for location-based timezones such as Europe/Paris in - // combination with time-based indices. - return ZoneOffset.ofTotalSeconds(tz.getRules().getOffset(instant).getTotalSeconds()); - } - } + Instant instant = null; + final IndexNumericFieldData fieldData = context.getForField(ft); + for (LeafReaderContext ctx : reader.leaves()) { + AtomicNumericFieldData leafFD = fieldData.load(ctx); + SortedNumericDocValues values = leafFD.getLongValues(); + if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { + instant = Instant.ofEpochMilli(values.nextValue()); + break; } } + if (instant == null) { + return tz; + } + + ZoneOffsetTransition prevOffsetTransition = tz.getRules().previousTransition(instant); + final long prevTransition; + if (prevOffsetTransition != null) { + prevTransition = prevOffsetTransition.getInstant().toEpochMilli(); + } else { + prevTransition = instant.toEpochMilli(); + } + ZoneOffsetTransition nextOffsetTransition = tz.getRules().nextTransition(instant); + final long nextTransition; + if (nextOffsetTransition != null) { + nextTransition = nextOffsetTransition.getInstant().toEpochMilli(); + } else { + nextTransition = 
instant.toEpochMilli(); + } + + // We need all not only values but also rounded values to be within + // [prevTransition, nextTransition]. + final long low; + + DateIntervalWrapper.IntervalTypeEnum intervalType = dateHistogramInterval.getIntervalType(); + if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.FIXED)) { + low = Math.addExact(prevTransition, dateHistogramInterval.tryIntervalAsFixedUnit().millis()); + } else if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)) { + final Rounding.DateTimeUnit intervalAsUnit = dateHistogramInterval.tryIntervalAsCalendarUnit(); + final Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); + low = rounding.nextRoundingValue(prevTransition); + } else { + // We're not sure what the interval was originally (legacy) so use old behavior of assuming + // calendar first, then fixed. Required because fixed/cal overlap in places ("1h") + Rounding.DateTimeUnit intervalAsUnit = dateHistogramInterval.tryIntervalAsCalendarUnit(); + if (intervalAsUnit != null) { + final Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); + low = rounding.nextRoundingValue(prevTransition); + } else { + final TimeValue intervalAsMillis = dateHistogramInterval.tryIntervalAsFixedUnit(); + low = Math.addExact(prevTransition, intervalAsMillis.millis()); + } + } + // rounding rounds down, so 'nextTransition' is a good upper bound + final long high = nextTransition; + + if (dft.isFieldWithinRange( + reader, Instant.ofEpochMilli(low), Instant.ofEpochMilli(high - 1)) == Relation.WITHIN) { + // All values in this reader have the same offset despite daylight saving times. + // This is very common for location-based timezones such as Europe/Paris in + // combination with time-based indices. 
+ return ZoneOffset.ofTotalSeconds(tz.getRules().getOffset(instant).getTotalSeconds()); + } return tz; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java index 27b21a1ebd8..d00bbda4604 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java @@ -98,7 +98,7 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue { if (pointConverter != null) { Number segMax = findLeafMaxValue(ctx.reader(), pointField, pointConverter); if (segMax != null) { - /** + /* * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull} * so the ordinal for the bucket is always 0. */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java index 38841d7e4d8..96563d917bf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java @@ -103,7 +103,7 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue { if (pointConverter != null) { Number segMin = findLeafMinValue(ctx.reader(), pointField, pointConverter); if (segMin != null) { - /** + /* * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull} * so the ordinal for the bucket is always 0. 
*/ @@ -190,7 +190,12 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue { if (fieldType instanceof NumberFieldMapper.NumberFieldType) { converter = ((NumberFieldMapper.NumberFieldType) fieldType)::parsePoint; } else if (fieldType.getClass() == DateFieldMapper.DateFieldType.class) { - converter = (in) -> LongPoint.decodeDimension(in, 0); + DateFieldMapper.DateFieldType dft = (DateFieldMapper.DateFieldType) fieldType; + /* + * Makes sure that nanoseconds decode to milliseconds, just + * like they do when you run the agg without the optimization. + */ + converter = (in) -> dft.resolution().toInstant(LongPoint.decodeDimension(in, 0)).toEpochMilli(); } return converter; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index faaa7d73391..7c32ef3b4ef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -39,7 +39,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java index e5cd4c8cc1b..d377c1ac8ef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; @@ -107,7 +107,7 @@ class TopHitsAggregatorFactory extends AggregatorFactory { subSearchContext.storedFieldsContext(storedFieldsContext); } if (docValueFields != null) { - subSearchContext.docValueFieldsContext(new DocValueFieldsContext(docValueFields)); + subSearchContext.docValuesContext(new FetchDocValuesContext(docValueFields)); } for (ScriptFieldsContext.ScriptField field : scriptFields) { subSearchContext.scriptFields().add(field); diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 7085e5ba586..d786cc2f9e3 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -48,7 +48,7 @@ import 
org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 0b0c740f5c4..93501678e43 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -55,7 +55,7 @@ import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; -import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.InnerHitsPhase; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.tasks.TaskCancelledException; @@ -81,7 +81,7 @@ public class FetchPhase implements SearchPhase { public FetchPhase(List fetchSubPhases) { this.fetchSubPhases = fetchSubPhases.toArray(new FetchSubPhase[fetchSubPhases.size() + 1]); - this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsFetchSubPhase(this); + this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsPhase(this); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java index a869c4fefbc..8c76270e78b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java @@ -29,7 +29,7 @@ import java.io.IOException; /** * Explains the scoring calculations for the top hits. */ -public final class ExplainFetchSubPhase implements FetchSubPhase { +public final class ExplainPhase implements FetchSubPhase { @Override public void hitExecute(SearchContext context, HitContext hitContext) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsContext.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java index daafe097029..0e9576fd9b0 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java @@ -36,7 +36,7 @@ import java.util.Objects; /** * All the required context to pull a field from the doc values. 
*/ -public class DocValueFieldsContext { +public class FetchDocValuesContext { /** * Wrapper around a field name and the format that should be used to @@ -113,7 +113,7 @@ public class DocValueFieldsContext { private final List fields; - public DocValueFieldsContext(List fields) { + public FetchDocValuesContext(List fields) { this.fields = fields; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java similarity index 91% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index 261742331f7..7d1d4b9dfeb 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -35,7 +35,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -51,15 +51,15 @@ import static org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericTyp import static org.elasticsearch.search.DocValueFormat.withNanosecondResolution; /** - * Query sub phase which pulls data from doc values + * Fetch sub phase which pulls data from doc values. 
* * Specifying {@code "docvalue_fields": ["field1", "field2"]} */ -public final class DocValueFieldsFetchSubPhase implements FetchSubPhase { +public final class FetchDocValuesPhase implements FetchSubPhase { private static final String USE_DEFAULT_FORMAT = "use_field_mapping"; private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger( - LogManager.getLogger(DocValueFieldsFetchSubPhase.class)); + LogManager.getLogger(FetchDocValuesPhase.class)); @Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { @@ -67,22 +67,22 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase { if (context.collapse() != null) { // retrieve the `doc_value` associated with the collapse field String name = context.collapse().getFieldName(); - if (context.docValueFieldsContext() == null) { - context.docValueFieldsContext(new DocValueFieldsContext( + if (context.docValuesContext() == null) { + context.docValuesContext(new FetchDocValuesContext( Collections.singletonList(new FieldAndFormat(name, null)))); - } else if (context.docValueFieldsContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) { - context.docValueFieldsContext().fields().add(new FieldAndFormat(name, null)); + } else if (context.docValuesContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) { + context.docValuesContext().fields().add(new FieldAndFormat(name, null)); } } - if (context.docValueFieldsContext() == null) { + if (context.docValuesContext() == null) { return; } hits = hits.clone(); // don't modify the incoming hits Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId)); - if (context.docValueFieldsContext().fields().stream() + if (context.docValuesContext().fields().stream() .map(f -> f.format) .filter(USE_DEFAULT_FORMAT::equals) .findAny() @@ -91,7 +91,7 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase { "ease the transition to 7.x. 
It has become the default and shouldn't be set explicitly anymore."); } - for (FieldAndFormat fieldAndFormat : context.docValueFieldsContext().fields()) { + for (FieldAndFormat fieldAndFormat : context.docValuesContext().fields()) { String field = fieldAndFormat.field; MappedFieldType fieldType = context.mapperService().fieldType(field); if (fieldType != null) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScoreFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/ScoreFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java index 3a6db72d5b3..fe2a79513bc 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScoreFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java @@ -34,7 +34,7 @@ import java.util.Arrays; import java.util.Comparator; import java.util.Iterator; -public class ScoreFetchSubPhase implements FetchSubPhase { +public class FetchScorePhase implements FetchSubPhase { @Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index fa099392f40..1bdd7421066 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -31,7 +31,7 @@ import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; 
import java.util.Map; -public final class FetchSourceSubPhase implements FetchSubPhase { +public final class FetchSourcePhase implements FetchSubPhase { @Override public void hitExecute(SearchContext context, HitContext hitContext) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java index baa0c6e9551..3c81af09be3 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java @@ -31,7 +31,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Comparator; -public final class VersionFetchSubPhase implements FetchSubPhase { +public final class FetchVersionPhase implements FetchSubPhase { @Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { if (context.version() == false || diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index 3921e6ce536..54b569ef306 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -34,11 +34,11 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; -public final class InnerHitsFetchSubPhase implements FetchSubPhase 
{ +public final class InnerHitsPhase implements FetchSubPhase { private final FetchPhase fetchPhase; - public InnerHitsFetchSubPhase(FetchPhase fetchPhase) { + public InnerHitsPhase(FetchPhase fetchPhase) { this.fetchPhase = fetchPhase; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index c2f6980781d..9302f6d9d63 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -40,7 +40,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -public final class MatchedQueriesFetchSubPhase implements FetchSubPhase { +public final class MatchedQueriesPhase implements FetchSubPhase { @Override public void hitsExecute(SearchContext context, SearchHit[] hits) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java index 532441c0cf9..7a015811bd1 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java @@ -37,7 +37,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.List; -public final class ScriptFieldsFetchSubPhase implements FetchSubPhase { 
+public final class ScriptFieldsPhase implements FetchSubPhase { @Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { @@ -65,7 +65,7 @@ public final class ScriptFieldsFetchSubPhase implements FetchSubPhase { final Object value; try { value = leafScripts[i].execute(); - CollectionUtils.ensureNoSelfReferences(value, "ScriptFieldsFetchSubPhase leaf script " + i); + CollectionUtils.ensureNoSelfReferences(value, "ScriptFieldsPhase leaf script " + i); } catch (RuntimeException e) { if (scriptFields.get(i).ignoreException()) { continue; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermFetchSubPhase.java rename to server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java index 31a6328ff95..d8e1f060070 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java @@ -31,7 +31,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Comparator; -public final class SeqNoPrimaryTermFetchSubPhase implements FetchSubPhase { +public final class SeqNoPrimaryTermPhase implements FetchSubPhase { @Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { if (context.seqNoAndPrimaryTerm() == false) { diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index c0477ece69a..4473546f76d 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -49,7 
+49,7 @@ import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; @@ -197,9 +197,9 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext); - public abstract DocValueFieldsContext docValueFieldsContext(); + public abstract FetchDocValuesContext docValuesContext(); - public abstract SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext); + public abstract SearchContext docValuesContext(FetchDocValuesContext docValuesContext); public abstract ContextIndexSearcher searcher(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index e918aed6037..a539a77d66b 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.collapse.CollapseContext; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import 
org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; @@ -58,7 +58,7 @@ public class SubSearchContext extends FilteredSearchContext { private StoredFieldsContext storedFields; private ScriptFieldsContext scriptFields; private FetchSourceContext fetchSourceContext; - private DocValueFieldsContext docValueFieldsContext; + private FetchDocValuesContext docValuesContext; private SearchContextHighlight highlight; private boolean explain; @@ -150,13 +150,13 @@ public class SubSearchContext extends FilteredSearchContext { } @Override - public DocValueFieldsContext docValueFieldsContext() { - return docValueFieldsContext; + public FetchDocValuesContext docValuesContext() { + return docValuesContext; } @Override - public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) { - this.docValueFieldsContext = docValueFieldsContext; + public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { + this.docValuesContext = docValuesContext; return this; } diff --git a/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java b/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java new file mode 100644 index 00000000000..8239c1ef8b0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java @@ -0,0 +1,372 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.lucene.ScorerAware; +import org.elasticsearch.common.util.BigArray; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.FloatArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.search.DocValueFormat; + +import java.io.IOException; + +/** + * Type specialized sort implementations designed for use in aggregations. + */ +public abstract class BucketedSort implements Releasable { + // TODO priority queue semantics to support multiple hits in the buckets + protected final BigArrays bigArrays; + private final SortOrder order; + private final DocValueFormat format; + + public BucketedSort(BigArrays bigArrays, SortOrder order, DocValueFormat format) { + this.bigArrays = bigArrays; + this.order = order; + this.format = format; + } + + /** + * The order of the sort. + */ + public final SortOrder getOrder() { + return order; + } + + /** + * The format to use when presenting the values. + */ + public final DocValueFormat getFormat() { + return format; + } + + /** + * Get the value for a bucket if it has been collected, null otherwise. 
+ */ + public final SortValue getValue(long bucket) { + if (bucket >= buckets().size()) { + return null; + } + return getValueForBucket(bucket); + } + + /** + * Get the {@linkplain Leaf} implementation that'll do that actual collecting. + */ + public abstract Leaf forLeaf(LeafReaderContext ctx) throws IOException; + + /** + * Does this sort need scores? Most don't, but sorting on {@code _score} does. + */ + public abstract boolean needsScores(); + + /** + * The {@linkplain BigArray} backing this sort. + */ + protected abstract BigArray buckets(); + + /** + * Grow the {@linkplain BigArray} backing this sort to account for new buckets. + * This will only be called if the array is too small. + */ + protected abstract void grow(long minSize); + + /** + * Get the value for a bucket. This will only be called if the bucket was collected. + */ + protected abstract SortValue getValueForBucket(long bucket); + + /** + * Performs the actual collection against a {@linkplain LeafReaderContext}. + */ + public abstract class Leaf implements ScorerAware { + /** + * Collect this doc, returning {@code true} if it is competitive. + */ + public final boolean collectIfCompetitive(int doc, long bucket) throws IOException { + if (false == advanceExact(doc)) { + return false; + } + if (bucket >= buckets().size()) { + grow(bucket + 1); + setValue(bucket); + return true; + } + return setIfCompetitive(bucket); + } + + /** + * Move the underlying data source reader to the doc and return + * {@code true} if there is data for the sort value. + */ + protected abstract boolean advanceExact(int doc) throws IOException; + + /** + * Set the value for a particular bucket to the value that doc has for the sort. + * This is called when we're *sure* we haven't yet seen the bucket. + */ + protected abstract void setValue(long bucket) throws IOException; + + /** + * If the value that doc has for the sort is competitive with the other values + * then set it. 
This is called for buckets we *might* have already seen. So
+ * implementers will have to check for "empty" buckets in their own way. The
+ * vaguery here is for two reasons:
+ * <ul>
+ * <li>When we see a bucket that won't fit in our arrays we oversize them so
+ * we don't have to grow them by 1 every time.</li>
+ * <li>Buckets don't always arrive in order and our storage is "dense" on the
+ * bucket ordinal. For example, we might get bucket number 4 grow the array
+ * to fit it, and <strong>then</strong> get bucket number 3.</li>
+ * </ul>
+ */ + protected abstract boolean setIfCompetitive(long bucket) throws IOException; + } + + /** + * Superclass for implementations of {@linkplain BucketedSort} for {@code double} keys. + */ + public abstract static class ForDoubles extends BucketedSort { + private DoubleArray buckets = bigArrays.newDoubleArray(1, false); + + public ForDoubles(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + super(bigArrays, sortOrder, format); + // NaN is a sentinel value for "unused" + buckets.set(0, Double.NaN); + } + + @Override + public boolean needsScores() { return false; } + + @Override + protected final BigArray buckets() { return buckets; } + + @Override + protected final void grow(long minSize) { + long oldSize = buckets.size(); + buckets = bigArrays.grow(buckets, minSize); + buckets.fill(oldSize, buckets.size(), Double.NaN); + } + + @Override + public final SortValue getValueForBucket(long bucket) { + double val = buckets.get(bucket); + if (Double.isNaN(val)) { + return null; + } + return SortValue.from(val); + } + + @Override + public final void close() { + buckets.close(); + } + + protected abstract class Leaf extends BucketedSort.Leaf { + protected abstract double docValue() throws IOException; + + @Override + public final void setScorer(Scorable scorer) {} + + @Override + protected final void setValue(long bucket) throws IOException { + buckets.set(bucket, docValue()); + } + + @Override + protected final boolean setIfCompetitive(long bucket) throws IOException { + double docSort = docValue(); + double bestSort = buckets.get(bucket); + // The NaN check is important here because it needs to always lose. + if (false == Double.isNaN(bestSort) && getOrder().reverseMul() * Double.compare(bestSort, docSort) <= 0) { + return false; + } + buckets.set(bucket, docSort); + return true; + } + } + } + + /** + * Superclass for implementations of {@linkplain BucketedSort} for {@code float} keys. 
+ */ + public abstract static class ForFloats extends BucketedSort { + private FloatArray buckets = bigArrays.newFloatArray(1, false); + + public ForFloats(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + super(bigArrays, sortOrder, format); + // NaN is a sentinel value for "unused" + buckets.set(0, Float.NaN); + } + + @Override + protected final BigArray buckets() { return buckets; } + + @Override + protected final void grow(long minSize) { + long oldSize = buckets.size(); + buckets = bigArrays.grow(buckets, minSize); + buckets.fill(oldSize, buckets.size(), Float.NaN); + } + + @Override + public final SortValue getValueForBucket(long bucket) { + float val = buckets.get(bucket); + if (Float.isNaN(val)) { + return null; + } + return SortValue.from(val); + } + + @Override + public final void close() { + buckets.close(); + } + + protected abstract class Leaf extends BucketedSort.Leaf { + protected abstract float docValue() throws IOException; + + @Override + protected final void setValue(long bucket) throws IOException { + buckets.set(bucket, docValue()); + } + + @Override + protected final boolean setIfCompetitive(long bucket) throws IOException { + float docSort = docValue(); + float bestSort = buckets.get(bucket); + // The NaN check is important here because it needs to always lose. + if (false == Float.isNaN(bestSort) && getOrder().reverseMul() * Float.compare(bestSort, docSort) <= 0) { + return false; + } + buckets.set(bucket, docSort); + return true; + } + + } + } + + /** + * Superclass for implementations of {@linkplain BucketedSort} for {@code long} keys. + */ + public abstract static class ForLongs extends BucketedSort { + /** + * Tracks which buckets have been seen before so we can *always* + * set the value in that case. We need this because there isn't a + * sentinel value in the {@code long} type that we can use for this + * like NaN in {@code double} or {@code float}. 
+ */ + private BitArray seen = new BitArray(1, bigArrays); + /** + * The actual values. + */ + private LongArray buckets = bigArrays.newLongArray(1, false); + private long maxBucket = -1; + + public ForLongs(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + super(bigArrays, sortOrder, format); + } + + @Override + public boolean needsScores() { return false; } + + @Override + protected final BigArray buckets() { return buckets; } + + @Override + protected final void grow(long minSize) { + buckets = bigArrays.grow(buckets, minSize); + } + + @Override + public final SortValue getValueForBucket(long bucket) { + if (bucket > Integer.MAX_VALUE) { + /* We throw exceptions if we try to collect buckets bigger + * than an int so we *can't* have seen any of these. */ + return null; + } + if (bucket > maxBucket) { + return null; + } + if (false == seen.get((int) bucket)) { + /* Buckets we haven't seen must be null here so we can + * skip "gaps" in seen buckets. */ + return null; + } + return SortValue.from(buckets.get(bucket)); + } + + @Override + public final void close() { + Releasables.close(seen, buckets); + } + + protected abstract class Leaf extends BucketedSort.Leaf { + protected abstract long docValue() throws IOException; + + @Override + public final void setScorer(Scorable scorer) {} + + @Override + protected final void setValue(long bucket) throws IOException { + seen.set(bucketIsInt(bucket)); + buckets.set(bucket, docValue()); + maxBucket = Math.max(bucket, maxBucket); + } + + @Override + protected final boolean setIfCompetitive(long bucket) throws IOException { + long docSort = docValue(); + int intBucket = bucketIsInt(bucket); + if (bucket > maxBucket) { + seen.set(intBucket); + buckets.set(bucket, docSort); + maxBucket = bucket; + return true; + } + if (false == seen.get(intBucket)) { + seen.set(intBucket); + buckets.set(bucket, docSort); + return true; + } + long bestSort = buckets.get(bucket); + if (getOrder().reverseMul() * 
Double.compare(bestSort, docSort) <= 0) { + return false; + } + buckets.set(bucket, docSort); + return true; + } + + private int bucketIsInt(long bucket) { + if (bucket > Integer.MAX_VALUE) { + throw new UnsupportedOperationException("Long sort keys don't support more than [" + Integer.MAX_VALUE + "] buckets"); + // I don't feel too bad about that because it'd take about 16 GB of memory.... + } + return (int) bucket; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index fb0f3fd15c1..838a921c2ff 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -387,69 +387,101 @@ public class FieldSortBuilder extends SortBuilder { @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { if (DOC_FIELD_NAME.equals(fieldName)) { - if (order == SortOrder.DESC) { - return SORT_DOC_REVERSE; - } else { - return SORT_DOC; - } - } else { - boolean isUnmapped = false; - MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType == null) { - isUnmapped = true; - if (unmappedType != null) { - fieldType = context.getMapperService().unmappedFieldType(unmappedType); - } else { - throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on"); - } - } - - MultiValueMode localSortMode = null; - if (sortMode != null) { - localSortMode = MultiValueMode.fromString(sortMode.toString()); - } - - boolean reverse = (order == SortOrder.DESC); - if (localSortMode == null) { - localSortMode = reverse ? 
MultiValueMode.MAX : MultiValueMode.MIN; - } - - Nested nested = null; - if (isUnmapped == false) { - if (nestedSort != null) { - if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on v6.5.0 or higher"); - } - if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on last level of nested sort"); - } - validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); - nested = resolveNested(context, nestedSort); - } else { - nested = resolveNested(context, nestedPath, nestedFilter); - } - } - IndexFieldData fieldData = context.getForField(fieldType); - if (fieldData instanceof IndexNumericFieldData == false - && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { - throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); - } - final SortField field; - if (numericType != null) { - if (fieldData instanceof IndexNumericFieldData == false) { - throw new QueryShardException(context, - "[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName()); - } - SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData; - NumericType resolvedType = resolveNumericType(numericType); - field = numericFieldData.sortField(resolvedType, missing, localSortMode, nested, reverse); - } else { - field = fieldData.sortField(missing, localSortMode, nested, reverse); - } - return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null)); + return order == SortOrder.DESC ? 
SORT_DOC_REVERSE : SORT_DOC; } + + MappedFieldType fieldType = context.fieldMapper(fieldName); + Nested nested = nested(context, fieldType); + if (fieldType == null) { + fieldType = resolveUnmappedType(context); + } + + boolean reverse = order == SortOrder.DESC; + IndexFieldData fieldData = context.getForField(fieldType); + if (fieldData instanceof IndexNumericFieldData == false + && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { + throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); + } + final SortField field; + if (numericType != null) { + if (fieldData instanceof IndexNumericFieldData == false) { + throw new QueryShardException(context, + "[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName()); + } + SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData; + NumericType resolvedType = resolveNumericType(numericType); + field = numericFieldData.sortField(resolvedType, missing, localSortMode(), nested, reverse); + } else { + field = fieldData.sortField(missing, localSortMode(), nested, reverse); + } + return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null)); + } + + @Override + public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException { + if (DOC_FIELD_NAME.equals(fieldName)) { + throw new IllegalArgumentException("sorting by _doc is not supported"); + } + + MappedFieldType fieldType = context.fieldMapper(fieldName); + Nested nested = nested(context, fieldType); + if (fieldType == null) { + fieldType = resolveUnmappedType(context); + } + + IndexFieldData fieldData = context.getForField(fieldType); + if (fieldData instanceof IndexNumericFieldData == false + && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { + throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); + } + if 
(numericType != null) { + SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData; + NumericType resolvedType = resolveNumericType(numericType); + return numericFieldData.newBucketedSort(resolvedType, context.bigArrays(), missing, localSortMode(), nested, order, + fieldType.docValueFormat(null, null)); + } + try { + return fieldData.newBucketedSort(context.bigArrays(), missing, localSortMode(), nested, order, + fieldType.docValueFormat(null, null)); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("error building sort for field [" + fieldName + "] of type [" + + fieldType.typeName() + "] in index [" + context.index().getName() + "]: " + e.getMessage(), e); + } + } + + private MappedFieldType resolveUnmappedType(QueryShardContext context) { + if (unmappedType == null) { + throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on"); + } + return context.getMapperService().unmappedFieldType(unmappedType); + } + + private MultiValueMode localSortMode() { + if (sortMode != null) { + return MultiValueMode.fromString(sortMode.toString()); + } + + return order == SortOrder.DESC ? MultiValueMode.MAX : MultiValueMode.MIN; + } + + private Nested nested(QueryShardContext context, MappedFieldType fieldType) throws IOException { + if (fieldType == null) { + return null; + } + // If we have a nestedSort we'll use that. Otherwise, use old style. 
+ if (nestedSort == null) { + return resolveNested(context, nestedPath, nestedFilter); + } + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); + } + if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); + return resolveNested(context, nestedSort); } /** diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 9ec51753dac..8d5d012ccca 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; @@ -575,7 +576,42 @@ public class GeoDistanceSortBuilder extends SortBuilder @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { + GeoPoint[] localPoints = localPoints(); + boolean reverse = order == SortOrder.DESC; + MultiValueMode localSortMode = localSortMode(); + IndexGeoPointFieldData geoIndexFieldData = fieldData(context); + Nested nested = nested(context); + if (geoIndexFieldData.getClass() == LatLonPointDVIndexFieldData.class // only works with 5.x geo_point + && 
nested == null + && localSortMode == MultiValueMode.MIN // LatLonDocValuesField internally picks the closest point + && unit == DistanceUnit.METERS + && reverse == false + && localPoints.length == 1) { + return new SortFieldAndFormat( + LatLonDocValuesField.newDistanceSort(fieldName, localPoints[0].lat(), localPoints[0].lon()), + DocValueFormat.RAW); + } + + return new SortFieldAndFormat( + new SortField(fieldName, comparatorSource(localPoints, localSortMode, geoIndexFieldData, nested), reverse), + DocValueFormat.RAW); + } + + @Override + public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException { + GeoPoint[] localPoints = localPoints(); + MultiValueMode localSortMode = localSortMode(); + IndexGeoPointFieldData geoIndexFieldData = fieldData(context); + Nested nested = nested(context); + + // TODO implement the single point optimization above + + return comparatorSource(localPoints, localSortMode, geoIndexFieldData, nested) + .newBucketedSort(context.bigArrays(), order, DocValueFormat.RAW); + } + + private GeoPoint[] localPoints() { // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed // on 2.x created indexes GeoPoint[] localPoints = points.toArray(new GeoPoint[points.size()]); @@ -601,15 +637,19 @@ public class GeoDistanceSortBuilder extends SortBuilder GeoUtils.normalizePoint(point, true, true); } } + return localPoints; + } - boolean reverse = (order == SortOrder.DESC); - final MultiValueMode finalSortMode; - if (sortMode == null) { - finalSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN; - } else { - finalSortMode = MultiValueMode.fromString(sortMode.toString()); + private MultiValueMode localSortMode() { + // TODO this lines up with FieldSortBuilder. Share? + if (sortMode != null) { + return MultiValueMode.fromString(sortMode.toString()); } + return order == SortOrder.DESC ? 
MultiValueMode.MAX : MultiValueMode.MIN; + } + + private IndexGeoPointFieldData fieldData(QueryShardContext context) { MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType == null) { if (ignoreUnmapped) { @@ -618,71 +658,80 @@ public class GeoDistanceSortBuilder extends SortBuilder throw new IllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort"); } } - final IndexGeoPointFieldData geoIndexFieldData = context.getForField(fieldType); + return context.getForField(fieldType); + } - final Nested nested; - if (nestedSort != null) { - if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on v6.5.0 or higher"); - } - if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on last level of nested sort"); - } - // new nested sorts takes priority - validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); - nested = resolveNested(context, nestedSort); - } else { - nested = resolveNested(context, nestedPath, nestedFilter); + private Nested nested(QueryShardContext context) throws IOException { + // If we have a nestedSort we'll use that. Otherwise, use old style. 
+ if (nestedSort == null) { + return resolveNested(context, nestedPath, nestedFilter); } - - if (geoIndexFieldData.getClass() == LatLonPointDVIndexFieldData.class // only works with 5.x geo_point - && nested == null - && finalSortMode == MultiValueMode.MIN // LatLonDocValuesField internally picks the closest point - && unit == DistanceUnit.METERS - && reverse == false - && localPoints.length == 1) { - return new SortFieldAndFormat( - LatLonDocValuesField.newDistanceSort(fieldName, localPoints[0].lat(), localPoints[0].lon()), - DocValueFormat.RAW); + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); } + if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); + return resolveNested(context, nestedSort); + } - IndexFieldData.XFieldComparatorSource geoDistanceComparatorSource = new IndexFieldData.XFieldComparatorSource(null, finalSortMode, - nested) { - + private IndexFieldData.XFieldComparatorSource comparatorSource(GeoPoint[] localPoints, MultiValueMode localSortMode, + IndexGeoPointFieldData geoIndexFieldData, Nested nested) { + return new IndexFieldData.XFieldComparatorSource(null, localSortMode, nested) { @Override public SortField.Type reducedType() { return SortField.Type.DOUBLE; } + private NumericDoubleValues getNumericDoubleValues(LeafReaderContext context) throws IOException { + final MultiGeoPointValues geoPointValues = geoIndexFieldData.load(context).getGeoPointValues(); + final SortedNumericDoubleValues distanceValues = GeoUtils.distanceValues(geoDistance, unit, geoPointValues, localPoints); + if (nested == null) { + return 
FieldData.replaceMissing(localSortMode.select(distanceValues), Double.POSITIVE_INFINITY); + } else { + final BitSet rootDocs = nested.rootDocs(context); + final DocIdSetIterator innerDocs = nested.innerDocs(context); + final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + return localSortMode.select(distanceValues, Double.POSITIVE_INFINITY, rootDocs, innerDocs, + context.reader().maxDoc(), maxChildren); + } + } + @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { return new FieldComparator.DoubleComparator(numHits, null, null) { @Override protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException { - final MultiGeoPointValues geoPointValues = geoIndexFieldData.load(context).getGeoPointValues(); - final SortedNumericDoubleValues distanceValues = GeoUtils.distanceValues(geoDistance, unit, geoPointValues, - localPoints); - final NumericDoubleValues selectedValues; - if (nested == null) { - selectedValues = FieldData.replaceMissing(finalSortMode.select(distanceValues), Double.POSITIVE_INFINITY); - } else { - final BitSet rootDocs = nested.rootDocs(context); - final DocIdSetIterator innerDocs = nested.innerDocs(context); - final int maxChildren = nested.getNestedSort() != null ? 
- nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; - selectedValues = finalSortMode.select(distanceValues, Double.POSITIVE_INFINITY, rootDocs, innerDocs, - context.reader().maxDoc(), maxChildren); - } - return selectedValues.getRawDoubleValues(); + return getNumericDoubleValues(context).getRawDoubleValues(); + } + }; + } + + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + return new BucketedSort.ForDoubles(bigArrays, sortOrder, format) { + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + private final NumericDoubleValues values = getNumericDoubleValues(ctx); + + @Override + protected boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + + @Override + protected double docValue() throws IOException { + return values.doubleValue(); + } + }; } }; } }; - - return new SortFieldAndFormat(new SortField(fieldName, geoDistanceComparatorSource, reverse), - DocValueFormat.RAW); } static void parseGeoPoints(XContentParser parser, List geoPoints) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 112a5eb2c8c..e612e5c6946 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -19,6 +19,8 @@ package org.elasticsearch.search.sort; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -101,6 +103,39 @@ public class ScoreSortBuilder extends SortBuilder { } } + @Override + public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException { + return new 
BucketedSort.ForFloats(context.bigArrays(), order, DocValueFormat.RAW) { + @Override + public boolean needsScores() { return true; } + + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new BucketedSort.ForFloats.Leaf() { + private Scorable scorer; + + @Override + public void setScorer(Scorable scorer) { + this.scorer = scorer; + } + + @Override + protected boolean advanceExact(int doc) throws IOException { + assert doc == scorer.docID() : "expected scorer to be on [" + doc + "] but was on [" + scorer.docID() + "]"; + /* We will never be called by documents that don't match the + * query and they'll all have a score, thus `true`. */ + return true; + } + + @Override + protected float docValue() throws IOException { + return scorer.score(); + } + }; + } + }; + } + @Override public boolean equals(Object object) { if (this == object) { diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index ded7c8e7817..7a8f1c73712 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -49,8 +50,8 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; -import 
org.elasticsearch.script.Script; import org.elasticsearch.script.NumberSortScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.StringSortScript; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; @@ -306,13 +307,23 @@ public class ScriptSortBuilder extends SortBuilder { @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { + return new SortFieldAndFormat( + new SortField("_script", fieldComparatorSource(context), order == SortOrder.DESC), + DocValueFormat.RAW); + } + + @Override + public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException { + return fieldComparatorSource(context).newBucketedSort(context.bigArrays(), order, DocValueFormat.RAW); + } + + private IndexFieldData.XFieldComparatorSource fieldComparatorSource(QueryShardContext context) throws IOException { MultiValueMode valueMode = null; if (sortMode != null) { valueMode = MultiValueMode.fromString(sortMode.toString()); } - boolean reverse = (order == SortOrder.DESC); if (valueMode == null) { - valueMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN; + valueMode = order == SortOrder.DESC ? 
MultiValueMode.MAX : MultiValueMode.MIN; } final Nested nested; @@ -332,12 +343,11 @@ public class ScriptSortBuilder extends SortBuilder { nested = resolveNested(context, nestedPath, nestedFilter); } - final IndexFieldData.XFieldComparatorSource fieldComparatorSource; switch (type) { case STRING: final StringSortScript.Factory factory = context.compile(script, StringSortScript.CONTEXT); final StringSortScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup()); - fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) { + return new BytesRefFieldComparatorSource(null, null, valueMode, nested) { StringSortScript leafScript; @Override protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException { @@ -361,12 +371,17 @@ public class ScriptSortBuilder extends SortBuilder { protected void setScorer(Scorable scorer) { leafScript.setScorer(scorer); } + + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("error building sort for [_script]: " + + "script sorting only supported on [numeric] scripts but was [" + type + "]"); + } }; - break; case NUMBER: final NumberSortScript.Factory numberSortFactory = context.compile(script, NumberSortScript.CONTEXT); final NumberSortScript.LeafFactory numberSortScript = numberSortFactory.newFactory(script.getParams(), context.lookup()); - fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) { + return new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) { NumberSortScript leafScript; @Override protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException { @@ -389,12 +404,9 @@ public class ScriptSortBuilder extends SortBuilder { leafScript.setScorer(scorer); } }; - break; default: throw new QueryShardException(context, "custom script sort type [" + 
type + "] not supported"); } - - return new SortFieldAndFormat(new SortField("_script", fieldComparatorSource, reverse), DocValueFormat.RAW); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 6310db69b20..ce4eed90f71 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -70,10 +70,15 @@ public abstract class SortBuilder> implements NamedWrit } /** - * Create a @link {@link SortFieldAndFormat} from this builder. + * Create a {@linkplain SortFieldAndFormat} from this builder. */ protected abstract SortFieldAndFormat build(QueryShardContext context) throws IOException; + /** + * Create a {@linkplain BucketedSort} which is useful for sorting inside of aggregations. + */ + public abstract BucketedSort buildBucketedSort(QueryShardContext context) throws IOException; + /** * Set the order of sorting. */ diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java index c6805cae58f..02fb7a29bf1 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java @@ -40,6 +40,11 @@ public enum SortOrder implements Writeable { public String toString() { return "asc"; } + + @Override + public int reverseMul() { + return 1; + } }, /** * Descending order. 
@@ -49,6 +54,11 @@ public enum SortOrder implements Writeable { public String toString() { return "desc"; } + + @Override + public int reverseMul() { + return -1; + } }; public static SortOrder readFromStream(StreamInput in) throws IOException { @@ -63,4 +73,9 @@ public enum SortOrder implements Writeable { public static SortOrder fromString(String op) { return valueOf(op.toUpperCase(Locale.ROOT)); } + + /** + * -1 if the sort is reversed from the standard comparators, 1 otherwise. + */ + public abstract int reverseMul(); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortValue.java b/server/src/main/java/org/elasticsearch/search/sort/SortValue.java new file mode 100644 index 00000000000..f4cfb812b7a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/sort/SortValue.java @@ -0,0 +1,247 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.DocValueFormat; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +/** + * A {@link Comparable}, {@link DocValueFormat} aware wrapper around a sort value. + */ +public abstract class SortValue implements NamedWriteable, Comparable { + /** + * Get a {@linkplain SortValue} for a double. + */ + public static SortValue from(double d) { + return new DoubleSortValue(d); + } + + /** + * Get a {@linkplain SortValue} for a long. + */ + public static SortValue from(long l) { + return new LongSortValue(l); + } + + /** + * Get the list of {@linkplain NamedWriteable}s that this class needs. + */ + public static List namedWriteables() { + return Arrays.asList( + new NamedWriteableRegistry.Entry(SortValue.class, DoubleSortValue.NAME, DoubleSortValue::new), + new NamedWriteableRegistry.Entry(SortValue.class, LongSortValue.NAME, LongSortValue::new)); + } + + private SortValue() { + // All subclasses of this class are defined in this file. + } + + @Override + public final int compareTo(SortValue other) { + /* + * It might make sense to try and compare doubles to longs + * *carefully* to get a real sort. but it might not. For now + * we sort all doubles before all longs. + */ + int typeCompare = getWriteableName().compareTo(other.getWriteableName()); + if (typeCompare != 0) { + return typeCompare; + } + return compareToSameType(other); + } + + /** + * Write the key as xcontent. 
+ */ + public final XContentBuilder toXContent(XContentBuilder builder, DocValueFormat format) throws IOException { + if (format == DocValueFormat.RAW) { + return rawToXContent(builder); + } + return builder.value(format(format)); + } + + /** + * The java object representing the sort value. + */ + public abstract Object getKey(); + + /** + * Format this value using the provided format. + */ + public abstract String format(DocValueFormat format); + + /** + * Write the key as xcontent using the most native type possible. + */ + protected abstract XContentBuilder rawToXContent(XContentBuilder builder) throws IOException; + + /** + * Compare this sort value to another sort value of the same type. + */ + protected abstract int compareToSameType(SortValue obj); + + // Force implementations to override equals for consistency with compareToSameType + @Override + public abstract boolean equals(Object obj); + + // Force implementations to override hashCode for consistency with equals + @Override + public abstract int hashCode(); + + // Force implementations to override toString so debugging isn't a nightmare. 
+ @Override + public abstract String toString(); + + private static class DoubleSortValue extends SortValue { + public static final String NAME = "double"; + + private final double key; + + private DoubleSortValue(double key) { + this.key = key; + } + + private DoubleSortValue(StreamInput in) throws IOException { + this.key = in.readDouble(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeDouble(key); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public Object getKey() { + return key; + } + + @Override + public String format(DocValueFormat format) { + return format.format(key).toString(); + } + + @Override + protected XContentBuilder rawToXContent(XContentBuilder builder) throws IOException { + return builder.value(key); + } + + @Override + protected int compareToSameType(SortValue obj) { + DoubleSortValue other = (DoubleSortValue) obj; + return Double.compare(key, other.key); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || false == getClass().equals(obj.getClass())) { + return false; + } + DoubleSortValue other = (DoubleSortValue) obj; + return key == other.key; + } + + @Override + public int hashCode() { + return Double.hashCode(key); + } + + @Override + public String toString() { + return Double.toString(key); + } + } + + private static class LongSortValue extends SortValue { + public static final String NAME = "long"; + + private final long key; + + LongSortValue(long key) { + this.key = key; + } + + LongSortValue(StreamInput in) throws IOException { + key = in.readLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(key); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public Object getKey() { + return key; + } + + @Override + public String format(DocValueFormat format) { + return format.format(key).toString(); + } + + @Override + protected XContentBuilder 
rawToXContent(XContentBuilder builder) throws IOException { + return builder.value(key); + } + + @Override + protected int compareToSameType(SortValue obj) { + LongSortValue other = (LongSortValue) obj; + return Long.compare(key, other.key); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || false == getClass().equals(obj.getClass())) { + return false; + } + LongSortValue other = (LongSortValue) obj; + return key == other.key; + } + + @Override + public int hashCode() { + return Long.hashCode(key); + } + + @Override + public String toString() { + return Long.toString(key); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index 8a37659147e..1bd9b4e505c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -133,7 +133,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { queryShardContext = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("test").settings(indexSettings).build(), indexSettings), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), - null, null, () -> randomNonNegativeLong(), null, null); + null, null, () -> randomNonNegativeLong(), null, null, () -> true); } private ClusterState createClusterState(String name, int numShards, int numReplicas, Settings settings) { diff --git a/server/src/test/java/org/elasticsearch/common/logging/LoggingOutputStreamTests.java b/server/src/test/java/org/elasticsearch/common/logging/LoggingOutputStreamTests.java index e0521bced9f..d6c99fccf62 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/LoggingOutputStreamTests.java +++ 
b/server/src/test/java/org/elasticsearch/common/logging/LoggingOutputStreamTests.java @@ -81,6 +81,7 @@ public class LoggingOutputStreamTests extends ESTestCase { // this test explicitly outputs the newlines instead of relying on println, to always test the unix behavior public void testFlushOnUnixNewline() { printStream.print("hello\n"); + printStream.print("\n"); // newline by itself does not show up printStream.print("world\n"); assertThat(loggingStream.lines, contains("hello", "world")); } @@ -88,6 +89,7 @@ public class LoggingOutputStreamTests extends ESTestCase { // this test explicitly outputs the newlines instead of relying on println, to always test the windows behavior public void testFlushOnWindowsNewline() { printStream.print("hello\r\n"); + printStream.print("\r\n"); // newline by itself does not show up printStream.print("world\r\n"); assertThat(loggingStream.lines, contains("hello", "world")); } @@ -102,7 +104,6 @@ public class LoggingOutputStreamTests extends ESTestCase { assertThat(loggingStream.threadLocal.get().bytes.length, equalTo(DEFAULT_BUFFER_LENGTH)); } - @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/51838") public void testMaxBuffer() { String longStr = randomAlphaOfLength(MAX_BUFFER_LENGTH); String extraLongStr = longStr + "OVERFLOW"; diff --git a/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java index 4ef095da049..28d52e2bd80 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java @@ -35,6 +35,7 @@ import java.util.Arrays; import java.util.HashSet; import java.util.Set; +import static org.elasticsearch.common.time.DateUtils.clampToNanosRange; import static org.elasticsearch.common.time.DateUtils.toInstant; import static org.elasticsearch.common.time.DateUtils.toLong; import static 
org.elasticsearch.common.time.DateUtils.toMilliSeconds; @@ -84,8 +85,8 @@ public class DateUtilsTests extends ESTestCase { } public void testInstantToLongMax() { - Instant tooEarlyInstant = ZonedDateTime.parse("2262-04-11T23:47:16.854775808Z").toInstant(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> toLong(tooEarlyInstant)); + Instant tooLateInstant = ZonedDateTime.parse("2262-04-11T23:47:16.854775808Z").toInstant(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> toLong(tooLateInstant)); assertThat(e.getMessage(), containsString("is after")); } @@ -109,6 +110,25 @@ public class DateUtilsTests extends ESTestCase { is(ZonedDateTime.parse("2262-04-11T23:47:16.854775807Z").toInstant())); } + public void testClampToNanosRange() { + assertThat(clampToNanosRange(Instant.EPOCH), equalTo(Instant.EPOCH)); + + Instant instant = createRandomInstant(); + assertThat(clampToNanosRange(instant), equalTo(instant)); + } + + public void testClampToNanosRangeMin() { + assertThat(clampToNanosRange(Instant.EPOCH.minusMillis(1)), equalTo(Instant.EPOCH)); + + Instant tooEarlyInstant = ZonedDateTime.parse("1677-09-21T00:12:43.145224191Z").toInstant(); + assertThat(clampToNanosRange(tooEarlyInstant), equalTo(Instant.EPOCH)); + } + + public void testClampToNanosRangeMax() { + Instant tooLateInstant = ZonedDateTime.parse("2262-04-11T23:47:16.854775808Z").toInstant(); + assertThat(clampToNanosRange(tooLateInstant), equalTo(DateUtils.MAX_NANOSECOND_INSTANT)); + } + public void testNanosToMillis() { assertThat(toMilliSeconds(0), is(Instant.EPOCH.toEpochMilli())); diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index fc8a12d9853..2bb5702ff2e 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -166,7 +166,8 @@ public class 
IndexModuleTests extends ESTestCase { public void testWrapperIsBound() throws IOException { final MockEngineFactory engineFactory = new MockEngineFactory(AssertingDirectoryReader.class); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, engineFactory, Collections.emptyMap()); + IndexModule module = new IndexModule( + indexSettings, emptyAnalysisRegistry, engineFactory, Collections.emptyMap(), () -> true); module.setReaderWrapper(s -> new Wrapper()); IndexService indexService = newIndexService(module); @@ -186,7 +187,8 @@ public class IndexModuleTests extends ESTestCase { final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); final Map indexStoreFactories = singletonMap( "foo_store", new FooFunction()); - final IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), indexStoreFactories); + final IndexModule module = new IndexModule( + indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), indexStoreFactories, () -> true); final IndexService indexService = newIndexService(module); assertThat(indexService.getDirectoryFactory(), instanceOf(FooFunction.class)); @@ -203,7 +205,7 @@ public class IndexModuleTests extends ESTestCase { } }; IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); module.addIndexEventListener(eventListener); IndexService indexService = newIndexService(module); IndexSettings x = indexService.getIndexSettings(); @@ -218,7 +220,7 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { Setting booleanSetting = Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope); final IndexSettings indexSettings = 
IndexSettingsModule.newIndexSettings(index, settings, booleanSetting); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); Setting booleanSetting2 = Setting.boolSetting("index.foo.bar.baz", false, Property.Dynamic, Property.IndexScope); AtomicBoolean atomicBoolean = new AtomicBoolean(false); module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); @@ -238,7 +240,7 @@ public class IndexModuleTests extends ESTestCase { public void testAddIndexOperationListener() throws IOException { final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); AtomicBoolean executed = new AtomicBoolean(false); IndexingOperationListener listener = new IndexingOperationListener() { @Override @@ -269,7 +271,7 @@ public class IndexModuleTests extends ESTestCase { public void testAddSearchOperationListener() throws IOException { final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); AtomicBoolean executed = new AtomicBoolean(false); SearchOperationListener listener = new SearchOperationListener() { @@ -304,7 +306,7 @@ public class IndexModuleTests extends ESTestCase { .build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); IndexModule module = - new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + createIndexModule(indexSettings, 
emptyAnalysisRegistry); module.addSimilarity("test_similarity", (providerSettings, indexCreatedVersion, scriptService) -> new TestSimilarity(providerSettings.get("key"))); @@ -320,9 +322,11 @@ public class IndexModuleTests extends ESTestCase { indexService.close("simon says", false); } + + public void testFrozen() { final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); module.freeze(); String msg = "Can't modify IndexModule once the index service has been created"; assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSearchOperationListener(null)).getMessage()); @@ -333,7 +337,7 @@ public class IndexModuleTests extends ESTestCase { assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.forceQueryCacheProvider(null)).getMessage()); } - public void testSetupUnknownSimilarity() throws IOException { + public void testSetupUnknownSimilarity() { Settings settings = Settings.builder() .put("index.similarity.my_similarity.type", "test_similarity") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -341,19 +345,19 @@ public class IndexModuleTests extends ESTestCase { .build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); IndexModule module = - new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + createIndexModule(indexSettings, emptyAnalysisRegistry); Exception ex = expectThrows(IllegalArgumentException.class, () -> newIndexService(module)); assertEquals("Unknown Similarity type [test_similarity] for [my_similarity]", ex.getMessage()); } - public void testSetupWithoutType() throws IOException { + public void testSetupWithoutType() { Settings settings = Settings.builder() 
.put("index.similarity.my_similarity.foo", "bar") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); Exception ex = expectThrows(IllegalArgumentException.class, () -> newIndexService(module)); assertEquals("Similarity [my_similarity] must have an associated type", ex.getMessage()); } @@ -363,7 +367,7 @@ public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); final Set liveQueryCaches = new HashSet<>(); module.forceQueryCacheProvider((a, b) -> { final CustomQueryCache customQueryCache = new CustomQueryCache(liveQueryCaches); @@ -384,7 +388,7 @@ public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); IndexService indexService = newIndexService(module); assertTrue(indexService.cache().query() instanceof 
IndexQueryCache); indexService.close("simon says", false); @@ -396,7 +400,7 @@ public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); module.forceQueryCacheProvider((a, b) -> new CustomQueryCache(null)); IndexService indexService = newIndexService(module); assertTrue(indexService.cache().query() instanceof DisabledQueryCache); @@ -408,7 +412,7 @@ public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry); final Set liveQueryCaches = new HashSet<>(); module.forceQueryCacheProvider((a, b) -> { final CustomQueryCache customQueryCache = new CustomQueryCache(liveQueryCaches); @@ -458,7 +462,7 @@ public class IndexModuleTests extends ESTestCase { }; final AnalysisRegistry analysisRegistry = new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), singletonMap("test", analysisProvider), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap()); - IndexModule module = new IndexModule(indexSettings, analysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule module = createIndexModule(indexSettings, analysisRegistry); threadPool.shutdown(); // causes index service creation 
to fail expectThrows(EsRejectedExecutionException.class, () -> newIndexService(module)); assertThat(openAnalyzers, empty()); @@ -475,11 +479,16 @@ public class IndexModuleTests extends ESTestCase { .build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo", "_na_"), settings, nodeSettings); final IndexModule module = - new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + createIndexModule(indexSettings, emptyAnalysisRegistry); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> newIndexService(module)); assertThat(e, hasToString(containsString("store type [" + storeType + "] is not allowed"))); } + private static IndexModule createIndexModule(IndexSettings indexSettings, AnalysisRegistry emptyAnalysisRegistry) { + return new IndexModule( + indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap(), () -> true); + } + class CustomQueryCache implements QueryCache { private final Set liveQueryCaches; @@ -545,5 +554,4 @@ public class IndexModuleTests extends ESTestCase { return null; } } - } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index a120a640feb..2fdd1f4c99d 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -5131,6 +5131,9 @@ public class InternalEngineTests extends EngineTestCase { } public void testShouldPeriodicallyFlushAfterMerge() throws Exception { + engine.close(); + // Do not use MockRandomMergePolicy as it can cause a force merge performing two merges. 
+ engine = createEngine(copy(engine.config(), newMergePolicy(random(), false))); assertThat("Empty engine does not need flushing", engine.shouldPeriodicallyFlush(), equalTo(false)); ParsedDocument doc = testParsedDocument(Integer.toString(0), null, testDocumentWithTextField(), SOURCE, null); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java index 33170eb39ec..aefb0e7b96e 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java @@ -22,10 +22,14 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.SortField; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; /** Returns an implementation based on paged bytes which doesn't implement WithOrdinals in order to visit different paths in the code, * eg. BytesRefFieldComparatorSource makes decisions based on whether the field data implements WithOrdinals. 
*/ @@ -60,6 +64,12 @@ public class NoOrdinalsStringFieldDataTests extends PagedBytesStringFieldDataTes return new SortField(getFieldName(), source, reverse); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new UnsupportedOperationException(); + } + @Override public void clear() { in.clear(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 6ac59169ad9..011d5ae3ef6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; +import org.elasticsearch.index.mapper.DateFieldMapper.Resolution; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.query.QueryRewriteContext; @@ -53,6 +54,7 @@ import org.joda.time.DateTimeZone; import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.time.ZoneOffset; import java.util.Locale; @@ -82,13 +84,94 @@ public class DateFieldTypeTests extends FieldTypeTestCase { nowInMillis = randomNonNegativeLong(); } - public void testIsFieldWithinQueryEmptyReader() throws IOException { + public void testIsFieldWithinRangeEmptyReader() throws IOException { QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis); IndexReader reader = new MultiReader(); DateFieldType ft = new DateFieldType(); 
ft.setName("my_date"); assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", randomBoolean(), randomBoolean(), null, null, context)); + assertEquals(Relation.DISJOINT, ft.isFieldWithinRange(reader, instant("2015-10-12"), instant("2016-04-03"))); + } + + public void testIsFieldWithinQueryDateMillis() throws IOException { + DateFieldType ft = new DateFieldType(); + ft.setResolution(Resolution.MILLISECONDS); + isFieldWithinRangeTestCase(ft); + } + + public void testIsFieldWithinQueryDateNanos() throws IOException { + DateFieldType ft = new DateFieldType(); + ft.setResolution(Resolution.NANOSECONDS); + isFieldWithinRangeTestCase(ft); + } + + public void isFieldWithinRangeTestCase(DateFieldType ft) throws IOException { + ft.setName("my_date"); + + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); + Document doc = new Document(); + LongPoint field = new LongPoint("my_date", ft.parse("2015-10-12")); + doc.add(field); + w.addDocument(doc); + field.setLongValue(ft.parse("2016-04-03")); + w.addDocument(doc); + DirectoryReader reader = DirectoryReader.open(w); + + DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser(); + doTestIsFieldWithinQuery(ft, reader, null, null); + doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); + doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); + doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat); + + QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinRange(reader, instant("2015-10-09"), instant("2016-01-02"))); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinRange(reader, instant("2016-01-02"), instant("2016-06-20"))); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinRange(reader, instant("2016-01-02"), instant("2016-02-12"))); + 
assertEquals(Relation.DISJOINT, ft.isFieldWithinRange(reader, instant("2014-01-02"), instant("2015-02-12"))); + assertEquals(Relation.DISJOINT, ft.isFieldWithinRange(reader, instant("2016-05-11"), instant("2016-08-30"))); + assertEquals(Relation.WITHIN, ft.isFieldWithinRange(reader, instant("2015-09-25"), instant("2016-05-29"))); + assertEquals(Relation.WITHIN, ft.isFieldWithinRange(reader, instant("2015-10-12"), instant("2016-04-03"))); + assertEquals(Relation.INTERSECTS, + ft.isFieldWithinRange(reader, instant("2015-10-12").plusMillis(1), instant("2016-04-03").minusMillis(1))); + assertEquals(Relation.INTERSECTS, + ft.isFieldWithinRange(reader, instant("2015-10-12").plusMillis(1), instant("2016-04-03"))); + assertEquals(Relation.INTERSECTS, + ft.isFieldWithinRange(reader, instant("2015-10-12"), instant("2016-04-03").minusMillis(1))); + assertEquals(Relation.INTERSECTS, + ft.isFieldWithinRange(reader, instant("2015-10-12").plusNanos(1), instant("2016-04-03").minusNanos(1))); + assertEquals(ft.resolution() == Resolution.NANOSECONDS ? Relation.INTERSECTS : Relation.WITHIN, // Millis round down here. + ft.isFieldWithinRange(reader, instant("2015-10-12").plusNanos(1), instant("2016-04-03"))); + assertEquals(Relation.INTERSECTS, + ft.isFieldWithinRange(reader, instant("2015-10-12"), instant("2016-04-03").minusNanos(1))); + + // Some edge cases + assertEquals(Relation.WITHIN, ft.isFieldWithinRange(reader, Instant.EPOCH, instant("2016-04-03"))); + assertEquals(Relation.WITHIN, ft.isFieldWithinRange(reader, Instant.ofEpochMilli(-1000), instant("2016-04-03"))); + assertEquals(Relation.WITHIN, ft.isFieldWithinRange(reader, Instant.ofEpochMilli(Long.MIN_VALUE), instant("2016-04-03"))); + assertEquals(Relation.WITHIN, ft.isFieldWithinRange(reader, instant("2015-10-12"), Instant.ofEpochMilli(Long.MAX_VALUE))); + + // Fields with no value indexed. 
+ DateFieldType ft2 = new DateFieldType(); + ft2.setName("my_date2"); + + assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context)); + assertEquals(Relation.DISJOINT, ft2.isFieldWithinRange(reader, instant("2015-10-09"), instant("2016-01-02"))); + + // Fire a bunch of random values into isFieldWithinRange to make sure it doesn't crash + for (int iter = 0; iter < 1000; iter++) { + long min = randomLong(); + long max = randomLong(); + if (min > max) { + long swap = max; + max = min; + min = swap; + } + ft.isFieldWithinRange(reader, Instant.ofEpochMilli(min), Instant.ofEpochMilli(max)); + } + + IOUtils.close(reader, w, dir); } private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, @@ -116,37 +199,6 @@ public class DateFieldTypeTests extends FieldTypeTestCase { true, false, null, null, context)); } - public void testIsFieldWithinQuery() throws IOException { - Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); - long instant1 = - DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12")).toInstant().toEpochMilli(); - long instant2 = - DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2016-04-03")).toInstant().toEpochMilli(); - Document doc = new Document(); - LongPoint field = new LongPoint("my_date", instant1); - doc.add(field); - w.addDocument(doc); - field.setLongValue(instant2); - w.addDocument(doc); - DirectoryReader reader = DirectoryReader.open(w); - DateFieldType ft = new DateFieldType(); - ft.setName("my_date"); - DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser(); - doTestIsFieldWithinQuery(ft, reader, null, null); - doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat); - - // Fields 
with no value indexed. - DateFieldType ft2 = new DateFieldType(); - ft2.setName("my_date2"); - - QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis); - assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context)); - IOUtils.close(reader, w, dir); - } - public void testValueFormat() { MappedFieldType ft = createDefaultFieldType(); long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12T14:10:55")) @@ -179,7 +231,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null); + xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date = "2015-10-12T14:10:55"; @@ -202,7 +254,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), - null, null, () -> nowInMillis, null, null); + null, null, () -> nowInMillis, null, null, () -> true); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date1 = "2015-10-12T14:10:55"; @@ -251,4 +303,8 @@ public class DateFieldTypeTests extends FieldTypeTestCase { w.close(); dir.close(); } + + private Instant instant(String str) { + return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(str)).toInstant(); + } } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index dbff39027f5..4635c6f28cc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -68,7 +68,7 @@ public class FieldNamesFieldTypeTests extends FieldTypeTestCase { QueryShardContext queryShardContext = new QueryShardContext(0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, - null, null, null, null, null, null, () -> 0L, null, null); + null, null, null, null, null, null, () -> 0L, null, null, () -> true); fieldNamesFieldType.setEnabled(true); Query termQuery = fieldNamesFieldType.termQuery("field_name", queryShardContext); assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java index e0cd3b1d153..662b2e331e4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; public class IgnoredFieldTypeTests extends FieldTypeTestCase { @@ -40,7 +41,12 @@ public class IgnoredFieldTypeTests extends FieldTypeTestCase { ft.setIndexOptions(IndexOptions.DOCS); Query expected = new PrefixQuery(new Term("field", new BytesRef("foo*"))); - assertEquals(expected, ft.prefixQuery("foo*", null, null)); + assertEquals(expected, ft.prefixQuery("foo*", null, MOCK_QSC)); + + ElasticsearchException ee = 
expectThrows(ElasticsearchException.class, + () -> ft.prefixQuery("foo*", null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. " + + "For optimised prefix queries on text fields please enable [index_prefixes].", ee.getMessage()); } public void testRegexpQuery() { @@ -49,7 +55,12 @@ public class IgnoredFieldTypeTests extends FieldTypeTestCase { ft.setIndexOptions(IndexOptions.DOCS); Query expected = new RegexpQuery(new Term("field", new BytesRef("foo?"))); - assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, null)); + assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.regexpQuery("foo?", randomInt(10), randomInt(10) + 1, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testWildcardQuery() { @@ -58,6 +69,11 @@ public class IgnoredFieldTypeTests extends FieldTypeTestCase { ft.setIndexOptions(IndexOptions.DOCS); Query expected = new WildcardQuery(new Term("field", new BytesRef("foo*"))); - assertEquals(expected, ft.wildcardQuery("foo*", null, null)); + assertEquals(expected, ft.wildcardQuery("foo*", null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.wildcardQuery("valu*", null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java index 1b100fb0872..bf0a0dffba7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java @@ -78,6 +78,6 @@ public class IndexFieldTypeTests extends FieldTypeTestCase { Predicate indexNameMatcher = pattern -> Regex.simpleMatch(pattern, "index"); return new QueryShardContext(0, indexSettings, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), - null, null, System::currentTimeMillis, null, indexNameMatcher); + null, null, System::currentTimeMillis, null, indexNameMatcher, () -> true); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index eae5b4ac7d2..b05c4c779cf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -33,7 +33,10 @@ import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.AnalyzerScope; @@ -150,17 +153,35 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase { assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.NAME, "field")), ft.existsQuery(null)); } + public void testRangeQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + assertEquals(new TermRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), + ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_QSC)); + + ElasticsearchException ee = 
expectThrows(ElasticsearchException.class, + () -> ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[range] queries on [text] or [keyword] fields cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", ee.getMessage()); + } + public void testRegexpQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); assertEquals(new RegexpQuery(new Term("field","foo.*")), - ft.regexpQuery("foo.*", 0, 10, null, null)); + ft.regexpQuery("foo.*", 0, 10, null, MOCK_QSC)); ft.setIndexOptions(IndexOptions.NONE); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> ft.regexpQuery("foo.*", 0, 10, null, null)); + () -> ft.regexpQuery("foo.*", 0, 10, null, MOCK_QSC)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.regexpQuery("foo.*", randomInt(10), randomInt(10) + 1, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testFuzzyQuery() { @@ -168,12 +189,18 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); assertEquals(new FuzzyQuery(new Term("field","foo"), 2, 1, 50, true), - ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true)); + ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_QSC)); ft.setIndexOptions(IndexOptions.NONE); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true)); + () -> ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_QSC)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + + ElasticsearchException ee = 
expectThrows(ElasticsearchException.class, + () -> ft.fuzzyQuery("foo", Fuzziness.AUTO, randomInt(10) + 1, randomInt(10) + 1, + randomBoolean(), MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[fuzzy] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testNormalizeQueries() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java index d53eb0bcac1..aaabf3f9edb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java @@ -22,15 +22,20 @@ import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -44,6 +49,8 @@ import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class LegacyGeoShapeFieldMapperTests extends ESSingleNodeTestCase { @@ -695,6 +702,31 @@ public class LegacyGeoShapeFieldMapperTests extends ESSingleNodeTestCase { assertFieldWarnings("tree", "precision", "strategy", "points_only"); } + public void testDisallowExpensiveQueries() throws IOException { + QueryShardContext queryShardContext = mock(QueryShardContext.class); + when(queryShardContext.allowExpensiveQueries()).thenReturn(false); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + + + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> geoShapeFieldMapper.fieldType().geometryQueryBuilder().process( + new Point(-10, 10), "location", SpatialStrategy.TERM, ShapeRelation.INTERSECTS, queryShardContext)); + assertEquals("[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", e.getMessage()); + assertFieldWarnings("tree"); + } + public String toXContentString(LegacyGeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); ToXContent.Params params; @@ -710,5 +742,4 @@ public class 
LegacyGeoShapeFieldMapperTests extends ESSingleNodeTestCase { public String toXContentString(LegacyGeoShapeFieldMapper mapper) throws IOException { return toXContentString(mapper, true); } - } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 893a909ece2..d1a7ff06d32 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -230,7 +230,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings); return new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null); + xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true); } public void testDateRangeQueryUsingMappingFormat() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java index 6f68d28c017..8bb052efbd4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java @@ -25,8 +25,10 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; public class RoutingFieldTypeTests extends FieldTypeTestCase { + @Override protected MappedFieldType createDefaultFieldType() { return new RoutingFieldMapper.RoutingFieldType(); @@ -38,7 +40,12 @@ 
public class RoutingFieldTypeTests extends FieldTypeTestCase { ft.setIndexOptions(IndexOptions.DOCS); Query expected = new PrefixQuery(new Term("field", new BytesRef("foo*"))); - assertEquals(expected, ft.prefixQuery("foo*", null, null)); + assertEquals(expected, ft.prefixQuery("foo*", null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.prefixQuery("foo*", null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. " + + "For optimised prefix queries on text fields please enable [index_prefixes].", ee.getMessage()); } public void testRegexpQuery() { @@ -47,7 +54,12 @@ public class RoutingFieldTypeTests extends FieldTypeTestCase { ft.setIndexOptions(IndexOptions.DOCS); Query expected = new RegexpQuery(new Term("field", new BytesRef("foo?"))); - assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, null)); + assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.regexpQuery("foo?", randomInt(10), randomInt(10) + 1, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testWildcardQuery() { @@ -56,6 +68,11 @@ public class RoutingFieldTypeTests extends FieldTypeTestCase { ft.setIndexOptions(IndexOptions.DOCS); Query expected = new WildcardQuery(new Term("field", new BytesRef("foo*"))); - assertEquals(expected, ft.wildcardQuery("foo*", null, null)); + assertEquals(expected, ft.wildcardQuery("foo*", null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.wildcardQuery("valu*", null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java index da589073552..9b54dcda310 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java @@ -30,10 +30,13 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.junit.Before; @@ -45,6 +48,7 @@ import static org.apache.lucene.search.MultiTermQuery.CONSTANT_SCORE_REWRITE; import static org.hamcrest.Matchers.equalTo; public class TextFieldTypeTests extends FieldTypeTestCase { + @Override protected MappedFieldType createDefaultFieldType() { return new TextFieldMapper.TextFieldType(); @@ -130,17 +134,35 @@ public class TextFieldTypeTests extends FieldTypeTestCase { assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } + public void testRangeQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + assertEquals(new TermRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), + ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[range] queries on 
[text] or [keyword] fields cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", ee.getMessage()); + } + public void testRegexpQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); assertEquals(new RegexpQuery(new Term("field","foo.*")), - ft.regexpQuery("foo.*", 0, 10, null, null)); + ft.regexpQuery("foo.*", 0, 10, null, MOCK_QSC)); ft.setIndexOptions(IndexOptions.NONE); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> ft.regexpQuery("foo.*", 0, 10, null, null)); + () -> ft.regexpQuery("foo.*", 0, 10, null, MOCK_QSC)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.regexpQuery("foo.*", randomInt(10), randomInt(10) + 1, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testFuzzyQuery() { @@ -148,12 +170,18 @@ public class TextFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); assertEquals(new FuzzyQuery(new Term("field","foo"), 2, 1, 50, true), - ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true)); + ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_QSC)); ft.setIndexOptions(IndexOptions.NONE); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true)); + () -> ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_QSC)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.fuzzyQuery("foo", Fuzziness.AUTO, randomInt(10) + 1, randomInt(10) + 1, + randomBoolean(), MOCK_QSC_DISALLOW_EXPENSIVE)); + 
assertEquals("[fuzzy] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testIndexPrefixes() { @@ -161,13 +189,18 @@ public class TextFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); ft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType("field", "field._index_prefix", 2, 10)); - Query q = ft.prefixQuery("goin", CONSTANT_SCORE_REWRITE, null); + Query q = ft.prefixQuery("goin", CONSTANT_SCORE_REWRITE, randomMockShardContext()); assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q); - q = ft.prefixQuery("internationalisatio", CONSTANT_SCORE_REWRITE, null); + q = ft.prefixQuery("internationalisatio", CONSTANT_SCORE_REWRITE, MOCK_QSC); assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q); - q = ft.prefixQuery("g", CONSTANT_SCORE_REWRITE, null); + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.prefixQuery("internationalisatio", null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. 
" + + "For optimised prefix queries on text fields please enable [index_prefixes].", ee.getMessage()); + + q = ft.prefixQuery("g", CONSTANT_SCORE_REWRITE, randomMockShardContext()); Automaton automaton = Operations.concatenate(Arrays.asList(Automata.makeChar('g'), Automata.makeAnyChar())); diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 40fa7ff6d95..6e3ee58c220 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; import org.elasticsearch.search.internal.ShardSearchRequest; diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 1e147e24def..8e736b12d6a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -425,7 +425,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase true); String json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + "\"match\" : { " + diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index 08cf8eedb94..c6fd863a35b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -23,6 +23,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.Strings; @@ -355,5 +356,17 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase InnerHitContextBuilder.extractInnerHits(queryBuilder,Collections.singletonMap("some_name", null))); + assertEquals("[inner_hits] already contains an entry for key [some_name]", e.getMessage()); + } + + public void testDisallowExpensiveQueries() { + QueryShardContext queryShardContext = mock(QueryShardContext.class); + when(queryShardContext.allowExpensiveQueries()).thenReturn(false); + + NestedQueryBuilder queryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None); + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> queryBuilder.toQuery(queryShardContext)); + assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java index 8e57ad50bd1..ba68e1b6a20 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java @@ -153,6 +153,6 @@ public class QueryShardContextTests 
extends ESTestCase { (mappedFieldType, idxName) -> mappedFieldType.fielddataBuilder(idxName).build(indexSettings, mappedFieldType, null, null, null), mapperService, null, null, NamedXContentRegistry.EMPTY, new NamedWriteableRegistry(Collections.emptyList()), - null, null, () -> nowInMillis, clusterAlias, null); + null, null, () -> nowInMillis, clusterAlias, null, () -> true); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java index 83ab9c8e62b..c43470ea3b2 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java @@ -41,7 +41,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), - null, new IndexSearcher(reader), null, null, null); + null, new IndexSearcher(reader), null, null, null, () -> true); RangeQueryBuilder range = new RangeQueryBuilder("foo"); assertEquals(Relation.DISJOINT, range.getRelation(context)); } @@ -58,7 +58,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, null, - indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), null, null, null, null, null); + indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), + null, null, null, null, null, () -> true); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // can't make assumptions on a missing reader, so it must 
return INTERSECT assertEquals(Relation.INTERSECTS, range.getRelation(context)); @@ -78,7 +79,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), - null, new IndexSearcher(reader), null, null, null); + null, new IndexSearcher(reader), null, null, null, () -> true); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // no values -> DISJOINT assertEquals(Relation.DISJOINT, range.getRelation(context)); diff --git a/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java index fbf67860a2d..7eda1083b26 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; @@ -33,6 +34,8 @@ import java.util.Set; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class ScriptQueryBuilderTests extends AbstractQueryTestCase { @Override @@ -53,7 +56,9 @@ public class ScriptQueryBuilderTests extends AbstractQueryTestCase new ScriptQueryBuilder((Script) null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new ScriptQueryBuilder((Script) null)); + assertEquals("script cannot be null", e.getMessage()); } public void testFromJsonVerbose() 
throws IOException { @@ -126,4 +131,15 @@ public class ScriptQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); + assertEquals("[script] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java index 690a7d6ae75..04322a01d0f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; import org.elasticsearch.index.query.functionscore.ScriptScoreQueryBuilder; import org.elasticsearch.script.MockScriptEngine; @@ -32,6 +33,8 @@ import java.util.Collections; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class ScriptScoreQueryBuilderTests extends AbstractQueryTestCase { @@ -71,15 +74,17 @@ public class ScriptScoreQueryBuilderTests extends AbstractQueryTestCase new ScriptScoreQueryBuilder(matchAllQuery(), null) ); + assertEquals("script_score: script must not be null" , e.getMessage()); - expectThrows( + e = expectThrows( IllegalArgumentException.class, () -> new ScriptScoreQueryBuilder(null, script) ); + assertEquals("script_score: query must not be null" , e.getMessage()); } /** @@ -93,4 +98,15 @@ public class ScriptScoreQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); + assertEquals("[script score] queries cannot be executed when 
'search.allow_expensive_queries' is set to false.", + e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index 94f26d8ed8f..2b32d49e001 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.FilterScoreFunction; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.lucene.search.function.LeafScoreFunction; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.common.lucene.search.function.ScoreFunction; @@ -60,7 +61,11 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; @@ -144,6 +149,12 @@ public class FunctionScoreTests extends ESTestCase { throw new UnsupportedOperationException(UNSUPPORTED); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, 
MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new UnsupportedOperationException(UNSUPPORTED); + } + @Override public void clear() { throw new UnsupportedOperationException(UNSUPPORTED); @@ -235,6 +246,12 @@ public class FunctionScoreTests extends ESTestCase { throw new UnsupportedOperationException(UNSUPPORTED); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new UnsupportedOperationException(UNSUPPORTED); + } + @Override public void clear() { throw new UnsupportedOperationException(UNSUPPORTED); diff --git a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index c3b896e9c07..109bc017f74 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -389,6 +389,7 @@ public class FlushIT extends ESIntegTestCase { .put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.getKey(), randomTimeValue(10, 1000, "ms")).build()); assertAcked(client().admin().indices().prepareCreate(indexName).setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), randomTimeValue(200, 500, "ms")) .put(IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING.getKey(), randomTimeValue(50, 200, "ms")) .put("index.routing.allocation.include._name", String.join(",", dataNodes)) .build())); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index fb7c9dfcc53..146ded11b42 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ 
b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -179,12 +178,11 @@ public class RestControllerTests extends ESTestCase { String path = "/_" + randomAlphaOfLengthBetween(1, 6); RestHandler handler = mock(RestHandler.class); String deprecationMessage = randomAlphaOfLengthBetween(1, 10); - DeprecationLogger logger = mock(DeprecationLogger.class); // don't want to test everything -- just that it actually wraps the handler - doCallRealMethod().when(controller).registerAsDeprecatedHandler(method, path, handler, deprecationMessage, logger); + doCallRealMethod().when(controller).registerAsDeprecatedHandler(method, path, handler, deprecationMessage); - controller.registerAsDeprecatedHandler(method, path, handler, deprecationMessage, logger); + controller.registerAsDeprecatedHandler(method, path, handler, deprecationMessage); verify(controller).registerHandler(eq(method), eq(path), any(DeprecationRestHandler.class)); } @@ -197,18 +195,17 @@ public class RestControllerTests extends ESTestCase { final RestHandler handler = mock(RestHandler.class); final RestRequest.Method deprecatedMethod = randomFrom(RestRequest.Method.values()); final String deprecatedPath = "/_" + randomAlphaOfLengthBetween(1, 6); - final DeprecationLogger logger = mock(DeprecationLogger.class); final String deprecationMessage = "[" + deprecatedMethod.name() + " " + deprecatedPath + "] is deprecated! 
Use [" + method.name() + " " + path + "] instead."; // don't want to test everything -- just that it actually wraps the handlers - doCallRealMethod().when(controller).registerWithDeprecatedHandler(method, path, handler, deprecatedMethod, deprecatedPath, logger); + doCallRealMethod().when(controller).registerWithDeprecatedHandler(method, path, handler, deprecatedMethod, deprecatedPath); - controller.registerWithDeprecatedHandler(method, path, handler, deprecatedMethod, deprecatedPath, logger); + controller.registerWithDeprecatedHandler(method, path, handler, deprecatedMethod, deprecatedPath); verify(controller).registerHandler(method, path, handler); - verify(controller).registerAsDeprecatedHandler(deprecatedMethod, deprecatedPath, handler, deprecationMessage, logger); + verify(controller).registerAsDeprecatedHandler(deprecatedMethod, deprecatedPath, handler, deprecationMessage); } public void testRegisterSecondMethodWithDifferentNamedWildcard() { diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 1f351fae312..d2a8b9dd533 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -53,7 +53,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuil import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.subphase.ExplainFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.ExplainPhase; import org.elasticsearch.search.fetch.subphase.highlight.CustomHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; @@ -137,7 +137,7 @@ public 
class SearchModuleTests extends ESTestCase { SearchPlugin registersDupeFetchSubPhase = new SearchPlugin() { @Override public List getFetchSubPhases(FetchPhaseConstructionContext context) { - return singletonList(new ExplainFetchSubPhase()); + return singletonList(new ExplainPhase()); } }; expectThrows(IllegalArgumentException.class, registryForPlugin(registersDupeFetchSubPhase)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java index 6fc5561f6d6..4afce4e5ff2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java @@ -97,7 +97,7 @@ public class ExtendedBoundsTests extends ESTestCase { QueryShardContext qsc = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), - null, null, () -> now, null, null); + null, null, () -> now, null, null, () -> true); DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime"); DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC, DateFieldMapper.Resolution.MILLISECONDS); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index 72781f332a0..32c539e4334 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -46,13 +46,17 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryShardContext; @@ -87,9 +91,9 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.LeafDocLookup; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -97,7 +101,6 @@ import java.util.List; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Consumer; -import java.util.function.DoubleConsumer; import java.util.function.Function; import java.util.function.Supplier; @@ -740,41 +743,56 @@ public class MinAggregatorTests extends AggregatorTestCase { ) ); } - assertNotNull( + for (DateFieldMapper.Resolution resolution : DateFieldMapper.Resolution.values()) { + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new MatchAllDocsQuery()), + mockAggregator(), + mockDateValuesSourceConfig("number", true, resolution) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new TermQuery(new Term("foo", "bar"))), + null, + mockDateValuesSourceConfig("number", true, resolution) + ) + ); + assertNull( + 
MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + mockAggregator(), + mockDateValuesSourceConfig("number", true, resolution) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + null, + mockDateValuesSourceConfig("number", false, resolution) + ) + ); + } + // Check that we decode a dates "just like" the doc values instance. + Instant expected = Instant.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2020-01-01T00:00:00Z")); + byte[] scratch = new byte[8]; + LongPoint.encodeDimension(DateFieldMapper.Resolution.MILLISECONDS.convert(expected), scratch, 0); + assertThat( MinAggregator.getPointReaderOrNull( mockSearchContext(new MatchAllDocsQuery()), null, - mockDateValuesSourceConfig("number", true) - ) + mockDateValuesSourceConfig("number", true, DateFieldMapper.Resolution.MILLISECONDS) + ).apply(scratch), equalTo(expected.toEpochMilli()) ); - assertNull( + LongPoint.encodeDimension(DateFieldMapper.Resolution.NANOSECONDS.convert(expected), scratch, 0); + assertThat( MinAggregator.getPointReaderOrNull( mockSearchContext(new MatchAllDocsQuery()), - mockAggregator(), - mockDateValuesSourceConfig("number", true) - ) - ); - assertNull( - MinAggregator.getPointReaderOrNull( - mockSearchContext(new TermQuery(new Term("foo", "bar"))), null, - mockDateValuesSourceConfig("number", true) - ) - ); - assertNull( - MinAggregator.getPointReaderOrNull( - mockSearchContext(null), - mockAggregator(), - mockDateValuesSourceConfig("number", true) - ) - ); - assertNull( - MinAggregator.getPointReaderOrNull( - mockSearchContext(null), - null, - mockDateValuesSourceConfig("number", false) - ) + mockDateValuesSourceConfig("number", true, DateFieldMapper.Resolution.NANOSECONDS) + ).apply(scratch), equalTo(expected.toEpochMilli()) ); + } public void testMinShortcutRandom() throws Exception { @@ -799,21 +817,6 @@ public class MinAggregatorTests extends AggregatorTestCase { (v) -> DoublePoint.decodeDimension(v, 0)); } - private void 
testMinCase(IndexSearcher searcher, - AggregationBuilder aggregationBuilder, - MappedFieldType ft, - DoubleConsumer testResult) throws IOException { - Collection queries = Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(ft.name())); - for (Query query : queries) { - MinAggregator aggregator = createAggregator(query, aggregationBuilder, searcher, createIndexSettings(), ft); - aggregator.preCollection(); - searcher.search(new MatchAllDocsQuery(), aggregator); - aggregator.postCollection(); - InternalMin result = (InternalMin) aggregator.buildAggregation(0L); - testResult.accept(result.getValue()); - } - } - private void testMinShortcutCase(Supplier randomNumber, Function pointFieldFunc, Function pointConvertFunc) throws IOException { @@ -889,12 +892,17 @@ public class MinAggregatorTests extends AggregatorTestCase { return config; } - private ValuesSourceConfig mockDateValuesSourceConfig(String fieldName, boolean indexed) { + private ValuesSourceConfig mockDateValuesSourceConfig(String fieldName, boolean indexed, + DateFieldMapper.Resolution resolution) { ValuesSourceConfig config = mock(ValuesSourceConfig.class); - MappedFieldType ft = new DateFieldMapper.Builder(fieldName).fieldType(); - ft.setName(fieldName); - ft.setIndexOptions(indexed ? 
IndexOptions.DOCS : IndexOptions.NONE); - ft.freeze(); + Mapper.BuilderContext builderContext = new Mapper.BuilderContext( + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(), + new ContentPath()); + MappedFieldType ft = new DateFieldMapper.Builder(fieldName) + .index(indexed) + .withResolution(resolution) + .build(builderContext) + .fieldType(); when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft)); return config; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java index 9d0d1d69f02..4adf16f6012 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java @@ -426,6 +426,6 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase { Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); return new QueryShardContext(0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, scriptService, - xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null, null); + xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null, null, () -> true); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java similarity index 95% rename from server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java rename to server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 
12675b8e178..d87d21c7d0e 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -39,7 +39,7 @@ import java.util.Map; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class FetchSourceSubPhaseTests extends ESTestCase { +public class FetchSourcePhaseTests extends ESTestCase { public void testFetchSource() throws IOException { XContentBuilder source = XContentFactory.jsonBuilder().startObject() @@ -149,22 +149,22 @@ public class FetchSourceSubPhaseTests extends ESTestCase { private FetchSubPhase.HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource, String[] includes, String[] excludes, SearchHit.NestedIdentity nestedIdentity) { FetchSourceContext fetchSourceContext = new FetchSourceContext(fetchSource, includes, excludes); - SearchContext searchContext = new FetchSourceSubPhaseTestSearchContext(fetchSourceContext, + SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext, source == null ? 
null : BytesReference.bytes(source)); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null); hitContext.reset(searchHit, null, 1, null); - FetchSourceSubPhase phase = new FetchSourceSubPhase(); + FetchSourcePhase phase = new FetchSourcePhase(); phase.hitExecute(searchContext, hitContext); return hitContext; } - private static class FetchSourceSubPhaseTestSearchContext extends TestSearchContext { + private static class FetchSourcePhaseTestSearchContext extends TestSearchContext { final FetchSourceContext context; final BytesReference source; final IndexShard indexShard; - FetchSourceSubPhaseTestSearchContext(FetchSourceContext context, BytesReference source) { + FetchSourcePhaseTestSearchContext(FetchSourceContext context, BytesReference source) { super(null); this.context = context; this.source = source; diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 7d3d25377c9..d432573f38a 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -280,7 +280,7 @@ public class HighlightBuilderTests extends ESTestCase { // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), namedWriteableRegistry, - null, null, System::currentTimeMillis, null, null) { + null, null, System::currentTimeMillis, null, null, () -> true) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git 
a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java index d15404cc068..5ef47ce6705 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java @@ -18,12 +18,14 @@ */ package org.elasticsearch.search.geo; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; @@ -41,6 +43,16 @@ import static org.hamcrest.Matchers.instanceOf; public class GeoShapeIntegrationIT extends ESIntegTestCase { + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + // Check that only geo-shape queries on legacy PrefixTree based + // geo shapes are disallowed. 
+ .put("search.allow_expensive_queries", false) + .put(super.nodeSettings(nodeOrdinal)) + .build(); + } + /** * Test that orientation parameter correctly persists across cluster restart */ @@ -183,21 +195,21 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase { public void testIndexPolygonDateLine() throws Exception { String mappingVector = "{\n" + - " \"properties\": {\n" + - " \"shape\": {\n" + - " \"type\": \"geo_shape\"\n" + - " }\n" + - " }\n" + - " }"; + " \"properties\": {\n" + + " \"shape\": {\n" + + " \"type\": \"geo_shape\"\n" + + " }\n" + + " }\n" + + " }"; String mappingQuad = "{\n" + - " \"properties\": {\n" + - " \"shape\": {\n" + - " \"type\": \"geo_shape\",\n" + - " \"tree\": \"quadtree\"\n" + - " }\n" + - " }\n" + - " }"; + " \"properties\": {\n" + + " \"shape\": {\n" + + " \"type\": \"geo_shape\",\n" + + " \"tree\": \"quadtree\"\n" + + " }\n" + + " }\n" + + " }"; // create index @@ -208,37 +220,47 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase { ensureGreen(); String source = "{\n" + - " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\""+ - "}"; + " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\""+ + "}"; indexRandom(true, client().prepareIndex("quad", "doc", "0").setSource(source, XContentType.JSON)); indexRandom(true, client().prepareIndex("vector", "doc", "0").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(-179.75, 1)) - ).get(); + try { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + SearchResponse searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(-179.75, 1)) + ).get(); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(90, 1)) - ).get(); + searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(90, 1)) + ).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - searchResponse = client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(-180, 1)) - ).get(); + searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(-180, 1)) + ).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(180, 1)) - ).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(180, 1)) + ).get(); - searchResponse = client().prepareSearch("vector").setQuery( + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } finally { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + } + + SearchResponse searchResponse = client().prepareSearch("vector").setQuery( geoShapeQuery("shape", new PointBuilder(90, 1)) ).get(); diff --git a/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java 
b/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java index 181f44adcf9..86cd05e15a2 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -18,11 +18,14 @@ */ package org.elasticsearch.search.geo; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -33,6 +36,8 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESIntegTestCase; +import java.io.IOException; + import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -186,6 +191,53 @@ public class LegacyGeoShapeIntegrationIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } + public void testDisallowExpensiveQueries() throws InterruptedException, IOException { + try { + // create index + assertAcked(client().admin().indices().prepareCreate("test") + .addMapping("_doc", "shape", "type=geo_shape,strategy=recursive,tree=geohash").get()); + ensureGreen(); + + indexRandom(true, client().prepareIndex("test", "_doc").setId("0").setSource( + "shape", (ToXContent) (builder, params) -> { + 
builder.startObject().field("type", "circle") + .startArray("coordinates").value(30).value(50).endArray() + .field("radius", "77km") + .endObject(); + return builder; + })); + refresh(); + + // Execute with search.allow_expensive_queries = null => default value = true => success + SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", + new Circle(30, 50, 77000))).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + // Set search.allow_expensive_queries to "false" => assert failure + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> client().prepareSearch("test").setQuery(geoShapeQuery("shape", + new Circle(30, 50, 77000))).get()); + assertEquals("[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", e.getCause().getMessage()); + + // Set search.allow_expensive_queries to "true" => success + updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", + new Circle(30, 50, 77000))).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } finally { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); + 
assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + } + } + private String findNodeName(String index) { ClusterState state = client().admin().cluster().prepareState().get().getState(); IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0); diff --git a/server/src/test/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java index cc199bc61de..88f777b2053 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java @@ -19,6 +19,8 @@ package org.elasticsearch.search.query; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.ScriptDocValues; @@ -156,4 +158,57 @@ public class ScriptScoreQueryIT extends ESIntegTestCase { assertNoFailures(resp); assertOrderedSearchHits(resp, "3", "2", "1"); } + + public void testDisallowExpensiveQueries() { + try { + assertAcked( + prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double") + ); + int docCount = 10; + for (int i = 1; i <= docCount; i++) { + client().prepareIndex("test-index", "_doc").setId("" + i) + .setSource("field1", "text" + (i % 2), "field2", i) + .get(); + } + refresh(); + + Map params = new HashMap<>(); + params.put("param1", 0.1); + + // Execute with search.allow_expensive_queries = null => default value = true => success + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", params); + SearchResponse resp = client() + .prepareSearch("test-index") + .setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)) + .get(); + assertNoFailures(resp); + 
+ // Set search.allow_expensive_queries to "false" => assert failure + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> client() + .prepareSearch("test-index") + .setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)) + .get()); + assertEquals("[script score] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getCause().getMessage()); + + // Set search.allow_expensive_queries to "true" => success + updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + resp = client() + .prepareSearch("test-index") + .setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)) + .get(); + assertNoFailures(resp); + } finally { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 1f0394216e0..2ece6faf483 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -144,7 +144,7 @@ public class QueryRescorerBuilderTests extends ESTestCase { 
// shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null) { + xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null, () -> true) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); @@ -188,7 +188,7 @@ public class QueryRescorerBuilderTests extends ESTestCase { // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null) { + xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null, () -> true) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index fbc135f7cf3..4e26a12a0ce 100644 --- a/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -19,6 +19,8 @@ package org.elasticsearch.search.scriptfilter; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -47,6 +49,7 
@@ import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) @@ -222,6 +225,55 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); } + public void testDisallowExpensiveQueries() { + try { + assertAcked( + prepareCreate("test-index").addMapping("_doc", "num1", "type=double") + ); + int docCount = 10; + for (int i = 1; i <= docCount; i++) { + client().prepareIndex("test-index", "_doc").setId("" + i) + .setSource("num1", i) + .get(); + } + refresh(); + + // Execute with search.allow_expensive_queries = null => default value = true => success + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > 1", + Collections.emptyMap()); + SearchResponse resp = client().prepareSearch("test-index") + .setQuery(scriptQuery(script)) + .get(); + assertNoFailures(resp); + + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + // Set search.allow_expensive_queries to "false" => assert failure + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> client() + .prepareSearch("test-index") + .setQuery(scriptQuery(script)) + .get()); + assertEquals("[script] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getCause().getMessage()); 
+ + // Set search.allow_expensive_queries to "true" => success + updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + resp = client().prepareSearch("test-index") + .setQuery(scriptQuery(script)) + .get(); + assertNoFailures(resp); + } finally { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + } + } + private static AtomicInteger scriptCounter = new AtomicInteger(0); public static int incrementScriptCounter() { diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index a0e4c6bbbea..39c5fff625c 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -28,13 +28,17 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.DocValueFormat; import 
org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -281,6 +285,11 @@ public class SearchAfterBuilderTests extends ESTestCase { public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { return null; } + + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) { + return null; + } }; type = extractSortType(new SortField("field", source)); diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 396c05b3114..931d06f0793 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -198,7 +198,7 @@ public abstract class AbstractSortTestCase> extends EST }; return new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataLookup, null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, searcher, - () -> randomNonNegativeLong(), null, null) { + () -> randomNonNegativeLong(), null, null, () -> true) { @Override public MappedFieldType fieldMapper(String name) { diff --git a/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForDoublesTests.java b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForDoublesTests.java new file mode 100644 index 00000000000..25c246bb5b4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForDoublesTests.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.search.DocValueFormat; + +import java.io.IOException; + +public class BucketedSortForDoublesTests extends BucketedSortTestCase { + @Override + public BucketedSort.ForDoubles build(SortOrder sortOrder, DocValueFormat format, double[] values) { + return new BucketedSort.ForDoubles(bigArrays(), sortOrder, format) { + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + int index = -1; + + @Override + protected boolean advanceExact(int doc) throws IOException { + index = doc; + return doc < values.length; + } + + @Override + protected double docValue() throws IOException { + return values[index]; + } + }; + } + }; + } + + @Override + protected SortValue expectedSortValue(double v) { + return SortValue.from(v); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForFloatsTests.java b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForFloatsTests.java new file mode 100644 index 00000000000..0e48582e10e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForFloatsTests.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; +import org.elasticsearch.search.DocValueFormat; + +import java.io.IOException; + +public class BucketedSortForFloatsTests extends BucketedSortTestCase { + @Override + public BucketedSort.ForFloats build(SortOrder sortOrder, DocValueFormat format, double[] values) { + return new BucketedSort.ForFloats(bigArrays(), sortOrder, format) { + @Override + public boolean needsScores() { + return false; + } + + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + int index = -1; + + @Override + protected boolean advanceExact(int doc) throws IOException { + index = doc; + return doc < values.length; + } + + @Override + protected float docValue() throws IOException { + return (float) values[index]; + } + + @Override + public void setScorer(Scorable scorer) {} + }; + } + }; + } + + private BucketedSort.ForFloats buildForScores(SortOrder sortOrder, DocValueFormat format) { + return new BucketedSort.ForFloats(bigArrays(), sortOrder, format) { + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + Scorable scorer; + + @Override + public void 
setScorer(Scorable scorer) { + this.scorer = scorer; + } + + @Override + protected boolean advanceExact(int doc) throws IOException { + return scorer.docID() == doc; + } + + @Override + protected float docValue() throws IOException { + return scorer.score(); + } + }; + } + + @Override + public boolean needsScores() { + return true; + } + }; + } + + + @Override + protected SortValue expectedSortValue(double v) { + return SortValue.from(v); + } + + public void testScorer() throws IOException { + try (BucketedSort.ForFloats sort = buildForScores(SortOrder.DESC, DocValueFormat.RAW)) { + assertTrue(sort.needsScores()); + BucketedSort.Leaf leaf = sort.forLeaf(null); + MockScorable scorer = new MockScorable(); + leaf.setScorer(scorer); + scorer.doc = 1; + scorer.score = 10; + assertFalse(leaf.collectIfCompetitive(0, 0)); + assertTrue(leaf.collectIfCompetitive(1, 0)); + assertEquals(sort.getValue(0), SortValue.from(10.0)); + scorer.doc = 2; + scorer.score = 1; + assertFalse(leaf.collectIfCompetitive(2, 0)); + assertEquals(sort.getValue(0), SortValue.from(10.0)); + } + } + + + private class MockScorable extends Scorable { + private int doc; + private float score; + + @Override + public float score() throws IOException { + return score; + } + + @Override + public int docID() { + return doc; + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForLongsTests.java b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForLongsTests.java new file mode 100644 index 00000000000..cfd13c548e9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForLongsTests.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.search.DocValueFormat; + +import java.io.IOException; + +public class BucketedSortForLongsTests extends BucketedSortTestCase { + @Override + public BucketedSort.ForLongs build(SortOrder sortOrder, DocValueFormat format, double[] values) { + return new BucketedSort.ForLongs(bigArrays(), sortOrder, format) { + @Override + public Leaf forLeaf(LeafReaderContext ctx) throws IOException { + return new Leaf() { + int index = -1; + + @Override + protected boolean advanceExact(int doc) throws IOException { + index = doc; + return doc < values.length; + } + + @Override + protected long docValue() throws IOException { + return (long) values[index]; + } + }; + } + }; + } + + @Override + protected SortValue expectedSortValue(double v) { + return SortValue.from((long) v); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/sort/BucketedSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortTestCase.java new file mode 100644 index 00000000000..3b2f0b8a001 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortTestCase.java @@ -0,0 +1,178 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.MockPageCacheRecycler; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public abstract class BucketedSortTestCase extends ESTestCase { + /** + * Build a {@link BucketedSort} to test. Sorts built by this method shouldn't need scores. + * @param values values to test, always sent as doubles just to have + * numbers to test. subclasses should cast to their favorite types + */ + protected abstract T build(SortOrder sortOrder, DocValueFormat format, double[] values); + + /** + * Build the expected sort value for a value. 
+ */ + protected abstract SortValue expectedSortValue(double v); + + private T build(SortOrder order, double[] values) { + DocValueFormat format = randomFrom(DocValueFormat.RAW, DocValueFormat.BINARY, DocValueFormat.BOOLEAN); + return build(order, format, values); + } + + public final void testNeverCalled() { + SortOrder order = randomFrom(SortOrder.values()); + DocValueFormat format = randomFrom(DocValueFormat.RAW, DocValueFormat.BINARY, DocValueFormat.BOOLEAN); + try (T sort = build(order, format, new double[] {})) { + assertThat(sort.getOrder(), equalTo(order)); + assertThat(sort.getFormat(), equalTo(format)); + assertThat(sort.getValue(randomNonNegativeLong()), nullValue()); + assertFalse(sort.needsScores()); + } + } + + public final void testEmptyLeaf() throws IOException { + try (T sort = build(randomFrom(SortOrder.values()), new double[] {})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertFalse(leaf.advanceExact(0)); + assertThat(sort.getValue(randomNonNegativeLong()), nullValue()); + } + } + + public final void testSingleDoc() throws IOException { + try (T sort = build(randomFrom(SortOrder.values()), new double[] {1})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertTrue(leaf.collectIfCompetitive(0, 0)); + assertThat(sort.getValue(0), equalTo(expectedSortValue(1))); + } + } + + public void testNonCompetitive() throws IOException { + try (T sort = build(SortOrder.DESC, new double[] {2, 1})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertTrue(leaf.collectIfCompetitive(0, 0)); + assertFalse(leaf.collectIfCompetitive(1, 0)); + assertThat(sort.getValue(0), equalTo(expectedSortValue(2))); + } + } + + public void testCompetitive() throws IOException { + try (T sort = build(SortOrder.DESC, new double[] {1, 2})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertTrue(leaf.collectIfCompetitive(0, 0)); + assertTrue(leaf.collectIfCompetitive(1, 0)); + assertThat(sort.getValue(0), equalTo(expectedSortValue(2))); + } + } + + public void 
testNegativeValue() throws IOException { + try (T sort = build(SortOrder.DESC, new double[] {-1})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertTrue(leaf.collectIfCompetitive(0, 0)); + assertThat(sort.getValue(0), equalTo(expectedSortValue(-1))); + } + } + + public void testSomeBuckets() throws IOException { + try (T sort = build(SortOrder.DESC, new double[] {2, 3})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertTrue(leaf.collectIfCompetitive(0, 0)); + assertTrue(leaf.collectIfCompetitive(0, 1)); + assertTrue(leaf.collectIfCompetitive(0, 2)); + assertTrue(leaf.collectIfCompetitive(1, 0)); + assertThat(sort.getValue(0), equalTo(expectedSortValue(3))); + assertThat(sort.getValue(1), equalTo(expectedSortValue(2))); + assertThat(sort.getValue(2), equalTo(expectedSortValue(2))); + assertThat(sort.getValue(3), nullValue()); + } + } + + public void testBucketGaps() throws IOException { + try (T sort = build(SortOrder.DESC, new double[] {2})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertTrue(leaf.collectIfCompetitive(0, 0)); + assertTrue(leaf.collectIfCompetitive(0, 2)); + assertThat(sort.getValue(0), equalTo(expectedSortValue(2))); + assertThat(sort.getValue(1), nullValue()); + assertThat(sort.getValue(2), equalTo(expectedSortValue(2))); + assertThat(sort.getValue(3), nullValue()); + } + } + + public void testBucketsOutOfOrder() throws IOException { + try (T sort = build(SortOrder.DESC, new double[] {2})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + assertTrue(leaf.collectIfCompetitive(0, 1)); + assertTrue(leaf.collectIfCompetitive(0, 0)); + assertThat(sort.getValue(0), equalTo(expectedSortValue(2.0))); + assertThat(sort.getValue(1), equalTo(expectedSortValue(2.0))); + assertThat(sort.getValue(2), nullValue()); + } + } + + public void testManyBuckets() throws IOException { + // Set the bucket values in random order + int[] buckets = new int[10000]; + for (int b = 0; b < buckets.length; b++) { + buckets[b] = b; + } + 
Collections.shuffle(Arrays.asList(buckets), random()); + + double[] maxes = new double[buckets.length]; + + try (T sort = build(SortOrder.DESC, new double[] {2, 3, -1})) { + BucketedSort.Leaf leaf = sort.forLeaf(null); + for (int b : buckets) { + maxes[b] = 2; + assertTrue(leaf.collectIfCompetitive(0, b)); + if (randomBoolean()) { + maxes[b] = 3; + assertTrue(leaf.collectIfCompetitive(1, b)); + } + if (randomBoolean()) { + assertFalse(leaf.collectIfCompetitive(2, b)); + } + } + for (int b = 0; b < buckets.length; b++) { + assertThat(sort.getValue(b), equalTo(expectedSortValue(maxes[b]))); + } + assertThat(sort.getValue(buckets.length), nullValue()); + } + } + + protected BigArrays bigArrays() { + return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/sort/SortValueTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortValueTests.java new file mode 100644 index 00000000000..6287490dc5c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/sort/SortValueTests.java @@ -0,0 +1,114 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.test.AbstractNamedWriteableTestCase; + +import java.io.IOException; +import java.time.ZoneId; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; + +public class SortValueTests extends AbstractNamedWriteableTestCase { + private static final DocValueFormat STRICT_DATE_TIME = new DocValueFormat.DateTime(DateFormatter.forPattern("strict_date_time"), + ZoneId.of("UTC"), DateFieldMapper.Resolution.MILLISECONDS); + + @Override + protected Class categoryClass() { + return SortValue.class; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(SortValue.namedWriteables()); + } + + @Override + protected SortValue createTestInstance() { + return randomBoolean() ? 
SortValue.from(randomDouble()) : SortValue.from(randomLong()); + } + + @Override + protected SortValue mutateInstance(SortValue instance) throws IOException { + return randomValueOtherThanMany(mut -> instance.getKey().equals(mut.getKey()), this::createTestInstance); + } + + public void testFormatDouble() { + assertThat(SortValue.from(1.0).format(DocValueFormat.RAW), equalTo("1.0")); + // The date formatter coerces the double into a long to format it + assertThat(SortValue.from(1.0).format(STRICT_DATE_TIME), equalTo("1970-01-01T00:00:00.001Z")); + } + + public void testFormatLong() { + assertThat(SortValue.from(1).format(DocValueFormat.RAW), equalTo("1")); + assertThat(SortValue.from(1).format(STRICT_DATE_TIME), equalTo("1970-01-01T00:00:00.001Z")); + } + + public void testToXContentDouble() { + assertThat(toXContent(SortValue.from(1.0), DocValueFormat.RAW), equalTo("{\"test\":1.0}")); + // The date formatter coerces the double into a long to format it + assertThat(toXContent(SortValue.from(1.0), STRICT_DATE_TIME), equalTo("{\"test\":\"1970-01-01T00:00:00.001Z\"}")); + } + + public void testToXContentLong() { + assertThat(toXContent(SortValue.from(1), DocValueFormat.RAW), equalTo("{\"test\":1}")); + assertThat(toXContent(SortValue.from(1), STRICT_DATE_TIME), equalTo("{\"test\":\"1970-01-01T00:00:00.001Z\"}")); + } + + public void testCompareDifferentTypes() { + assertThat(SortValue.from(1.0), lessThan(SortValue.from(1))); + assertThat(SortValue.from(Double.MAX_VALUE), lessThan(SortValue.from(Long.MIN_VALUE))); + assertThat(SortValue.from(1), greaterThan(SortValue.from(1.0))); + assertThat(SortValue.from(Long.MIN_VALUE), greaterThan(SortValue.from(Double.MAX_VALUE))); + } + + public void testCompareDoubles() { + double r = randomDouble(); + assertThat(SortValue.from(r), equalTo(SortValue.from(r))); + assertThat(SortValue.from(r), lessThan(SortValue.from(r + 1))); + assertThat(SortValue.from(r), greaterThan(SortValue.from(r - 1))); + } + + public void 
testCompareLongs() { + long r = randomLongBetween(Long.MIN_VALUE + 1, Long.MAX_VALUE - 1); + assertThat(SortValue.from(r), equalTo(SortValue.from(r))); + assertThat(SortValue.from(r), lessThan(SortValue.from(r + 1))); + assertThat(SortValue.from(r), greaterThan(SortValue.from(r - 1))); + } + + public String toXContent(SortValue sortValue, DocValueFormat format) { + return Strings.toString(new ToXContentFragment() { + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("test"); + return sortValue.toXContent(builder, format); + } + }); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 2b3cd45bb48..73e7941b43d 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -181,7 +181,7 @@ public abstract class AbstractSuggestionBuilderTestCase true); SuggestionContext suggestionContext = suggestionBuilder.build(mockShardContext); assertEquals(toBytesRef(suggestionBuilder.text()), suggestionContext.getText()); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index 8914bad5c41..1b8f78c761f 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -31,9 +31,15 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + /** Base test case for subclasses of MappedFieldType */ public abstract class FieldTypeTestCase extends ESTestCase { + 
public static final QueryShardContext MOCK_QSC = createMockQueryShardContext(true); + public static final QueryShardContext MOCK_QSC_DISALLOW_EXPENSIVE = createMockQueryShardContext(false); + /** Abstraction for mutating a property of a MappedFieldType */ public abstract static class Modifier { /** The name of the property that is being modified. Used in test failure messages. */ @@ -243,6 +249,16 @@ public abstract class FieldTypeTestCase extends ESTestCase { "} " + super.toString(); } + protected QueryShardContext randomMockShardContext() { + return randomFrom(MOCK_QSC, MOCK_QSC_DISALLOW_EXPENSIVE); + } + + static QueryShardContext createMockQueryShardContext(boolean allowExpensiveQueries) { + QueryShardContext queryShardContext = mock(QueryShardContext.class); + when(queryShardContext.allowExpensiveQueries()).thenReturn(allowExpensiveQueries); + return queryShardContext; + } + public void testClone() { MappedFieldType fieldType = createNamedDefaultFieldType(); MappedFieldType clone = fieldType.clone(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 714f86cf798..c8fe29b09b6 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -69,8 +69,8 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; -import org.elasticsearch.search.fetch.subphase.FetchSourceSubPhase; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase; +import 
org.elasticsearch.search.fetch.subphase.FetchSourcePhase; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; @@ -171,11 +171,23 @@ public abstract class AggregatorTestCase extends ESTestCase { return aggregator; } + /** + * Create a {@linkplain SearchContext} for testing an {@link Aggregator}. + */ protected SearchContext createSearchContext(IndexSearcher indexSearcher, IndexSettings indexSettings, Query query, MultiBucketConsumer bucketConsumer, MappedFieldType... fieldTypes) { + return createSearchContext(indexSearcher, indexSettings, query, bucketConsumer, new NoneCircuitBreakerService(), fieldTypes); + } + + protected SearchContext createSearchContext(IndexSearcher indexSearcher, + IndexSettings indexSettings, + Query query, + MultiBucketConsumer bucketConsumer, + CircuitBreakerService circuitBreakerService, + MappedFieldType... fieldTypes) { QueryCache queryCache = new DisabledQueryCache(indexSettings); QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override @@ -195,16 +207,20 @@ public abstract class AggregatorTestCase extends ESTestCase { when(searchContext.numberOfShards()).thenReturn(1); when(searchContext.searcher()).thenReturn(contextIndexSearcher); when(searchContext.fetchPhase()) - .thenReturn(new FetchPhase(Arrays.asList(new FetchSourceSubPhase(), new DocValueFieldsFetchSubPhase()))); + .thenReturn(new FetchPhase(Arrays.asList(new FetchSourcePhase(), new FetchDocValuesPhase()))); when(searchContext.bitsetFilterCache()).thenReturn(new BitsetFilterCache(indexSettings, mock(Listener.class))); - CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); IndexShard indexShard = mock(IndexShard.class); when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0)); when(searchContext.indexShard()).thenReturn(indexShard); when(searchContext.aggregations()) .thenReturn(new 
SearchContextAggregations(AggregatorFactories.EMPTY, bucketConsumer)); when(searchContext.query()).thenReturn(query); - when(searchContext.bigArrays()).thenReturn(new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), circuitBreakerService)); + /* + * Always use the circuit breaking big arrays instance so that the CircuitBreakerService + * we're passed gets a chance to break. + */ + BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), circuitBreakerService).withCircuitBreaking(); + when(searchContext.bigArrays()).thenReturn(bigArrays); // TODO: now just needed for top_hits, this will need to be revised for other agg unit tests: MapperService mapperService = mapperServiceMock(); @@ -280,7 +296,7 @@ public abstract class AggregatorTestCase extends ESTestCase { return new QueryShardContext(0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, getIndexFieldDataLookup(mapperService, circuitBreakerService), mapperService, null, getMockScriptService(), xContentRegistry(), - writableRegistry(), null, searcher, System::currentTimeMillis, null, null); + writableRegistry(), null, searcher, System::currentTimeMillis, null, null, () -> true); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index ef341fa5e07..faa9b680fdf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -414,7 +414,7 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { QueryShardContext createShardContext(IndexSearcher searcher) { return new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataService::getForField, mapperService, similarityService, scriptService, xContentRegistry, - namedWriteableRegistry, this.client, searcher, () -> 
nowInMillis, null, indexNameMatcher()); + namedWriteableRegistry, this.client, searcher, () -> nowInMillis, null, indexNameMatcher(), () -> true); } ScriptModule createScriptModule(List scriptPlugins) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractNamedWriteableTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractNamedWriteableTestCase.java new file mode 100644 index 00000000000..663cc06ff05 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractNamedWriteableTestCase.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +/** + * Standard test case for testing the wire serialization of subclasses of {@linkplain NamedWriteable}. + * See {@link AbstractWireSerializingTestCase} for subclasses of {@link Writeable}. 
While you *can* + * use {@linkplain AbstractWireSerializingTestCase} to test susbclasses of {@linkplain NamedWriteable} + * this superclass will also test reading and writing the name. + */ +public abstract class AbstractNamedWriteableTestCase extends AbstractWireTestCase { + // Force subclasses to override to customize the registry for their NamedWriteable + @Override + protected abstract NamedWriteableRegistry getNamedWriteableRegistry(); + + /** + * The type of {@link NamedWriteable} to read. + */ + protected abstract Class categoryClass(); + + @Override + protected T copyInstance(T instance, Version version) throws IOException { + return copyNamedWriteable(instance, getNamedWriteableRegistry(), categoryClass(), version); + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java index ce1e5f7ce97..dca672bdc3b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.test; import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,6 +28,7 @@ import java.io.IOException; /** * Standard test case for testing the wire serialization of subclasses of {@linkplain Writeable}. + * See {@link AbstractNamedWriteableTestCase} for subclasses of {@link NamedWriteable}. 
*/ public abstract class AbstractWireSerializingTestCase extends AbstractWireTestCase { /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 0176bfbad65..5566f31cca2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,6 +29,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -1193,6 +1194,26 @@ public abstract class ESTestCase extends LuceneTestCase { return copyInstance(original, namedWriteableRegistry, (out, value) -> value.writeTo(out), reader, version); } + /** + * Create a copy of an original {@link NamedWriteable} object by running it through a {@link BytesStreamOutput} and + * reading it in again using a provided {@link Writeable.Reader}. + */ + public static T copyNamedWriteable(T original, NamedWriteableRegistry namedWriteableRegistry, + Class categoryClass) throws IOException { + return copyNamedWriteable(original, namedWriteableRegistry, categoryClass, Version.CURRENT); + } + + /** + * Same as {@link #copyNamedWriteable(NamedWriteable, NamedWriteableRegistry, Class)} but also allows to provide + * a {@link Version} argument which will be used to write and read back the object. 
+ */ + public static T copyNamedWriteable(T original, NamedWriteableRegistry namedWriteableRegistry, + Class categoryClass, Version version) throws IOException { + return copyInstance(original, namedWriteableRegistry, + (out, value) -> out.writeNamedWriteable(value), + in -> in.readNamedWriteable(categoryClass), version); + } + protected static T copyInstance(T original, NamedWriteableRegistry namedWriteableRegistry, Writeable.Writer writer, Writeable.Reader reader, Version version) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/NotEqualMessageBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/NotEqualMessageBuilder.java index a1045e2713e..fdb3368d93e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/NotEqualMessageBuilder.java +++ b/test/framework/src/main/java/org/elasticsearch/test/NotEqualMessageBuilder.java @@ -56,7 +56,9 @@ public class NotEqualMessageBuilder { actual = new TreeMap<>(actual); expected = new TreeMap<>(expected); for (Map.Entry expectedEntry : expected.entrySet()) { - compare(expectedEntry.getKey(), actual.remove(expectedEntry.getKey()), expectedEntry.getValue()); + boolean hadKey = actual.containsKey(expectedEntry.getKey()); + Object actualValue = actual.remove(expectedEntry.getKey()); + compare(expectedEntry.getKey(), hadKey, actualValue, expectedEntry.getValue()); } for (Map.Entry unmatchedEntry : actual.entrySet()) { field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]"); @@ -69,7 +71,7 @@ public class NotEqualMessageBuilder { public void compareLists(List actual, List expected) { int i = 0; while (i < actual.size() && i < expected.size()) { - compare(Integer.toString(i), actual.get(i), expected.get(i)); + compare(Integer.toString(i), true, actual.get(i), expected.get(i)); i++; } if (actual.size() == expected.size()) { @@ -87,12 +89,16 @@ public class NotEqualMessageBuilder { * 
Compare two values. * @param field the name of the field being compared. */ - public void compare(String field, @Nullable Object actual, Object expected) { + public void compare(String field, boolean hadKey, @Nullable Object actual, Object expected) { if (expected instanceof Map) { - if (actual == null) { + if (false == hadKey) { field(field, "expected map but not found"); return; } + if (actual == null) { + field(field, "expected map but was [null]"); + return; + } if (false == actual instanceof Map) { field(field, "expected map but found [" + actual + "]"); return; @@ -112,10 +118,14 @@ public class NotEqualMessageBuilder { return; } if (expected instanceof List) { - if (actual == null) { + if (false == hadKey) { field(field, "expected list but not found"); return; } + if (actual == null) { + field(field, "expected list but was [null]"); + return; + } if (false == actual instanceof List) { field(field, "expected list but found [" + actual + "]"); return; @@ -134,10 +144,18 @@ public class NotEqualMessageBuilder { indent -= 1; return; } - if (actual == null) { + if (false == hadKey) { field(field, "expected [" + expected + "] but not found"); return; } + if (actual == null) { + if (expected == null) { + field(field, "same [" + expected + "]"); + return; + } + field(field, "expected [" + expected + "] but was [null]"); + return; + } if (Objects.equals(expected, actual)) { if (expected instanceof String) { String expectedString = (String) expected; diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index d12c221cd0b..e7b5e937c0c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -45,7 +45,7 @@ import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchPhase; import 
org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; @@ -261,12 +261,12 @@ public class TestSearchContext extends SearchContext { } @Override - public DocValueFieldsContext docValueFieldsContext() { + public FetchDocValuesContext docValuesContext() { return null; } @Override - public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) { + public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { return null; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java index 09f88f42492..211fa2f2095 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java @@ -89,7 +89,7 @@ public class MatchAssertion extends Assertion { if (expectedValue.equals(actualValue) == false) { NotEqualMessageBuilder message = new NotEqualMessageBuilder(); - message.compare(getField(), actualValue, expectedValue); + message.compare(getField(), true, actualValue, expectedValue); throw new AssertionError(getField() + " didn't match expected value:\n" + message); } } diff --git a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java index 8a8842487f1..684210f13c5 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java 
+++ b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java @@ -43,7 +43,7 @@ public class MockSearchServiceTests extends ESTestCase { final long nowInMillis = randomNonNegativeLong(); SearchContext s = new TestSearchContext(new QueryShardContext(0, new IndexSettings(EMPTY_INDEX_METADATA, Settings.EMPTY), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null)) { + xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true)) { @Override public SearchShardTarget shardTarget() { diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java index 2bd72347441..ddcffa7ac5a 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java @@ -21,6 +21,10 @@ package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.test.ESTestCase; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.containsString; + public class MatchAssertionTests extends ESTestCase { public void testNull() { @@ -39,4 +43,13 @@ public class MatchAssertionTests extends ESTestCase { expectThrows(AssertionError.class, () -> matchAssertion.doAssert(null, "/exp/")); } } + + public void testNullInMap() { + XContentLocation xContentLocation = new XContentLocation(0, 0); + MatchAssertion matchAssertion = new MatchAssertion(xContentLocation, "field", singletonMap("a", null)); + matchAssertion.doAssert(singletonMap("a", null), matchAssertion.getExpectedValue()); + AssertionError e = expectThrows(AssertionError.class, () -> 
+ matchAssertion.doAssert(emptyMap(), matchAssertion.getExpectedValue())); + assertThat(e.getMessage(), containsString("expected [null] but not found")); + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java index c56cbea5607..adc532f2685 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java @@ -25,6 +25,8 @@ import org.elasticsearch.xpack.analytics.cumulativecardinality.CumulativeCardina import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper; import org.elasticsearch.xpack.analytics.stringstats.InternalStringStats; import org.elasticsearch.xpack.analytics.stringstats.StringStatsAggregationBuilder; +import org.elasticsearch.xpack.analytics.topmetrics.InternalTopMetrics; +import org.elasticsearch.xpack.analytics.topmetrics.TopMetricsAggregationBuilder; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.analytics.action.AnalyticsStatsAction; @@ -40,8 +42,9 @@ import static java.util.Collections.singletonList; public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugin, MapperPlugin { - // TODO this should probably become more structured once Analytics plugin has more than just one agg + // TODO this should probably become more structured public static AtomicLong cumulativeCardUsage = new AtomicLong(0); + public static AtomicLong topMetricsUsage = new AtomicLong(0); private final boolean transportClientMode; public AnalyticsPlugin(Settings settings) { @@ -67,12 +70,17 @@ public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugi new AggregationSpec( StringStatsAggregationBuilder.NAME, StringStatsAggregationBuilder::new, - 
StringStatsAggregationBuilder::parse).addResultReader(InternalStringStats::new), + StringStatsAggregationBuilder.PARSER).addResultReader(InternalStringStats::new), new AggregationSpec( BoxplotAggregationBuilder.NAME, BoxplotAggregationBuilder::new, (ContextParser) (p, c) -> BoxplotAggregationBuilder.parse(c, p)) - .addResultReader(InternalBoxplot::new) + .addResultReader(InternalBoxplot::new), + new AggregationSpec( + TopMetricsAggregationBuilder.NAME, + TopMetricsAggregationBuilder::new, + track(TopMetricsAggregationBuilder.PARSER, topMetricsUsage)) + .addResultReader(InternalTopMetrics::new) ); } @@ -98,4 +106,16 @@ public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugi public Map getMappers() { return Collections.singletonMap(HistogramFieldMapper.CONTENT_TYPE, new HistogramFieldMapper.TypeParser()); } + + /** + * Track successful parsing. + */ + private static ContextParser track(ContextParser realParser, AtomicLong usage) { + return (parser, name) -> { + T value = realParser.parse(parser, name); + // Intentionally doesn't count unless the parser returns cleanly. 
+ usage.addAndGet(1); + return value; + }; + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java index db17bb15f57..52833d47974 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java @@ -52,6 +52,7 @@ public class TransportAnalyticsStatsAction extends TransportNodesAction { public static final String NAME = "boxplot"; @@ -37,7 +37,7 @@ public class BoxplotAggregationBuilder extends ValuesSourceAggregationBuilder.Le static { PARSER = new ObjectParser<>(BoxplotAggregationBuilder.NAME); - ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); + ValuesSourceParserHelper.declareAnyFields(PARSER, true, true); PARSER.declareDouble(BoxplotAggregationBuilder::compression, COMPRESSION_FIELD); } @@ -98,7 +98,7 @@ public class BoxplotAggregationBuilder extends ValuesSourceAggregationBuilder.Le @Override protected BoxplotAggregatorFactory innerBuild(QueryShardContext queryShardContext, - ValuesSourceConfig config, + ValuesSourceConfig config, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder) throws IOException { return new BoxplotAggregatorFactory(name, config, compression, queryShardContext, parent, subFactoriesBuilder, metaData); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java index 1c3a01b773d..dec42ddbd61 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java @@ -11,6 +11,8 @@ import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.index.fielddata.HistogramValue; +import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -29,12 +31,12 @@ import java.util.Map; public class BoxplotAggregator extends NumericMetricsAggregator.MultiValue { - private final ValuesSource.Numeric valuesSource; + private final ValuesSource valuesSource; private final DocValueFormat format; protected ObjectArray states; protected final double compression; - BoxplotAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, double compression, + BoxplotAggregator(String name, ValuesSource valuesSource, DocValueFormat formatter, double compression, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); @@ -58,23 +60,38 @@ public class BoxplotAggregator extends NumericMetricsAggregator.MultiValue { return LeafBucketCollector.NO_OP_COLLECTOR; } final BigArrays bigArrays = context.bigArrays(); - final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx); - return new LeafBucketCollectorBase(sub, values) { - @Override - public void collect(int doc, long bucket) throws IOException { - states = bigArrays.grow(states, bucket + 1); - - if (values.advanceExact(doc)) { + if (valuesSource instanceof ValuesSource.Histogram) { + final HistogramValues values = ((ValuesSource.Histogram)valuesSource).getHistogramValues(ctx); + return new LeafBucketCollectorBase(sub, values) { + 
@Override + public void collect(int doc, long bucket) throws IOException { TDigestState state = getExistingOrNewHistogram(bigArrays, bucket); if (values.advanceExact(doc)) { - final int valueCount = values.docValueCount(); - for (int i = 0; i < valueCount; i++) { - state.add(values.nextValue()); + final HistogramValue sketch = values.histogram(); + while(sketch.next()) { + state.add(sketch.value(), sketch.count()); } } } - } - }; + }; + } else { + final SortedNumericDoubleValues values = ((ValuesSource.Numeric)valuesSource).doubleValues(ctx); + return new LeafBucketCollectorBase(sub, values) { + @Override + public void collect(int doc, long bucket) throws IOException { + states = bigArrays.grow(states, bucket + 1); + if (values.advanceExact(doc)) { + TDigestState state = getExistingOrNewHistogram(bigArrays, bucket); + if (values.advanceExact(doc)) { + final int valueCount = values.docValueCount(); + for (int i = 0; i < valueCount; i++) { + state.add(values.nextValue()); + } + } + } + } + }; + } } private TDigestState getExistingOrNewHistogram(final BigArrays bigArrays, long bucket) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java index 190f65137f4..ac34667a9e1 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java @@ -20,12 +20,12 @@ import java.io.IOException; import java.util.List; import java.util.Map; -public class BoxplotAggregatorFactory extends ValuesSourceAggregatorFactory { +public class BoxplotAggregatorFactory extends ValuesSourceAggregatorFactory { private final double compression; BoxplotAggregatorFactory(String name, - ValuesSourceConfig config, + ValuesSourceConfig config, double 
compression, QueryShardContext queryShardContext, AggregatorFactory parent, @@ -46,7 +46,7 @@ public class BoxplotAggregatorFactory extends ValuesSourceAggregatorFactory getCharOccurrences() { + return charOccurrences; + } + + boolean getShowDistribution() { + return showDistribution; + } + public String getCountAsString() { return format.format(getCount()).toString(); } @@ -282,6 +294,7 @@ public class InternalStringStats extends InternalAggregation { minLength == other.minLength && maxLength == other.maxLength && totalLength == other.totalLength && + Objects.equals(charOccurrences, other.charOccurrences) && showDistribution == other.showDistribution; } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java index 60c5bde1e7b..8602bfb6eec 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -27,23 +26,17 @@ import java.util.Map; import java.util.Objects; public class StringStatsAggregationBuilder extends ValuesSourceAggregationBuilder { - public static final String NAME = "string_stats"; - private boolean showDistribution = false; - private static final ObjectParser PARSER; private static 
final ParseField SHOW_DISTRIBUTION_FIELD = new ParseField("show_distribution"); - + public static final ObjectParser PARSER = + ObjectParser.fromBuilder(NAME, StringStatsAggregationBuilder::new); static { - PARSER = new ObjectParser<>(StringStatsAggregationBuilder.NAME); ValuesSourceParserHelper.declareBytesFields(PARSER, true, true); - PARSER.declareBoolean(StringStatsAggregationBuilder::showDistribution, StringStatsAggregationBuilder.SHOW_DISTRIBUTION_FIELD); } - public static StringStatsAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { - return PARSER.parse(parser, new StringStatsAggregationBuilder(aggregationName), null); - } + private boolean showDistribution = false; public StringStatsAggregationBuilder(String name) { super(name, CoreValuesSourceType.BYTES, ValueType.STRING); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java new file mode 100644 index 00000000000..06b44cc22e9 --- /dev/null +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java @@ -0,0 +1,178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.sort.SortValue; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class InternalTopMetrics extends InternalNumericMetricsAggregation.MultiValue { + private final DocValueFormat sortFormat; + private final SortOrder sortOrder; + private final SortValue sortValue; + private final String metricName; + private final double metricValue; + + public InternalTopMetrics(String name, DocValueFormat sortFormat, @Nullable SortOrder sortOrder, SortValue sortValue, String metricName, + double metricValue, List pipelineAggregators, Map metaData) { + super(name, pipelineAggregators, metaData); + this.sortFormat = sortFormat; + this.sortOrder = sortOrder; + this.sortValue = sortValue; + this.metricName = metricName; + this.metricValue = metricValue; + } + + static InternalTopMetrics buildEmptyAggregation(String name, String metricField, + List pipelineAggregators, Map metaData) { + return new InternalTopMetrics(name, DocValueFormat.RAW, SortOrder.ASC, null, metricField, Double.NaN, pipelineAggregators, + metaData); + } + + /** + * Read from a stream. 
+ */ + public InternalTopMetrics(StreamInput in) throws IOException { + super(in); + sortFormat = in.readNamedWriteable(DocValueFormat.class); + sortOrder = SortOrder.readFromStream(in); + sortValue = in.readOptionalNamedWriteable(SortValue.class); + metricName = in.readString(); + metricValue = in.readDouble(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeNamedWriteable(sortFormat); + sortOrder.writeTo(out); + out.writeOptionalNamedWriteable(sortValue); + out.writeString(metricName); + out.writeDouble(metricValue); + } + + @Override + public String getWriteableName() { + return TopMetricsAggregationBuilder.NAME; + } + + @Override + public Object getProperty(List path) { + if (path.isEmpty()) { + return this; + } + if (path.size() == 1 && metricName.contentEquals(path.get(0))) { + return metricValue; + } + throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path); + } + + @Override + public InternalTopMetrics reduce(List aggregations, ReduceContext reduceContext) { + if (false == isMapped()) { + return this; + } + DocValueFormat bestSortFormat = sortFormat; + SortValue bestSortValue = sortValue; + double bestMetricValue = metricValue; + int reverseMul = sortOrder.reverseMul(); + for (InternalAggregation agg : aggregations) { + InternalTopMetrics result = (InternalTopMetrics) agg; + if (result.sortValue != null && reverseMul * bestSortValue.compareTo(result.sortValue) > 0) { + bestSortFormat = result.sortFormat; + bestSortValue = result.sortValue; + bestMetricValue = result.metricValue; + } + } + return new InternalTopMetrics(getName(), bestSortFormat, sortOrder, bestSortValue, metricName, bestMetricValue, + pipelineAggregators(), getMetaData()); + } + + @Override + public boolean isMapped() { + return sortValue != null; + } + + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.startArray("top"); + if (sortValue !=
null) { + builder.startObject(); + { + builder.startArray("sort"); + sortValue.toXContent(builder, sortFormat); + builder.endArray(); + builder.startObject("metrics"); + { + builder.field(metricName, Double.isNaN(metricValue) ? null : metricValue); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endArray(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), sortFormat, sortOrder, sortValue, metricName, metricValue); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj) == false) return false; + InternalTopMetrics other = (InternalTopMetrics) obj; + return sortFormat.equals(other.sortFormat) && + sortOrder.equals(other.sortOrder) && + Objects.equals(sortValue, other.sortValue) && + metricName.equals(other.metricName) && + Double.compare(metricValue, other.metricValue) == 0; + } + + @Override + public double value(String name) { + if (metricName.equals(name)) { + return metricValue; + } + throw new IllegalArgumentException("unknown metric [" + name + "]"); + } + + DocValueFormat getSortFormat() { + return sortFormat; + } + + SortOrder getSortOrder() { + return sortOrder; + } + + SortValue getSortValue() { + return sortValue; + } + + String getFormattedSortValue() { + return sortValue.format(sortFormat); + } + + String getMetricName() { + return metricName; + } + + double getMetricValue() { + return metricValue; + } +} diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java new file mode 100644 index 00000000000..31616545590 --- /dev/null +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ContextParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; +import org.elasticsearch.search.sort.SortBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.search.builder.SearchSourceBuilder.SORT_FIELD; + +public class TopMetricsAggregationBuilder extends AbstractAggregationBuilder { + public static final String NAME = "top_metrics"; + public static final ParseField METRIC_FIELD = new ParseField("metric"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + false, (args, name) -> { + @SuppressWarnings("unchecked") + List> sorts = (List>) args[0]; + MultiValuesSourceFieldConfig metricField = (MultiValuesSourceFieldConfig) args[1]; + return new TopMetricsAggregationBuilder(name, sorts, metricField); + }); + static { + PARSER.declareField(constructorArg(), (p, n) -> 
SortBuilder.fromXContent(p), SORT_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); + ContextParser metricParser = MultiValuesSourceFieldConfig.PARSER.apply(true, false); + PARSER.declareObject(constructorArg(), (p, n) -> metricParser.parse(p, null).build(), METRIC_FIELD); + } + + private final List> sortBuilders; + // TODO MultiValuesSourceFieldConfig has more things than we support and less things than we want to support + private final MultiValuesSourceFieldConfig metricField; + + /** + * Ctor for parsing. + */ + public TopMetricsAggregationBuilder(String name, List> sortBuilders, MultiValuesSourceFieldConfig metricField) { + super(name); + if (sortBuilders.size() != 1) { + throw new IllegalArgumentException("[sort] must contain exactly one sort"); + } + this.sortBuilders = sortBuilders; + this.metricField = metricField; + } + + /** + * Cloning ctor for reducing. + */ + public TopMetricsAggregationBuilder(TopMetricsAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, + Map metaData) { + super(clone, factoriesBuilder, metaData); + this.sortBuilders = clone.sortBuilders; + this.metricField = clone.metricField; + } + + /** + * Read from a stream. 
+ */ + public TopMetricsAggregationBuilder(StreamInput in) throws IOException { + super(in); + @SuppressWarnings("unchecked") + List> sortBuilders = (List>) (List) in.readNamedWriteableList(SortBuilder.class); + this.sortBuilders = sortBuilders; + this.metricField = new MultiValuesSourceFieldConfig(in); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeNamedWriteableList(sortBuilders); + metricField.writeTo(out); + } + + @Override + protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map metaData) { + return new TopMetricsAggregationBuilder(this, factoriesBuilder, metaData); + } + + @Override + protected AggregatorFactory doBuild(QueryShardContext queryShardContext, AggregatorFactory parent, Builder subFactoriesBuilder) + throws IOException { + return new TopMetricsAggregatorFactory(name, queryShardContext, parent, subFactoriesBuilder, metaData, sortBuilders, metricField); + } + + @Override + protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.startArray(SORT_FIELD.getPreferredName()); + for (SortBuilder sort : sortBuilders) { + sort.toXContent(builder, params); + } + builder.endArray(); + builder.field(METRIC_FIELD.getPreferredName(), metricField); + } + builder.endObject(); + return builder; + } + + @Override + public String getType() { + return NAME; + } + + List> getSortBuilders() { + return sortBuilders; + } + + MultiValuesSourceFieldConfig getMetricField() { + return metricField; + } +} diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java new file mode 100644 index 00000000000..74622d66e3d --- /dev/null +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; +import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.index.fielddata.NumericDoubleValues; +import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortValue; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * Collects the {@code top_metrics} aggregation, which functions like a memory + * efficient but limited version of the {@code top_hits} aggregation. Amortized, + * each bucket should take something like 16 bytes. Because of this, unlike + * {@code top_hits}, you can sort by the buckets of this metric. + * + * This extends {@linkplain NumericMetricsAggregator.MultiValue} as a compromise + * to allow sorting on the metric. Right now it only collects a single metric + * but we expect it to collect a list of them in the future. 
Also in the future + * we expect it to allow collecting non-numeric metrics which'll change how we + * do the inheritance. Finally, we also expect it to allow collecting more than + * one document worth of metrics. Once that happens we'll need to come up with + * some way to pick which document's metrics to use for the sort. + */ +class TopMetricsAggregator extends NumericMetricsAggregator.MultiValue { + private final BucketedSort sort; + private final String metricName; + private final ValuesSource.Numeric metricValueSource; + private DoubleArray values; + + TopMetricsAggregator(String name, SearchContext context, Aggregator parent, List pipelineAggregators, + Map metaData, BucketedSort sort, + String metricName, ValuesSource.Numeric metricValueSource) throws IOException { + super(name, context, parent, pipelineAggregators, metaData); + this.sort = sort; + this.metricName = metricName; + this.metricValueSource = metricValueSource; + if (metricValueSource != null) { + values = context.bigArrays().newDoubleArray(1, false); + values.fill(0, values.size(), Double.NaN); + } + } + + @Override + public boolean hasMetric(String name) { + return metricName.equals(name); + } + + @Override + public double metric(String name, long owningBucketOrd) { + return values.get(owningBucketOrd); + } + + @Override + public ScoreMode scoreMode() { + boolean needs = (sort != null && sort.needsScores()) || (metricValueSource != null && metricValueSource.needsScores()); + return needs ? 
ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; + } + + @Override + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException { + assert sub == LeafBucketCollector.NO_OP_COLLECTOR : "Expected noop but was " + sub.toString(); + + if (metricValueSource == null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } + BucketedSort.Leaf leafSort = sort.forLeaf(ctx); + // TODO allow configuration of value mode + NumericDoubleValues metricValues = MultiValueMode.AVG.select(metricValueSource.doubleValues(ctx)); + + return new LeafBucketCollector() { + @Override + public void collect(int doc, long bucket) throws IOException { + if (leafSort.collectIfCompetitive(doc, bucket)) { + if (bucket >= values.size()) { + long oldSize = values.size(); + values = context.bigArrays().grow(values, bucket + 1); + values.fill(oldSize, values.size(), Double.NaN); + } + double metricValue = metricValues.advanceExact(doc) ? metricValues.doubleValue() : Double.NaN; + values.set(bucket, metricValue); + } + } + + @Override + public void setScorer(Scorable s) throws IOException { + leafSort.setScorer(s); + } + }; + } + + @Override + public InternalAggregation buildAggregation(long bucket) throws IOException { + if (metricValueSource == null) { + return buildEmptyAggregation(); + } + double metricValue = values.get(bucket); + SortValue sortValue = sort.getValue(bucket); + return new InternalTopMetrics(name, sort.getFormat(), sort.getOrder(), sortValue, metricName, metricValue, pipelineAggregators(), + metaData()); + } + + @Override + public InternalTopMetrics buildEmptyAggregation() { + // The sort format and sort order aren't used in reduction so we pass the simplest thing. 
+ return InternalTopMetrics.buildEmptyAggregation(name, metricName, pipelineAggregators(), + metaData()); + } + + @Override + public void doClose() { + Releasables.close(sort, values); + } +} diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java new file mode 100644 index 00000000000..879312cd348 --- /dev/null +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class TopMetricsAggregatorFactory extends AggregatorFactory { + private final List> sortBuilders; + private final MultiValuesSourceFieldConfig metricField; + + public 
TopMetricsAggregatorFactory(String name, QueryShardContext queryShardContext, AggregatorFactory parent, + Builder subFactoriesBuilder, Map metaData, List> sortBuilders, + MultiValuesSourceFieldConfig metricField) throws IOException { + super(name, queryShardContext, parent, subFactoriesBuilder, metaData); + this.sortBuilders = sortBuilders; + this.metricField = metricField; + } + + @Override + protected TopMetricsAggregator createInternal(SearchContext searchContext, Aggregator parent, boolean collectsFromSingleBucket, + List pipelineAggregators, Map metaData) throws IOException { + ValuesSourceConfig metricFieldSource = ValuesSourceConfig.resolve(queryShardContext, ValueType.NUMERIC, + metricField.getFieldName(), metricField.getScript(), metricField.getMissing(), metricField.getTimeZone(), null); + ValuesSource.Numeric metricValueSource = metricFieldSource.toValuesSource(queryShardContext); + if (metricValueSource == null) { + return createUnmapped(searchContext, parent, pipelineAggregators, metaData); + } + BucketedSort bucketedSort = sortBuilders.get(0).buildBucketedSort(searchContext.getQueryShardContext()); + + return new TopMetricsAggregator(name, searchContext, parent, pipelineAggregators, metaData, bucketedSort, + metricField.getFieldName(), metricValueSource); + } + + private TopMetricsAggregator createUnmapped(SearchContext searchContext, Aggregator parent, + List pipelineAggregators, Map metaData) throws IOException { + return new TopMetricsAggregator(name, searchContext, parent, pipelineAggregators, metaData, null, metricField.getFieldName(), + null); + } + +} diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramPercentileAggregationTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramPercentileAggregationTests.java index ed17bd0a5ec..4c55efe9b83 100644 --- 
a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramPercentileAggregationTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramPercentileAggregationTests.java @@ -27,6 +27,8 @@ import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; import org.elasticsearch.search.aggregations.metrics.TDigestState; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; +import org.elasticsearch.xpack.analytics.boxplot.Boxplot; +import org.elasticsearch.xpack.analytics.boxplot.BoxplotAggregationBuilder; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import java.util.ArrayList; @@ -131,8 +133,7 @@ public class HistogramPercentileAggregationTests extends ESSingleNodeTestCase { } } - public void testTDigestHistogram() throws Exception { - + private void setupTDigestHistogram(int compression) throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() .startObject("_doc") @@ -170,8 +171,6 @@ public class HistogramPercentileAggregationTests extends ESSingleNodeTestCase { PutMappingRequest request2 = new PutMappingRequest("pre_agg").type("_doc").source(xContentBuilder2); client().admin().indices().putMapping(request2).actionGet(); - - int compression = TestUtil.nextInt(random(), 200, 300); TDigestState histogram = new TDigestState(compression); BulkRequest bulkRequest = new BulkRequest(); @@ -218,6 +217,11 @@ public class HistogramPercentileAggregationTests extends ESSingleNodeTestCase { response = client().prepareSearch("pre_agg").get(); assertEquals(numDocs / frq, response.getHits().getTotalHits().value); + } + + public void testTDigestHistogram() throws Exception { + int compression = TestUtil.nextInt(random(), 200, 300); + setupTDigestHistogram(compression); PercentilesAggregationBuilder builder = 
AggregationBuilders.percentiles("agg").field("inner.data").method(PercentilesMethod.TDIGEST) @@ -236,6 +240,31 @@ public class HistogramPercentileAggregationTests extends ESSingleNodeTestCase { } } + public void testBoxplotHistogram() throws Exception { + int compression = TestUtil.nextInt(random(), 200, 300); + setupTDigestHistogram(compression); + BoxplotAggregationBuilder bpBuilder = new BoxplotAggregationBuilder("agg").field("inner.data").compression(compression); + + SearchResponse bpResponseRaw = client().prepareSearch("raw").addAggregation(bpBuilder).get(); + SearchResponse bpResponsePreAgg = client().prepareSearch("pre_agg").addAggregation(bpBuilder).get(); + SearchResponse bpResponseBoth = client().prepareSearch("raw", "pre_agg").addAggregation(bpBuilder).get(); + + Boxplot bpRaw = bpResponseRaw.getAggregations().get("agg"); + Boxplot bpPreAgg = bpResponsePreAgg.getAggregations().get("agg"); + Boxplot bpBoth = bpResponseBoth.getAggregations().get("agg"); + assertEquals(bpRaw.getMax(), bpPreAgg.getMax(), 0.0); + assertEquals(bpRaw.getMax(), bpBoth.getMax(), 0.0); + assertEquals(bpRaw.getMin(), bpPreAgg.getMin(), 0.0); + assertEquals(bpRaw.getMin(), bpBoth.getMin(), 0.0); + + assertEquals(bpRaw.getQ1(), bpPreAgg.getQ1(), 1.0); + assertEquals(bpRaw.getQ1(), bpBoth.getQ1(), 1.0); + assertEquals(bpRaw.getQ2(), bpPreAgg.getQ2(), 1.0); + assertEquals(bpRaw.getQ2(), bpBoth.getQ2(), 1.0); + assertEquals(bpRaw.getQ3(), bpPreAgg.getQ3(), 1.0); + assertEquals(bpRaw.getQ3(), bpBoth.getQ3(), 1.0); + } + @Override protected Collection> getPlugins() { List> plugins = new ArrayList<>(super.getPlugins()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java new file mode 100644 index 00000000000..305ebd54ded --- /dev/null +++ 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.analytics.stringstats; + +import org.elasticsearch.client.analytics.ParsedStringStats; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.InternalAggregationTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; + +import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class InternalStringStatsTests extends InternalAggregationTestCase { + @Override + protected List getNamedXContents() { + List result = new ArrayList<>(super.getNamedXContents()); + result.add(new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(StringStatsAggregationBuilder.NAME), + (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c))); + return result; + } + + protected InternalStringStats createTestInstance( + String name, List pipelineAggregators, Map metaData) { + if (randomBoolean()) { + return new InternalStringStats(name, 0, 0, 0, 0, emptyMap(), randomBoolean(), DocValueFormat.RAW, + pipelineAggregators, metaData); + 
} + return new InternalStringStats(name, randomLongBetween(1, Long.MAX_VALUE), + randomNonNegativeLong(), between(0, Integer.MAX_VALUE), between(0, Integer.MAX_VALUE), randomCharOccurrences(), + randomBoolean(), DocValueFormat.RAW, + pipelineAggregators, metaData); + }; + + @Override + protected InternalStringStats mutateInstance(InternalStringStats instance) throws IOException { + String name = instance.getName(); + long count = instance.getCount(); + long totalLength = instance.getTotalLength(); + int minLength = instance.getMinLength(); + int maxLength = instance.getMaxLength(); + Map charOccurrences = instance.getCharOccurrences(); + boolean showDistribution = instance.getShowDistribution(); + switch (between(0, 6)) { + case 0: + name = name + "a"; + break; + case 1: + count = randomValueOtherThan(count, () -> randomLongBetween(1, Long.MAX_VALUE)); + break; + case 2: + totalLength = randomValueOtherThan(totalLength, ESTestCase::randomNonNegativeLong); + break; + case 3: + minLength = randomValueOtherThan(minLength, () -> between(0, Integer.MAX_VALUE)); + break; + case 4: + maxLength = randomValueOtherThan(maxLength, () -> between(0, Integer.MAX_VALUE)); + break; + case 5: + charOccurrences = randomValueOtherThan(charOccurrences, this::randomCharOccurrences); + break; + case 6: + showDistribution = !showDistribution; + break; + } + return new InternalStringStats(name, count, totalLength, minLength, maxLength, charOccurrences, showDistribution, + DocValueFormat.RAW, instance.pipelineAggregators(), instance.getMetaData()); + } + + @Override + protected Reader instanceReader() { + return InternalStringStats::new; + } + + @Override + protected void assertFromXContent(InternalStringStats aggregation, ParsedAggregation parsedAggregation) throws IOException { + ParsedStringStats parsed = (ParsedStringStats) parsedAggregation; + assertThat(parsed.getName(), equalTo(aggregation.getName())); + if (aggregation.getCount() == 0) { + assertThat(parsed.getCount(), 
equalTo(0L)); + assertThat(parsed.getMinLength(), equalTo(0)); + assertThat(parsed.getMaxLength(), equalTo(0)); + assertThat(parsed.getAvgLength(), equalTo(0d)); + assertThat(parsed.getEntropy(), equalTo(0d)); + assertThat(parsed.getDistribution(), nullValue()); + return; + } + assertThat(parsed.getCount(), equalTo(aggregation.getCount())); + assertThat(parsed.getMinLength(), equalTo(aggregation.getMinLength())); + assertThat(parsed.getMaxLength(), equalTo(aggregation.getMaxLength())); + assertThat(parsed.getAvgLength(), equalTo(aggregation.getAvgLength())); + assertThat(parsed.getEntropy(), equalTo(aggregation.getEntropy())); + if (aggregation.getShowDistribution()) { + assertThat(parsed.getDistribution(), equalTo(aggregation.getDistribution())); + } else { + assertThat(parsed.getDistribution(), nullValue()); + } + } + + @Override + protected Predicate excludePathsFromXContentInsertion() { + return path -> path.endsWith(".distribution"); + } + + @Override + protected void assertReduced(InternalStringStats reduced, List inputs) { + assertThat(reduced.getCount(), equalTo(inputs.stream().mapToLong(InternalStringStats::getCount).sum())); + assertThat(reduced.getMinLength(), equalTo(inputs.stream().mapToInt(InternalStringStats::getMinLength).min().getAsInt())); + assertThat(reduced.getMaxLength(), equalTo(inputs.stream().mapToInt(InternalStringStats::getMaxLength).max().getAsInt())); + assertThat(reduced.getTotalLength(), equalTo(inputs.stream().mapToLong(InternalStringStats::getTotalLength).sum())); + Map reducedChars = new HashMap<>(); + for (InternalStringStats stats : inputs) { + for (Map.Entry e : stats.getCharOccurrences().entrySet()) { + reducedChars.merge(e.getKey(), e.getValue(), (lhs, rhs) -> lhs + rhs); + } + } + assertThat(reduced.getCharOccurrences(), equalTo(reducedChars)); + } + + private Map randomCharOccurrences() { + Map charOccurrences = new HashMap(); + int occurrencesSize = between(0, 1000); + while (charOccurrences.size() < occurrencesSize) { + 
charOccurrences.put(randomAlphaOfLength(5), randomNonNegativeLong()); + } + return charOccurrences; + } +} diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java new file mode 100644 index 00000000000..99fee0aa03a --- /dev/null +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.analytics.stringstats; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BaseAggregationBuilder; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.Arrays; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class StringStatsAggregationBuilderTests extends AbstractSerializingTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(Arrays.asList( + new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, new 
ParseField(StringStatsAggregationBuilder.NAME), + (p, c) -> StringStatsAggregationBuilder.PARSER.parse(p, (String) c)))); + } + + @Override + protected StringStatsAggregationBuilder doParseInstance(XContentParser parser) throws IOException { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + String name = parser.currentName(); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("string_stats")); + StringStatsAggregationBuilder parsed = StringStatsAggregationBuilder.PARSER.apply(parser, name); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + return parsed; + } + + @Override + protected Reader instanceReader() { + return StringStatsAggregationBuilder::new; + } + + @Override + protected StringStatsAggregationBuilder createTestInstance() { + StringStatsAggregationBuilder builder = new StringStatsAggregationBuilder(randomAlphaOfLength(5)); + builder.showDistribution(randomBoolean()); + return builder; + } + + @Override + protected StringStatsAggregationBuilder mutateInstance(StringStatsAggregationBuilder instance) throws IOException { + if (randomBoolean()) { + StringStatsAggregationBuilder mutant = new StringStatsAggregationBuilder(instance.getName()); + mutant.showDistribution(!instance.showDistribution()); + return mutant; + } + StringStatsAggregationBuilder mutant = new StringStatsAggregationBuilder(randomAlphaOfLength(4)); + mutant.showDistribution(instance.showDistribution()); + return mutant; + } + + public void testClientBuilder() throws IOException { + AbstractXContentTestCase.xContentTester( + this::createParser, this::createTestInstance, this::toXContentThroughClientBuilder, + p -> { + p.nextToken(); + 
AggregatorFactories.Builder b = AggregatorFactories.parseAggregators(p); + assertThat(b.getAggregatorFactories(), hasSize(1)); + assertThat(b.getPipelineAggregatorFactories(), empty()); + return (StringStatsAggregationBuilder) b.getAggregatorFactories().iterator().next(); + } ).test(); + } + + private void toXContentThroughClientBuilder(StringStatsAggregationBuilder serverBuilder, XContentBuilder builder) throws IOException { + builder.startObject(); + createClientBuilder(serverBuilder).toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + } + + private org.elasticsearch.client.analytics.StringStatsAggregationBuilder createClientBuilder( + StringStatsAggregationBuilder serverBuilder) { + org.elasticsearch.client.analytics.StringStatsAggregationBuilder builder = + new org.elasticsearch.client.analytics.StringStatsAggregationBuilder(serverBuilder.getName()); + return builder.showDistribution(serverBuilder.showDistribution()); + } +} diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsReduceTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsReduceTests.java new file mode 100644 index 00000000000..d70a0e4f39d --- /dev/null +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsReduceTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.sort.SortValue; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; + +import static java.util.Collections.emptyList; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; + +/** + * Some explicit and simple tests for reducing {@link InternalTopMetrics}. + * All of the randomized testing, including randomized reduce testing is + * in {@link InternalTopMetricsTests}. + */ +public class InternalTopMetricsReduceTests extends ESTestCase { + public void testAllEmpty() { + InternalTopMetrics first = buildEmpty(); + InternalTopMetrics reduced = reduce(first, buildEmpty(), buildEmpty(), buildEmpty()); + assertThat(reduced, sameInstance(first)); + } + + public void testFirstEmpty() { + InternalTopMetrics first = buildEmpty(); + InternalTopMetrics reduced = reduce(first, buildFilled(SortValue.from(1), 1.0)); + assertThat(reduced, sameInstance(first)); + } + + public void testMany() { + InternalTopMetrics first = buildFilled(SortValue.from(2.0), randomDouble()); + InternalTopMetrics min = buildFilled(SortValue.from(1.0), randomDouble()); + InternalTopMetrics max = buildFilled(SortValue.from(7.0), randomDouble()); + InternalTopMetrics[] metrics = new InternalTopMetrics[] { + first, max, min, buildEmpty(), buildEmpty(), + }; + InternalTopMetrics winner = first.getSortOrder() == SortOrder.ASC ? 
min : max; + InternalTopMetrics reduced = reduce(metrics); + assertThat(reduced.getName(), equalTo("test")); + assertThat(reduced.getSortValue(), equalTo(winner.getSortValue())); + assertThat(reduced.getSortFormat(), equalTo(winner.getSortFormat())); + assertThat(reduced.getSortOrder(), equalTo(first.getSortOrder())); + assertThat(reduced.getMetricValue(), equalTo(winner.getMetricValue())); + assertThat(reduced.getMetricName(), equalTo("test")); + } + + public void testDifferentTypes() { + InternalTopMetrics doubleMetrics = buildFilled(SortValue.from(100.0), randomDouble()); + InternalTopMetrics longMetrics = buildFilled(SortValue.from(7), randomDouble()); + InternalTopMetrics reduced = reduce(doubleMetrics, longMetrics); + // Doubles sort first. + InternalTopMetrics winner = doubleMetrics.getSortOrder() == SortOrder.ASC ? doubleMetrics : longMetrics; + assertThat(reduced.getName(), equalTo("test")); + assertThat(reduced.getSortValue(), equalTo(winner.getSortValue())); + assertThat(reduced.getSortFormat(), equalTo(winner.getSortFormat())); + assertThat(reduced.getSortOrder(), equalTo(doubleMetrics.getSortOrder())); + assertThat(reduced.getMetricValue(), equalTo(winner.getMetricValue())); + assertThat(reduced.getMetricName(), equalTo("test")); + } + + private InternalTopMetrics buildEmpty() { + return InternalTopMetrics.buildEmptyAggregation("test", "test", emptyList(), null); + } + + private InternalTopMetrics buildFilled(SortValue sortValue, double metricValue) { + DocValueFormat sortFormat = randomFrom(DocValueFormat.RAW, DocValueFormat.BINARY, DocValueFormat.BOOLEAN, DocValueFormat.IP); + SortOrder sortOrder = randomFrom(SortOrder.values()); + return new InternalTopMetrics("test", sortFormat, sortOrder, sortValue, "test", metricValue, emptyList(), null); + } + + private InternalTopMetrics reduce(InternalTopMetrics... 
results) { + return results[0].reduce(Arrays.asList(results), null); + } +} diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java new file mode 100644 index 00000000000..1ccb8fa4b79 --- /dev/null +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java @@ -0,0 +1,198 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.elasticsearch.client.analytics.ParsedTopMetrics; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.sort.SortValue; +import org.elasticsearch.test.InternalAggregationTestCase; + +import java.io.IOException; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static 
org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class InternalTopMetricsTests extends InternalAggregationTestCase { + public void testEmptyIsNotMapped() { + InternalTopMetrics empty = InternalTopMetrics.buildEmptyAggregation( + randomAlphaOfLength(5), randomAlphaOfLength(2), emptyList(), null); + assertFalse(empty.isMapped()); + } + + public void testNonEmptyIsMapped() { + InternalTopMetrics nonEmpty = randomValueOtherThanMany(tm -> tm.getSortValue() == null, this::createTestInstance); + assertTrue(nonEmpty.isMapped()); + } + + public void testToXContentDoubleSortValue() throws IOException { + InternalTopMetrics tm = new InternalTopMetrics("test", DocValueFormat.RAW, randomFrom(SortOrder.values()), SortValue.from(1.0), + "test", 1.0, emptyList(), null); + assertThat(Strings.toString(tm, true, true), equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " 1.0\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : 1.0\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}")); + } + + public void testToXConentDateSortValue() throws IOException { + DocValueFormat sortFormat = new DocValueFormat.DateTime(DateFormatter.forPattern("strict_date_time"), ZoneId.of("UTC"), + DateFieldMapper.Resolution.MILLISECONDS); + SortValue sortValue = SortValue.from(ZonedDateTime.parse("2007-12-03T10:15:30Z").toInstant().toEpochMilli()); + InternalTopMetrics tm = new InternalTopMetrics("test", sortFormat, randomFrom(SortOrder.values()), sortValue, "test", 1.0, + emptyList(), null); + assertThat(Strings.toString(tm, true, true), equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " \"2007-12-03T10:15:30.000Z\"\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : 1.0\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}")); + } + + @Override + protected List getNamedXContents() { + List result = new ArrayList<>(super.getNamedXContents()); + result.add(new 
NamedXContentRegistry.Entry(Aggregation.class, new ParseField(TopMetricsAggregationBuilder.NAME), + (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c))); + return result; + } + + @Override + protected InternalTopMetrics createTestInstance(String name, List pipelineAggregators, + Map metaData) { + DocValueFormat sortFormat = randomNumericDocValueFormat(); + SortOrder sortOrder = randomFrom(SortOrder.values()); + SortValue sortValue = randomSortValue(); + String metricName = randomAlphaOfLength(5); + double metricValue = randomDouble(); + return new InternalTopMetrics(name, sortFormat, sortOrder, sortValue, metricName, metricValue, pipelineAggregators, metaData); + } + + @Override + protected InternalTopMetrics mutateInstance(InternalTopMetrics instance) throws IOException { + String name = instance.getName(); + DocValueFormat sortFormat = instance.getSortFormat(); + SortOrder sortOrder = instance.getSortOrder(); + SortValue sortValue = instance.getSortValue(); + String metricName = instance.getMetricName(); + double metricValue = instance.getMetricValue(); + switch (randomInt(5)) { + case 0: + name = randomAlphaOfLength(6); + break; + case 1: + sortFormat = randomValueOtherThan(sortFormat, InternalAggregationTestCase::randomNumericDocValueFormat); + break; + case 2: + sortOrder = sortOrder == SortOrder.ASC ? 
SortOrder.DESC : SortOrder.ASC; + break; + case 3: + sortValue = randomValueOtherThan(sortValue, InternalTopMetricsTests::randomSortValue); + break; + case 4: + metricName = randomAlphaOfLength(6); + break; + case 5: + metricValue = randomValueOtherThan(metricValue, () -> randomDouble()); + break; + default: + throw new IllegalArgumentException("bad mutation"); + } + return new InternalTopMetrics(name, sortFormat, sortOrder, sortValue, metricName, metricValue, emptyList(), null); + } + + @Override + protected Reader instanceReader() { + return InternalTopMetrics::new; + } + + @Override + protected void assertFromXContent(InternalTopMetrics aggregation, ParsedAggregation parsedAggregation) throws IOException { + ParsedTopMetrics parsed = (ParsedTopMetrics) parsedAggregation; + assertThat(parsed.getName(), equalTo(aggregation.getName())); + if (false == aggregation.isMapped()) { + assertThat(parsed.getTopMetrics(), hasSize(0)); + return; + } + assertThat(parsed.getTopMetrics(), hasSize(1)); + ParsedTopMetrics.TopMetrics parsedTop = parsed.getTopMetrics().get(0); + Object expectedSort = aggregation.getSortFormat() == DocValueFormat.RAW ? 
+ aggregation.getSortValue().getKey() : aggregation.getFormattedSortValue(); + assertThat(parsedTop.getSort(), equalTo(singletonList(expectedSort))); + assertThat(parsedTop.getMetrics(), equalTo(singletonMap(aggregation.getMetricName(), aggregation.getMetricValue()))); + } + + @Override + protected void assertReduced(InternalTopMetrics reduced, List inputs) { + InternalTopMetrics first = inputs.get(0); + InternalTopMetrics winner = inputs.stream() + .filter(tm -> tm.isMapped()) + .min((lhs, rhs) -> first.getSortOrder().reverseMul() * lhs.getSortValue().compareTo(rhs.getSortValue())) + .get(); + assertThat(reduced.getName(), equalTo(first.getName())); + assertThat(reduced.getSortValue(), equalTo(winner.getSortValue())); + assertThat(reduced.getSortFormat(), equalTo(winner.getSortFormat())); + assertThat(reduced.getSortOrder(), equalTo(first.getSortOrder())); + assertThat(reduced.getMetricValue(), equalTo(winner.getMetricValue())); + assertThat(reduced.getMetricName(), equalTo(first.getMetricName())); + } + + private static SortValue randomSortValue() { + switch (between(0, 2)) { + case 0: + return null; + case 1: + return SortValue.from(randomLong()); + case 2: + return SortValue.from(randomDouble()); + default: + throw new IllegalArgumentException("unsupported random sort"); + } + } + + @Override + protected Predicate excludePathsFromXContentInsertion() { + return path -> path.endsWith(".metrics"); + } +} diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilderTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilderTests.java new file mode 100644 index 00000000000..0a44a2dcd77 --- /dev/null +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilderTests.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BaseAggregationBuilder; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class TopMetricsAggregationBuilderTests extends AbstractSerializingTestCase { + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(Arrays.asList( + new NamedWriteableRegistry.Entry(SortBuilder.class, FieldSortBuilder.NAME, FieldSortBuilder::new))); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(Arrays.asList( + new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, new ParseField(TopMetricsAggregationBuilder.NAME), + (p, c) -> 
TopMetricsAggregationBuilder.PARSER.parse(p, (String) c)))); + } + + @Override + protected TopMetricsAggregationBuilder doParseInstance(XContentParser parser) throws IOException { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + String name = parser.currentName(); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("top_metrics")); + TopMetricsAggregationBuilder parsed = TopMetricsAggregationBuilder.PARSER.apply(parser, name); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + return parsed; + } + + @Override + protected Reader instanceReader() { + return TopMetricsAggregationBuilder::new; + } + + @Override + protected TopMetricsAggregationBuilder createTestInstance() { + List> sortBuilders = singletonList( + new FieldSortBuilder(randomAlphaOfLength(5)).order(randomFrom(SortOrder.values()))); + MultiValuesSourceFieldConfig.Builder metricField = new MultiValuesSourceFieldConfig.Builder(); + metricField.setFieldName(randomAlphaOfLength(5)).setMissing(1.0); + return new TopMetricsAggregationBuilder(randomAlphaOfLength(5), sortBuilders, metricField.build()); + } + + public void testClientBuilder() throws IOException { + AbstractXContentTestCase.xContentTester( + this::createParser, this::createTestInstance, this::toXContentThroughClientBuilder, + p -> { + p.nextToken(); + AggregatorFactories.Builder b = AggregatorFactories.parseAggregators(p); + assertThat(b.getAggregatorFactories(), hasSize(1)); + assertThat(b.getPipelineAggregatorFactories(), empty()); + return (TopMetricsAggregationBuilder) b.getAggregatorFactories().iterator().next(); + } ).test(); + } + + private void 
toXContentThroughClientBuilder(TopMetricsAggregationBuilder serverBuilder, XContentBuilder builder) throws IOException { + builder.startObject(); + createClientBuilder(serverBuilder).toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + } + + private org.elasticsearch.client.analytics.TopMetricsAggregationBuilder createClientBuilder( + TopMetricsAggregationBuilder serverBuilder) { + assertThat(serverBuilder.getSortBuilders(), hasSize(1)); + return new org.elasticsearch.client.analytics.TopMetricsAggregationBuilder( + serverBuilder.getName(), + serverBuilder.getSortBuilders().get(0), + serverBuilder.getMetricField().getFieldName()); + } +} diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java new file mode 100644 index 00000000000..e870bd870fe --- /dev/null +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java @@ -0,0 +1,498 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.analytics.topmetrics; + +import org.apache.lucene.document.Field; +import org.apache.lucene.document.LatLonDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.index.mapper.GeoPointFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.ScriptModule; +import 
org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.GeoDistanceSortBuilder; +import org.elasticsearch.search.sort.ScoreSortBuilder; +import org.elasticsearch.search.sort.ScriptSortBuilder; +import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; +import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.sort.SortValue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notANumber; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + + +public class TopMetricsAggregatorTests extends AggregatorTestCase { + public void testNoDocs() throws IOException { + 
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {}, + doubleFields()); + assertThat(result.getSortFormat(), equalTo(DocValueFormat.RAW)); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), nullValue()); + assertThat(result.getMetricValue(), notANumber()); + } + + public void testUnmappedMetric() throws IOException { + InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { + writer.addDocument(singletonList(doubleField("s", 1.0))); + }, + numberFieldType(NumberType.DOUBLE, "s")); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), nullValue()); + assertThat(result.getMetricValue(), notANumber()); + } + + public void testMissingValueForMetric() throws IOException { + InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { + writer.addDocument(singletonList(doubleField("s", 1.0))); + }, + doubleFields()); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.0))); + assertThat(result.getMetricValue(), notANumber()); + } + + public void testActualValueForMetric() throws IOException { + InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0))); + }, + doubleFields()); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.0))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + private InternalTopMetrics collectFromDoubles(TopMetricsAggregationBuilder builder) throws IOException { + return collect(builder, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("s", 2.0), doubleField("m", 3.0))); + }, + 
doubleFields()); + } + + public void testSortByDoubleAscending() throws IOException { + InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC))); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.0))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + public void testSortByDoubleDescending() throws IOException { + InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC))); + assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(2.0))); + assertThat(result.getMetricValue(), equalTo(3.0d)); + } + + public void testSortByDoubleCastToLong() throws IOException { + InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").setNumericType("long"))); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + + public void testSortByFloatAscending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)); + InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0))); + }, + floatAndDoubleField()); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.0))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + public void testSortByFloatDescending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC)); + InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), 
writer -> { + writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0))); + }, + floatAndDoubleField()); + assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(2.0))); + assertThat(result.getMetricValue(), equalTo(3.0d)); + } + + public void testSortByLongAscending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)); + InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0))); + }, + longAndDoubleField()); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(10))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + public void testSortByLongDescending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC)); + InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0))); + }, + longAndDoubleField()); + assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(20))); + assertThat(result.getMetricValue(), equalTo(3.0d)); + } + + public void testSortByScoreDescending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.DESC)); + InternalTopMetrics result = collect(builder, boostFoo(), writer -> { + writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); + 
writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); + }, + textAndDoubleField()); + assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(2.0))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + public void testSortByScoreAscending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.ASC)); + InternalTopMetrics result = collect(builder, boostFoo(), writer -> { + writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); + }, + textAndDoubleField()); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.0))); + assertThat(result.getMetricValue(), equalTo(3.0d)); + } + + public void testSortByScriptDescending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.DESC)); + InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0))); + }, + doubleFields()); + assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(2.0))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + public void testSortByScriptAscending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.ASC)); + InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0))); + }, + doubleFields()); + assertThat(result.getSortOrder(), 
equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.0))); + assertThat(result.getMetricValue(), equalTo(3.0d)); + } + + public void testSortByStringScriptFails() throws IOException { + Script script = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "s", emptyMap()); + TopMetricsAggregationBuilder builder = simpleBuilder(new ScriptSortBuilder(script, ScriptSortType.STRING)); + Exception e = expectThrows(IllegalArgumentException.class, () -> collect(builder, boostFoo(), writer -> { + writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); + }, + textAndDoubleField())); + assertThat(e.getMessage(), equalTo( + "error building sort for [_script]: script sorting only supported on [numeric] scripts but was [string]")); + } + + private InternalTopMetrics collectFromNewYorkAndLA(TopMetricsAggregationBuilder builder) throws IOException { + return collect(builder, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(geoPointField("s", 40.7128, -74.0060), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(geoPointField("s", 34.0522, -118.2437), doubleField("m", 3.0))); + }, + geoPointAndDoubleField()); + } + + public void testSortByGeoDistancDescending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new GeoDistanceSortBuilder("s", 35.7796, 78.6382).order(SortOrder.DESC)); + InternalTopMetrics result = collectFromNewYorkAndLA(builder); + assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.2054632268631617E7))); + assertThat(result.getMetricValue(), equalTo(3.0d)); + } + + public void testSortByGeoDistanceAscending() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new GeoDistanceSortBuilder("s", 35.7796, 78.6382).order(SortOrder.ASC)); + InternalTopMetrics result = 
collectFromNewYorkAndLA(builder); + assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(result.getSortValue(), equalTo(SortValue.from(1.1062351376961706E7))); + assertThat(result.getMetricValue(), equalTo(2.0d)); + } + + public void testInsideTerms() throws IOException { + TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)); + TermsAggregationBuilder terms = new TermsAggregationBuilder("terms", ValueType.DOUBLE).field("c").subAggregation(builder); + Terms result = (Terms) collect(terms, new MatchAllDocsQuery(), writer -> { + writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 1.0), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 2.0), doubleField("m", 3.0))); + writer.addDocument(Arrays.asList(doubleField("c", 2.0), doubleField("s", 4.0), doubleField("m", 9.0))); + }, + numberFieldType(NumberType.DOUBLE, "c"), numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m")); + Terms.Bucket bucket1 = result.getBuckets().get(0); + assertThat(bucket1.getKey(), equalTo(1.0)); + InternalTopMetrics top1 = bucket1.getAggregations().get("test"); + assertThat(top1.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(top1.getSortValue(), equalTo(SortValue.from(1.0))); + assertThat(top1.getMetricValue(), equalTo(2.0d)); + Terms.Bucket bucket2 = result.getBuckets().get(1); + assertThat(bucket2.getKey(), equalTo(2.0)); + InternalTopMetrics top2 = bucket2.getAggregations().get("test"); + assertThat(top2.getSortOrder(), equalTo(SortOrder.ASC)); + assertThat(top2.getSortValue(), equalTo(SortValue.from(4.0))); + assertThat(top2.getMetricValue(), equalTo(9.0d)); + } + + public void testTonsOfBucketsTriggersBreaker() throws IOException { + // Build a "simple" circuit breaker that trips at 20k + CircuitBreakerService breaker = mock(CircuitBreakerService.class); + ByteSizeValue max = new ByteSizeValue(20, ByteSizeUnit.KB); + 
when(breaker.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new NoopCircuitBreaker(CircuitBreaker.REQUEST) { + private long total = 0; + + @Override + public double addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + logger.debug("Used {} grabbing {} for {}", total, bytes, label); + total += bytes; + if (total > max.getBytes()) { + throw new CircuitBreakingException("test error", bytes, max.getBytes(), Durability.TRANSIENT); + } + return total; + } + + @Override + public long addWithoutBreaking(long bytes) { + logger.debug("Used {} grabbing {}", total, bytes); + total += bytes; + return total; + } + }); + + // Collect some buckets with it + try (Directory directory = newDirectory()) { + try (RandomIndexWriter writer = new RandomIndexWriter(random(), directory)) { + writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0))); + } + + try (IndexReader indexReader = DirectoryReader.open(directory)) { + IndexSearcher indexSearcher = newSearcher(indexReader, false, false); + SearchContext searchContext = createSearchContext(indexSearcher, createIndexSettings(), new MatchAllDocsQuery(), + new MultiBucketConsumer(Integer.MAX_VALUE, breaker.getBreaker(CircuitBreaker.REQUEST)), breaker, doubleFields()); + TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)); + Aggregator aggregator = builder.build(searchContext.getQueryShardContext(), null) + .create(searchContext, null, true); + aggregator.preCollection(); + assertThat(indexReader.leaves(), hasSize(1)); + LeafBucketCollector leaf = aggregator.getLeafCollector(indexReader.leaves().get(0)); + + /* + * Collect some number of buckets that we *know* fit in the + * breaker. 
The number of buckets feels fairly arbitrary but + * it comes from: + * budget = 15k = 20k - 5k for the "default weight" of every agg + * The 922nd bucket causes a resize which puts the total + * just over 15k. + */ + int bucketThatBreaks = 922; + for (int b = 0; b < bucketThatBreaks; b++) { + try { + leaf.collect(0, b); + } catch (Exception e) { + throw new RuntimeException("ADFADFS " + b, e); + } + } + CircuitBreakingException e = expectThrows(CircuitBreakingException.class, () -> leaf.collect(0, bucketThatBreaks)); + assertThat(e.getMessage(), equalTo("test error")); + assertThat(e.getByteLimit(), equalTo(max.getBytes())); + assertThat(e.getBytesWanted(), equalTo(16440L)); + } + } + } + + private TopMetricsAggregationBuilder simpleBuilder(SortBuilder sort) { + return new TopMetricsAggregationBuilder("test", singletonList(sort), + new MultiValuesSourceFieldConfig.Builder().setFieldName("m").build()); + } + + private TopMetricsAggregationBuilder simpleBuilder() { + return simpleBuilder(new FieldSortBuilder("s")); + } + + /** + * Build a query that matches all documents but adds 1 to the score of + * all docs that contain "foo". We use this instead of a term query + * directly because the score that can come from the term query can + * vary quite a bit but this is super predictable. 
+ */ + private Query boostFoo() { + return new BooleanQuery.Builder() + .add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST)) + .add(new BooleanClause(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("s", "foo"))), 1.0f), Occur.SHOULD)) + .build(); + } + + private MappedFieldType[] doubleFields() { + return new MappedFieldType[] {numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m")}; + } + + private MappedFieldType[] floatAndDoubleField() { + return new MappedFieldType[] {numberFieldType(NumberType.FLOAT, "s"), numberFieldType(NumberType.DOUBLE, "m")}; + } + + private MappedFieldType[] longAndDoubleField() { + return new MappedFieldType[] {numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.DOUBLE, "m")}; + } + + private MappedFieldType[] textAndDoubleField() { + return new MappedFieldType[] {textFieldType("s"), numberFieldType(NumberType.DOUBLE, "m")}; + } + + private MappedFieldType[] geoPointAndDoubleField() { + return new MappedFieldType[] {geoPointFieldType("s"), numberFieldType(NumberType.DOUBLE, "m")}; + } + + private MappedFieldType numberFieldType(NumberType numberType, String name) { + NumberFieldMapper.NumberFieldType type = new NumberFieldMapper.NumberFieldType(numberType); + type.setName(name); + return type; + } + + private MappedFieldType textFieldType(String name) { + TextFieldMapper.TextFieldType type = new TextFieldMapper.TextFieldType(); + type.setName(name); + return type; + } + + private MappedFieldType geoPointFieldType(String name) { + GeoPointFieldMapper.GeoPointFieldType type = new GeoPointFieldMapper.GeoPointFieldType(); + type.setName(name); + type.setHasDocValues(true); + return type; + } + + private IndexableField doubleField(String name, double value) { + return new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(value)); + } + + private IndexableField floatField(String name, float value) { + return new SortedNumericDocValuesField(name, 
NumericUtils.floatToSortableInt(value)); + } + + private IndexableField longField(String name, long value) { + return new SortedNumericDocValuesField(name, value); + } + + private IndexableField textField(String name, String value) { + return new Field(name, value, textFieldType(name)); + } + + private IndexableField geoPointField(String name, double lat, double lon) { + return new LatLonDocValuesField(name, lat, lon); + } + + private InternalTopMetrics collect(TopMetricsAggregationBuilder builder, Query query, + CheckedConsumer buildIndex, MappedFieldType... fields) throws IOException { + InternalTopMetrics result = (InternalTopMetrics) collect((AggregationBuilder) builder, query, buildIndex, fields); + assertThat(result.getSortFormat(), equalTo(DocValueFormat.RAW)); + assertThat(result.getMetricName(), equalTo(builder.getMetricField().getFieldName())); + return result; + } + + private InternalAggregation collect(AggregationBuilder builder, Query query, + CheckedConsumer buildIndex, MappedFieldType... fields) throws IOException { + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + buildIndex.accept(indexWriter); + } + + try (IndexReader indexReader = DirectoryReader.open(directory)) { + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + return search(indexSearcher, query, builder, fields); + } + } + } + + /** + * Builds a simple script that reads the "s" field. 
+ */ + private ScriptSortBuilder scriptSortOnS() { + return new ScriptSortBuilder(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "s", emptyMap()), ScriptSortType.NUMBER); + } + + @Override + protected ScriptService getMockScriptService() { + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + singletonMap("s", args -> { + @SuppressWarnings("unchecked") + Map> fields = (Map>) args.get("doc"); + ScriptDocValues.Doubles field = (ScriptDocValues.Doubles) fields.get("s"); + return field.getValue(); + }), + emptyMap()); + Map engines = singletonMap(scriptEngine.getType(), scriptEngine); + return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java index 683f4a7479a..3d245adb443 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.license; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,8 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteLicenseAction extends XPackRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteLicenseAction.class)); - RestDeleteLicenseAction() {} @Override @@ -34,7 +30,7 @@ public class RestDeleteLicenseAction extends XPackRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(DELETE, "/_license", DELETE, URI_BASE + "/license", 
deprecationLogger)); + return singletonList(new ReplacedRoute(DELETE, "/_license", DELETE, URI_BASE + "/license")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java index 68da8686708..7be9bd85cce 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java @@ -6,8 +6,6 @@ package org.elasticsearch.license; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.XPackClient; @@ -21,8 +19,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetBasicStatus extends XPackRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetBasicStatus.class)); - RestGetBasicStatus() {} @Override @@ -32,7 +28,7 @@ public class RestGetBasicStatus extends XPackRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/_license/basic_status", GET, URI_BASE + "/license/basic_status", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/_license/basic_status", GET, URI_BASE + "/license/basic_status")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java index 29978cc41b2..b2e19d8d782 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.license; -import 
org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; @@ -31,8 +29,6 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestGetLicenseAction extends XPackRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetLicenseAction.class)); - RestGetLicenseAction() {} @Override @@ -42,7 +38,7 @@ public class RestGetLicenseAction extends XPackRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/_license", GET, URI_BASE + "/license", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/_license", GET, URI_BASE + "/license")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java index 1d8aa0ed689..a882047151e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java @@ -6,8 +6,6 @@ package org.elasticsearch.license; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.XPackClient; @@ -21,8 +19,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetTrialStatus extends XPackRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetTrialStatus.class)); - RestGetTrialStatus() {} @Override @@ -32,7 +28,7 @@ public class RestGetTrialStatus extends XPackRestHandler { 
@Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/_license/trial_status", GET, URI_BASE + "/license/trial_status", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/_license/trial_status", GET, URI_BASE + "/license/trial_status")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java index e56e95dc450..63b7998d45b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java @@ -6,8 +6,6 @@ package org.elasticsearch.license; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; import org.elasticsearch.xpack.core.XPackClient; @@ -22,8 +20,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestPostStartBasicLicense extends XPackRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPostStartBasicLicense.class)); - RestPostStartBasicLicense() {} @Override @@ -33,7 +29,7 @@ public class RestPostStartBasicLicense extends XPackRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(POST, "/_license/start_basic", POST, URI_BASE + "/license/start_basic", deprecationLogger)); + return singletonList(new ReplacedRoute(POST, "/_license/start_basic", POST, URI_BASE + "/license/start_basic")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java index c326162392b..e442298ca5a 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java @@ -6,8 +6,6 @@ package org.elasticsearch.license; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; @@ -26,8 +24,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestPostStartTrialLicense extends XPackRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPostStartTrialLicense.class)); - RestPostStartTrialLicense() {} @Override @@ -37,7 +33,7 @@ public class RestPostStartTrialLicense extends XPackRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(POST, "/_license/start_trial", POST, URI_BASE + "/license/start_trial", deprecationLogger)); + return singletonList(new ReplacedRoute(POST, "/_license/start_trial", POST, URI_BASE + "/license/start_trial")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java index 1d7896f75dd..5eb599dc9bd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.license; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.XPackClient; @@ -22,8 +20,6 @@ import static 
org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutLicenseAction extends XPackRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutLicenseAction.class)); - RestPutLicenseAction() {} @Override @@ -35,8 +31,8 @@ public class RestPutLicenseAction extends XPackRestHandler { public List replacedRoutes() { return unmodifiableList(asList( // TODO: remove POST endpoint? - new ReplacedRoute(POST, "/_license", POST, URI_BASE + "/license", deprecationLogger), - new ReplacedRoute(PUT, "/_license", PUT, URI_BASE + "/license", deprecationLogger))); + new ReplacedRoute(POST, "/_license", POST, URI_BASE + "/license"), + new ReplacedRoute(PUT, "/_license", PUT, URI_BASE + "/license"))); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java index 38f84aefc43..cf66f831208 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/action/AnalyticsStatsAction.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.analytics.action; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodeRequest; @@ -109,32 +110,46 @@ public class AnalyticsStatsAction extends ActionTypewrap( - r -> createAliasListener.onResponse(r.isAcknowledged()), - e -> { - // Possible that the index was created while the request was executing, - // so we need to handle that possibility - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException) { - // Create the alias - createAliasListener.onResponse(true); - } else { - finalListener.onFailure(e); - } + 
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest, + ActionListener.wrap( + r -> createAliasListener.onResponse(r.isAcknowledged()), + e -> { + // Possible that the index was created while the request was executing, + // so we need to handle that possibility + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException) { + // Create the alias + createAliasListener.onResponse(true); + } else { + finalListener.onFailure(e); } - ), client.admin().indices()::create); - } catch (IOException e) { - finalListener.onFailure(e); - } + } + ), client.admin().indices()::create); return; } @@ -111,42 +95,8 @@ public class AnnotationIndex { finalListener.onResponse(false); } - public static XContentBuilder annotationsMapping() throws IOException { - XContentBuilder builder = jsonBuilder() - .startObject() - .startObject(SINGLE_MAPPING_NAME); - ElasticsearchMappings.addMetaInformation(builder); - builder.startObject(ElasticsearchMappings.PROPERTIES) - .startObject(Annotation.ANNOTATION.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.TEXT) - .endObject() - .startObject(Annotation.CREATE_TIME.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE) - .endObject() - .startObject(Annotation.CREATE_USERNAME.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD) - .endObject() - .startObject(Annotation.TIMESTAMP.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE) - .endObject() - .startObject(Annotation.END_TIMESTAMP.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE) - .endObject() - .startObject(Job.ID.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD) - .endObject() - .startObject(Annotation.MODIFIED_TIME.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE) - .endObject() - 
.startObject(Annotation.MODIFIED_USERNAME.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD) - .endObject() - .startObject(Annotation.TYPE.getPreferredName()) - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD) - .endObject() - .endObject() - .endObject() - .endObject(); - return builder; + public static String annotationsMapping() { + return TemplateUtils.loadTemplate("/org/elasticsearch/xpack/core/ml/annotations_index_mappings.json", + Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index 7d4e2367cce..b74a80563fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.ml.job.persistence; import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -16,7 +17,9 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.template.TemplateUtils; import java.util.Arrays; import java.util.Collections; @@ -31,6 +34,9 @@ public final class AnomalyDetectorsIndex { public static final int CONFIG_INDEX_MAX_RESULTS_WINDOW = 
10_000; + private static final String RESULTS_MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; + private static final String RESOURCE_PATH = "/org/elasticsearch/xpack/core/ml/anomalydetection/"; + private AnomalyDetectorsIndex() { } @@ -144,4 +150,12 @@ public final class AnomalyDetectorsIndex { } } + public static String resultsMapping() { + return resultsMapping(MapperService.SINGLE_MAPPING_NAME); + } + + public static String resultsMapping(String mappingType) { + return TemplateUtils.loadTemplate(RESOURCE_PATH + "results_index_mappings.json", + Version.CURRENT.toString(), RESULTS_MAPPINGS_VERSION_VARIABLE, Collections.singletonMap("xpack.ml.mapping_type", mappingType)); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index a90f0d91970..37b13c9a725 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -20,60 +20,16 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; import org.elasticsearch.plugins.MapperPlugin; -import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; -import org.elasticsearch.xpack.core.ml.datafeed.DelayedDataCheckConfig; -import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; -import 
org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; -import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; -import org.elasticsearch.xpack.core.ml.dataframe.analyses.BoostedTreeParams; -import org.elasticsearch.xpack.core.ml.dataframe.analyses.Classification; -import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection; -import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; -import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; -import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; -import org.elasticsearch.xpack.core.ml.job.config.DataDescription; -import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; -import org.elasticsearch.xpack.core.ml.job.config.Detector; -import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; -import org.elasticsearch.xpack.core.ml.job.config.Operator; -import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotField; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.TimingStats; -import org.elasticsearch.xpack.core.ml.job.results.AnomalyCause; -import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; -import org.elasticsearch.xpack.core.ml.job.results.Bucket; -import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer; -import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; -import org.elasticsearch.xpack.core.ml.job.results.Forecast; -import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats; -import org.elasticsearch.xpack.core.ml.job.results.GeoResults; -import 
org.elasticsearch.xpack.core.ml.job.results.Influence; -import org.elasticsearch.xpack.core.ml.job.results.Influencer; -import org.elasticsearch.xpack.core.ml.job.results.ModelPlot; -import org.elasticsearch.xpack.core.ml.job.results.ReservedFieldNames; -import org.elasticsearch.xpack.core.ml.job.results.Result; -import org.elasticsearch.xpack.core.ml.notifications.AnomalyDetectionAuditMessage; -import org.elasticsearch.xpack.core.ml.utils.ExponentialAverageCalculationContext; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -109,7 +65,6 @@ public class ElasticsearchMappings { public static final String PROPERTIES = "properties"; public static final String TYPE = "type"; public static final String DYNAMIC = "dynamic"; - public static final String FIELDS = "fields"; /** * Name of the custom 'all' field for results @@ -132,1091 +87,16 @@ public class ElasticsearchMappings { public static final String BOOLEAN = "boolean"; public static final String DATE = "date"; public static final String DOUBLE = "double"; - public static final String GEO_POINT = "geo_point"; public static final String INTEGER = "integer"; public static final String KEYWORD = "keyword"; public static final String LONG = "long"; public static final String TEXT = "text"; - static final String RAW = "raw"; - private static final Logger logger = LogManager.getLogger(ElasticsearchMappings.class); private ElasticsearchMappings() { } - public static XContentBuilder configMapping() throws IOException { - return configMapping(SINGLE_MAPPING_NAME); - } - - public static 
XContentBuilder configMapping(String mappingType) throws IOException { - XContentBuilder builder = jsonBuilder(); - builder.startObject(); - builder.startObject(mappingType); - addMetaInformation(builder); - addDefaultMapping(builder); - builder.startObject(PROPERTIES); - - addJobConfigFields(builder); - addDatafeedConfigFields(builder); - addDataFrameAnalyticsFields(builder); - - builder.endObject() - .endObject() - .endObject(); - return builder; - } - - public static void addJobConfigFields(XContentBuilder builder) throws IOException { - - builder.startObject(CONFIG_TYPE) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.JOB_TYPE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.JOB_VERSION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.GROUPS.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.ANALYSIS_CONFIG.getPreferredName()) - .startObject(PROPERTIES) - .startObject(AnalysisConfig.BUCKET_SPAN.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnalysisConfig.CATEGORIZATION_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnalysisConfig.CATEGORIZATION_FILTERS.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnalysisConfig.CATEGORIZATION_ANALYZER.getPreferredName()) - .field(ENABLED, false) - .endObject() - .startObject(AnalysisConfig.LATENCY.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnalysisConfig.SUMMARY_COUNT_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnalysisConfig.DETECTORS.getPreferredName()) - .startObject(PROPERTIES) - .startObject(Detector.DETECTOR_DESCRIPTION_FIELD.getPreferredName()) - .field(TYPE, TEXT) - .endObject() - .startObject(Detector.FUNCTION_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - 
.startObject(Detector.FIELD_NAME_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Detector.BY_FIELD_NAME_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Detector.OVER_FIELD_NAME_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Detector.PARTITION_FIELD_NAME_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Detector.USE_NULL_FIELD.getPreferredName()) - .field(TYPE, BOOLEAN) - .endObject() - .startObject(Detector.EXCLUDE_FREQUENT_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Detector.CUSTOM_RULES_FIELD.getPreferredName()) - .field(TYPE, NESTED) - .startObject(PROPERTIES) - .startObject(DetectionRule.ACTIONS_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - // RuleScope is a map - .startObject(DetectionRule.SCOPE_FIELD.getPreferredName()) - .field(ENABLED, false) - .endObject() - .startObject(DetectionRule.CONDITIONS_FIELD.getPreferredName()) - .field(TYPE, NESTED) - .startObject(PROPERTIES) - .startObject(RuleCondition.APPLIES_TO_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Operator.OPERATOR_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(RuleCondition.VALUE_FIELD.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .startObject(Detector.DETECTOR_INDEX.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .endObject() - .endObject() - - .startObject(AnalysisConfig.INFLUENCERS.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnalysisConfig.MULTIVARIATE_BY_FIELDS.getPreferredName()) - .field(TYPE, BOOLEAN) - .endObject() - .endObject() - .endObject() - - .startObject(Job.ANALYSIS_LIMITS.getPreferredName()) - .startObject(PROPERTIES) - .startObject(AnalysisLimits.MODEL_MEMORY_LIMIT.getPreferredName()) - .field(TYPE, KEYWORD) // TODO 
Should be a ByteSizeValue - .endObject() - .startObject(AnalysisLimits.CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .endObject() - .endObject() - - .startObject(Job.CREATE_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - - .startObject(Job.CUSTOM_SETTINGS.getPreferredName()) - // Custom settings are an untyped map - .field(ENABLED, false) - .endObject() - - .startObject(Job.DATA_DESCRIPTION.getPreferredName()) - .startObject(PROPERTIES) - .startObject(DataDescription.FORMAT_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DataDescription.TIME_FIELD_NAME_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DataDescription.TIME_FORMAT_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DataDescription.FIELD_DELIMITER_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DataDescription.QUOTE_CHARACTER_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - - .startObject(Job.DESCRIPTION.getPreferredName()) - .field(TYPE, TEXT) - .endObject() - .startObject(Job.FINISHED_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - - .startObject(Job.MODEL_PLOT_CONFIG.getPreferredName()) - .startObject(PROPERTIES) - .startObject(ModelPlotConfig.ENABLED_FIELD.getPreferredName()) - .field(TYPE, BOOLEAN) - .endObject() - .startObject(ModelPlotConfig.TERMS_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - - .startObject(Job.RENORMALIZATION_WINDOW_DAYS.getPreferredName()) - .field(TYPE, LONG) // TODO should be TimeValue - .endObject() - .startObject(Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName()) - .field(TYPE, LONG) // TODO should be TimeValue - .endObject() - 
.startObject(Job.RESULTS_RETENTION_DAYS.getPreferredName()) - .field(TYPE, LONG) // TODO should be TimeValue - .endObject() - .startObject(Job.MODEL_SNAPSHOT_ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.MODEL_SNAPSHOT_MIN_VERSION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Job.RESULTS_INDEX_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject(); - } - - public static void addDatafeedConfigFields(XContentBuilder builder) throws IOException { - builder.startObject(DatafeedConfig.ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DatafeedConfig.QUERY_DELAY.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DatafeedConfig.FREQUENCY.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DatafeedConfig.INDICES.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DatafeedConfig.QUERY.getPreferredName()) - .field(ENABLED, false) - .endObject() - .startObject(DatafeedConfig.SCROLL_SIZE.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DatafeedConfig.AGGREGATIONS.getPreferredName()) - .field(ENABLED, false) - .endObject() - .startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName()) - .field(ENABLED, false) - .endObject() - .startObject(DatafeedConfig.CHUNKING_CONFIG.getPreferredName()) - .startObject(PROPERTIES) - .startObject(ChunkingConfig.MODE_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ChunkingConfig.TIME_SPAN_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - .startObject(DatafeedConfig.DELAYED_DATA_CHECK_CONFIG.getPreferredName()) - .startObject(PROPERTIES) - .startObject(DelayedDataCheckConfig.ENABLED.getPreferredName()) - .field(TYPE, BOOLEAN) - .endObject() - .startObject(DelayedDataCheckConfig.CHECK_WINDOW.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - 
.startObject(DatafeedConfig.HEADERS.getPreferredName()) - .field(ENABLED, false) - .endObject(); - } - - /** - * {@link DataFrameAnalyticsConfig} mapping. - * Does not include mapping for CREATE_TIME as this mapping is added by {@link #addJobConfigFields} method. - */ - public static void addDataFrameAnalyticsFields(XContentBuilder builder) throws IOException { - builder.startObject(DataFrameAnalyticsConfig.ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DataFrameAnalyticsConfig.SOURCE.getPreferredName()) - .startObject(PROPERTIES) - .startObject(DataFrameAnalyticsSource.INDEX.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DataFrameAnalyticsSource.QUERY.getPreferredName()) - .field(ENABLED, false) - .endObject() - .startObject(DataFrameAnalyticsSource._SOURCE.getPreferredName()) - .field(ENABLED, false) - .endObject() - .endObject() - .endObject() - .startObject(DataFrameAnalyticsConfig.DEST.getPreferredName()) - .startObject(PROPERTIES) - .startObject(DataFrameAnalyticsDest.INDEX.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(DataFrameAnalyticsDest.RESULTS_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - .startObject(DataFrameAnalyticsConfig.ANALYZED_FIELDS.getPreferredName()) - .field(ENABLED, false) - .endObject() - .startObject(DataFrameAnalyticsConfig.ANALYSIS.getPreferredName()) - .startObject(PROPERTIES) - .startObject(OutlierDetection.NAME.getPreferredName()) - .startObject(PROPERTIES) - .startObject(OutlierDetection.N_NEIGHBORS.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .startObject(OutlierDetection.METHOD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(OutlierDetection.FEATURE_INFLUENCE_THRESHOLD.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .endObject() - .endObject() - .startObject(Regression.NAME.getPreferredName()) - .startObject(PROPERTIES) - 
.startObject(Regression.DEPENDENT_VARIABLE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(BoostedTreeParams.LAMBDA.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.GAMMA.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.ETA.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.MAXIMUM_NUMBER_TREES.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .startObject(BoostedTreeParams.FEATURE_BAG_FRACTION.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .startObject(Regression.PREDICTION_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Regression.TRAINING_PERCENT.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .endObject() - .endObject() - .startObject(Classification.NAME.getPreferredName()) - .startObject(PROPERTIES) - .startObject(Classification.DEPENDENT_VARIABLE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(BoostedTreeParams.LAMBDA.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.GAMMA.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.ETA.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.MAXIMUM_NUMBER_TREES.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .startObject(BoostedTreeParams.FEATURE_BAG_FRACTION.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BoostedTreeParams.NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .startObject(Classification.PREDICTION_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Classification.NUM_TOP_CLASSES.getPreferredName()) - 
.field(TYPE, INTEGER) - .endObject() - .startObject(Classification.TRAINING_PERCENT.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - // re-used: CREATE_TIME - .startObject(DataFrameAnalyticsConfig.VERSION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject(); - } - - /** - * Creates a default mapping which has a dynamic template that - * treats all dynamically added fields as keywords. This is needed - * so that the per-job term fields will not be automatically added - * as fields of type 'text' to the index mappings of newly rolled indices. - * - * @throws IOException On write error - */ - public static void addDefaultMapping(XContentBuilder builder) throws IOException { - builder.startArray("dynamic_templates") - .startObject() - .startObject("strings_as_keywords") - .field("match", "*") - .startObject("mapping") - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - .endArray(); - } - - /** - * Inserts "_meta" containing useful information like the version into the mapping - * template. 
- * - * @param builder The builder for the mappings - * @throws IOException On write error - */ - public static void addMetaInformation(XContentBuilder builder) throws IOException { - builder.startObject("_meta") - .field("version", Version.CURRENT) - .endObject(); - } - - public static XContentBuilder resultsMapping(String mappingType) throws IOException { - return resultsMapping(mappingType, Collections.emptyList()); - } - - public static XContentBuilder resultsMapping(String mappingType, Collection extraTermFields) throws IOException { - XContentBuilder builder = jsonBuilder(); - builder.startObject(); - builder.startObject(mappingType); - addMetaInformation(builder); - addDefaultMapping(builder); - builder.startObject(PROPERTIES); - - // Add result all field for easy searches in kibana - builder.startObject(ALL_FIELD_VALUES) - .field(TYPE, TEXT) - .field(ANALYZER, WHITESPACE) - .endObject(); - - builder.startObject(Job.ID.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject(); - - builder.startObject(Result.TIMESTAMP.getPreferredName()) - .field(TYPE, DATE) - .endObject(); - - addResultsMapping(builder); - addCategoryDefinitionMapping(builder); - addDataCountsMapping(builder); - addTimingStatsExceptBucketCountMapping(builder); - addDatafeedTimingStats(builder); - addModelSnapshotMapping(builder); - - addTermFields(builder, extraTermFields); - - // end properties - builder.endObject(); - // end type - builder.endObject(); - // end mapping - builder.endObject(); - - - return builder; - } - - /** - * Create the Elasticsearch mapping for results objects - * {@link Bucket}s, {@link AnomalyRecord}s, {@link Influencer} and - * {@link BucketInfluencer} - * - * The mapping has a custom all field containing the *_FIELD_VALUE fields - * e.g. BY_FIELD_VALUE, OVER_FIELD_VALUE, etc. The custom all field {@link #ALL_FIELD_VALUES} - * must be set in the index settings. 
A custom all field is preferred over the usual - * '_all' field as most fields do not belong in '_all', disabling '_all' and - * using a custom all field simplifies the mapping. - * - * These fields are copied to the custom all field - *
    - *
  • by_field_value
  • - *
  • partition_field_value
  • - *
  • over_field_value
  • - *
  • AnomalyCause.correlated_by_field_value
  • - *
  • AnomalyCause.by_field_value
  • - *
  • AnomalyCause.partition_field_value
  • - *
  • AnomalyCause.over_field_value
  • - *
  • AnomalyRecord.Influencers.influencer_field_values
  • - *
  • Influencer.influencer_field_value
  • - *
- * - * @throws IOException On write error - */ - private static void addResultsMapping(XContentBuilder builder) throws IOException { - builder.startObject(Result.RESULT_TYPE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Bucket.ANOMALY_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BucketInfluencer.RAW_ANOMALY_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Bucket.INITIAL_ANOMALY_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Result.IS_INTERIM.getPreferredName()) - .field(TYPE, BOOLEAN) - .endObject() - .startObject(Bucket.EVENT_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(Bucket.BUCKET_SPAN.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(Bucket.PROCESSING_TIME_MS.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(Bucket.SCHEDULED_EVENTS.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - - .startObject(Bucket.BUCKET_INFLUENCERS.getPreferredName()) - .field(TYPE, NESTED) - .startObject(PROPERTIES) - .startObject(Job.ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Result.RESULT_TYPE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(BucketInfluencer.INFLUENCER_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(BucketInfluencer.INITIAL_ANOMALY_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BucketInfluencer.ANOMALY_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BucketInfluencer.RAW_ANOMALY_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(BucketInfluencer.PROBABILITY.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Result.TIMESTAMP.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(BucketInfluencer.BUCKET_SPAN.getPreferredName()) - .field(TYPE, 
LONG) - .endObject() - .startObject(Result.IS_INTERIM.getPreferredName()) - .field(TYPE, BOOLEAN) - .endObject() - .endObject() - .endObject() - - // Model Plot Output - .startObject(ModelPlot.MODEL_FEATURE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ModelPlot.MODEL_LOWER.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(ModelPlot.MODEL_UPPER.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(ModelPlot.MODEL_MEDIAN.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject(); - - addForecastFieldsToMapping(builder); - addAnomalyRecordFieldsToMapping(builder); - addInfluencerFieldsToMapping(builder); - addModelSizeStatsFieldsToMapping(builder); - } - - /** - * Generate a keyword mapping for {@code termFields} for the default type - * {@link org.elasticsearch.index.mapper.MapperService#SINGLE_MAPPING_NAME} - * - * If the returned mapping is used in index creation and the new index has a matching template - * then the mapping type ({@link org.elasticsearch.index.mapper.MapperService#SINGLE_MAPPING_NAME}) - * must match the mapping type of the template otherwise the mappings will not be merged correctly. 
- * - * @param termFields Fields to generate mapping for - * @return The mapping - */ - public static XContentBuilder termFieldsMapping(Collection termFields) { - try { - XContentBuilder builder = jsonBuilder().startObject(); - builder.startObject(SINGLE_MAPPING_NAME); - builder.startObject(PROPERTIES); - addTermFields(builder, termFields); - builder.endObject(); - builder.endObject(); - return builder.endObject(); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private static void addTermFields(XContentBuilder builder, Collection termFields) throws IOException { - for (String fieldName : termFields) { - if (ReservedFieldNames.isValidFieldName(fieldName)) { - builder.startObject(fieldName).field(TYPE, KEYWORD).endObject(); - } - } - } - - private static void addForecastFieldsToMapping(XContentBuilder builder) throws IOException { - - // Forecast Output - builder.startObject(Forecast.FORECAST_LOWER.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Forecast.FORECAST_UPPER.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Forecast.FORECAST_PREDICTION.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Forecast.FORECAST_ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject(); - - // Forecast Stats Output - // re-used: TIMESTAMP, PROCESSING_TIME_MS, PROCESSED_RECORD_COUNT, LATEST_RECORD_TIME - builder.startObject(ForecastRequestStats.START_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(ForecastRequestStats.END_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(ForecastRequestStats.CREATE_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(ForecastRequestStats.EXPIRY_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(ForecastRequestStats.MESSAGES.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ForecastRequestStats.PROGRESS.getPreferredName()) - 
.field(TYPE, DOUBLE) - .endObject() - .startObject(ForecastRequestStats.STATUS.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ForecastRequestStats.MEMORY_USAGE.getPreferredName()) - .field(TYPE, LONG) - .endObject(); - } - - /** - * AnomalyRecord fields to be added under the 'properties' section of the mapping - * @param builder Add properties to this builder - * @throws IOException On write error - */ - private static void addAnomalyRecordFieldsToMapping(XContentBuilder builder) throws IOException { - builder.startObject(Detector.DETECTOR_INDEX.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .startObject(AnomalyRecord.ACTUAL.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyRecord.TYPICAL.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyRecord.PROBABILITY.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyRecord.MULTI_BUCKET_IMPACT.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyRecord.FUNCTION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyRecord.FUNCTION_DESCRIPTION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyRecord.BY_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyRecord.BY_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .startObject(AnomalyRecord.FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyRecord.PARTITION_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .startObject(AnomalyRecord.OVER_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - 
.startObject(AnomalyRecord.OVER_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .startObject(AnomalyRecord.RECORD_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyRecord.INITIAL_RECORD_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyRecord.CAUSES.getPreferredName()) - .field(TYPE, NESTED) - .startObject(PROPERTIES) - .startObject(AnomalyCause.ACTUAL.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyCause.TYPICAL.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyCause.PROBABILITY.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(AnomalyCause.FUNCTION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyCause.FUNCTION_DESCRIPTION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyCause.BY_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyCause.BY_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .startObject(AnomalyCause.CORRELATED_BY_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .startObject(AnomalyCause.FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyCause.PARTITION_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyCause.PARTITION_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .startObject(AnomalyCause.OVER_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyCause.OVER_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .startObject(AnomalyCause.GEO_RESULTS.getPreferredName()) - 
.startObject(PROPERTIES) - .startObject(GeoResults.ACTUAL_POINT.getPreferredName()) - .field(TYPE, GEO_POINT) - .endObject() - .startObject(GeoResults.TYPICAL_POINT.getPreferredName()) - .field(TYPE, GEO_POINT) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .startObject(AnomalyRecord.INFLUENCERS.getPreferredName()) - /* Array of influences */ - .field(TYPE, NESTED) - .startObject(PROPERTIES) - .startObject(Influence.INFLUENCER_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Influence.INFLUENCER_FIELD_VALUES.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject() - .endObject() - .endObject() - .startObject(AnomalyRecord.GEO_RESULTS.getPreferredName()) - .startObject(PROPERTIES) - .startObject(GeoResults.ACTUAL_POINT.getPreferredName()) - .field(TYPE, GEO_POINT) - .endObject() - .startObject(GeoResults.TYPICAL_POINT.getPreferredName()) - .field(TYPE, GEO_POINT) - .endObject() - .endObject() - .endObject(); - } - - private static void addInfluencerFieldsToMapping(XContentBuilder builder) throws IOException { - builder.startObject(Influencer.INFLUENCER_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Influencer.INITIAL_INFLUENCER_SCORE.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(Influencer.INFLUENCER_FIELD_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Influencer.INFLUENCER_FIELD_VALUE.getPreferredName()) - .field(TYPE, KEYWORD) - .field(COPY_TO, ALL_FIELD_VALUES) - .endObject(); - } - - /** - * {@link DataCounts} mapping. 
- * - * @throws IOException On builder write error - */ - private static void addDataCountsMapping(XContentBuilder builder) throws IOException { - builder.startObject(DataCounts.PROCESSED_RECORD_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.PROCESSED_FIELD_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.INPUT_BYTES.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.INPUT_RECORD_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.INPUT_FIELD_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.INVALID_DATE_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.MISSING_FIELD_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.OUT_OF_ORDER_TIME_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.EMPTY_BUCKET_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.SPARSE_BUCKET_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.BUCKET_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataCounts.EARLIEST_RECORD_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(DataCounts.LATEST_RECORD_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(DataCounts.LATEST_EMPTY_BUCKET_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(DataCounts.LATEST_SPARSE_BUCKET_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(DataCounts.LAST_DATA_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject(); - } - - /** - * {@link TimingStats} mapping. - * Does not include mapping for BUCKET_COUNT as this mapping is added by {@link #addDataCountsMapping} method. 
- * - * @throws IOException On builder write error - */ - private static void addTimingStatsExceptBucketCountMapping(XContentBuilder builder) throws IOException { - builder - // re-used: BUCKET_COUNT - .startObject(TimingStats.MIN_BUCKET_PROCESSING_TIME_MS.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(TimingStats.MAX_BUCKET_PROCESSING_TIME_MS.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(TimingStats.AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(TimingStats.EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(TimingStats.EXPONENTIAL_AVG_CALCULATION_CONTEXT.getPreferredName()) - .startObject(PROPERTIES) - .startObject(ExponentialAverageCalculationContext.INCREMENTAL_METRIC_VALUE_MS.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .startObject(ExponentialAverageCalculationContext.LATEST_TIMESTAMP.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(ExponentialAverageCalculationContext.PREVIOUS_EXPONENTIAL_AVERAGE_MS.getPreferredName()) - .field(TYPE, DOUBLE) - .endObject() - .endObject() - .endObject(); - } - - /** - * {@link DatafeedTimingStats} mapping. - * Does not include mapping for BUCKET_COUNT as this mapping is added by {@link #addDataCountsMapping} method. - * Does not include mapping for EXPONENTIAL_AVG_CALCULATION_CONTEXT as this mapping is added by - * {@link #addTimingStatsExceptBucketCountMapping} method. 
- * - * @throws IOException On builder write error - */ - private static void addDatafeedTimingStats(XContentBuilder builder) throws IOException { - builder - .startObject(DatafeedTimingStats.SEARCH_COUNT.getPreferredName()) - .field(TYPE, LONG) - .endObject() - // re-used: BUCKET_COUNT - .startObject(DatafeedTimingStats.TOTAL_SEARCH_TIME_MS.getPreferredName()) - .field(TYPE, DOUBLE) - // re-used: EXPONENTIAL_AVG_CALCULATION_CONTEXT - .endObject(); - } - - /** - * Create the Elasticsearch mapping for {@linkplain CategoryDefinition}. - * The '_all' field is disabled as the document isn't meant to be searched. - * - * @throws IOException On builder error - */ - private static void addCategoryDefinitionMapping(XContentBuilder builder) throws IOException { - builder.startObject(CategoryDefinition.CATEGORY_ID.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(CategoryDefinition.TERMS.getPreferredName()) - .field(TYPE, TEXT) - .endObject() - .startObject(CategoryDefinition.REGEX.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(CategoryDefinition.EXAMPLES.getPreferredName()) - .field(TYPE, TEXT) - .endObject(); - } - - /** - * Create the Elasticsearch mapping for state. State could potentially be - * huge (target document size is 16MB and there can be many documents) so all - * analysis by Elasticsearch is disabled. The only way to retrieve state is - * by knowing the ID of a particular document. - */ - public static XContentBuilder stateMapping() throws IOException { - XContentBuilder builder = jsonBuilder(); - builder.startObject(); - builder.startObject(SINGLE_MAPPING_NAME); - addMetaInformation(builder); - builder.field(ENABLED, false); - builder.endObject(); - builder.endObject(); - - return builder; - } - - /** - * Create the Elasticsearch mapping for {@linkplain ModelSnapshot}. 
- * The '_all' field is disabled but the type is searchable - */ - private static void addModelSnapshotMapping(XContentBuilder builder) throws IOException { - builder.startObject(ModelSnapshot.DESCRIPTION.getPreferredName()) - .field(TYPE, TEXT) - .endObject() - .startObject(ModelSnapshotField.SNAPSHOT_ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ModelSnapshot.SNAPSHOT_DOC_COUNT.getPreferredName()) - .field(TYPE, INTEGER) - .endObject() - .startObject(ModelSnapshot.RETAIN.getPreferredName()) - .field(TYPE, BOOLEAN) - .endObject() - .startObject(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName()) - .startObject(PROPERTIES) - .startObject(Job.ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(Result.RESULT_TYPE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ModelSizeStats.TIMESTAMP_FIELD.getPreferredName()) - .field(TYPE, DATE) - .endObject(); - - addModelSizeStatsFieldsToMapping(builder); - - // end model size stats properties - builder.endObject(); - // end model size stats mapping - builder.endObject(); - - builder.startObject(ModelSnapshot.QUANTILES.getPreferredName()) - .field(ENABLED, false) - .endObject().startObject(ModelSnapshot.MIN_VERSION.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ModelSnapshot.LATEST_RECORD_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(ModelSnapshot.LATEST_RESULT_TIME.getPreferredName()) - .field(TYPE, DATE) - .endObject(); - } - - /** - * {@link ModelSizeStats} fields to be added under the 'properties' section of the mapping - * @param builder Add properties to this builder - * @throws IOException On write error - */ - private static void addModelSizeStatsFieldsToMapping(XContentBuilder builder) throws IOException { - builder.startObject(ModelSizeStats.MODEL_BYTES_FIELD.getPreferredName()) - .field(TYPE, LONG) - .endObject() - 
.startObject(ModelSizeStats.TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(ModelSizeStats.TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(ModelSizeStats.TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(ModelSizeStats.BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(ModelSizeStats.MEMORY_STATUS_FIELD.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(ModelSizeStats.LOG_TIME_FIELD.getPreferredName()) - .field(TYPE, DATE) - .endObject(); - } - - public static XContentBuilder auditMessageMapping() throws IOException { - XContentBuilder builder = jsonBuilder().startObject(); - builder.startObject(SINGLE_MAPPING_NAME); - addMetaInformation(builder); - builder.field(DYNAMIC, "false"); - builder.startObject(PROPERTIES) - .startObject(Job.ID.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyDetectionAuditMessage.LEVEL.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyDetectionAuditMessage.MESSAGE.getPreferredName()) - .field(TYPE, TEXT) - .startObject(FIELDS) - .startObject(RAW) - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - .startObject(AnomalyDetectionAuditMessage.TIMESTAMP.getPreferredName()) - .field(TYPE, DATE) - .endObject() - .startObject(AnomalyDetectionAuditMessage.NODE_NAME.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .startObject(AnomalyDetectionAuditMessage.JOB_TYPE.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() - .endObject() - .endObject() - .endObject(); - - return builder; - } - static String[] mappingRequiresUpdate(ClusterState state, String[] concreteIndices, Version minVersion) throws IOException { List indicesToUpdate = new ArrayList<>(); @@ -1266,7 +146,7 @@ public class ElasticsearchMappings { } public 
static void addDocMappingIfMissing(String alias, - CheckedFunction mappingSupplier, + CheckedFunction mappingSupplier, Client client, ClusterState state, ActionListener listener) { AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(alias); if (aliasOrIndex == null) { @@ -1290,10 +170,11 @@ public class ElasticsearchMappings { IndexMetaData indexMetaData = state.metaData().index(indicesThatRequireAnUpdate[0]); String mappingType = indexMetaData.mapping().type(); - try (XContentBuilder mapping = mappingSupplier.apply(mappingType)) { + try { + String mapping = mappingSupplier.apply(mappingType); PutMappingRequest putMappingRequest = new PutMappingRequest(indicesThatRequireAnUpdate); putMappingRequest.type(mappingType); - putMappingRequest.source(mapping); + putMappingRequest.source(mapping, XContentType.JSON); executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, ActionListener.wrap(response -> { if (response.isAcknowledged()) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java index 23075b2b9df..890ecbb87e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java @@ -234,6 +234,7 @@ public final class ReservedFieldNames { Job.MODEL_SNAPSHOT_ID.getPreferredName(), Job.MODEL_SNAPSHOT_MIN_VERSION.getPreferredName(), Job.RESULTS_INDEX_NAME.getPreferredName(), + Job.ALLOW_LAZY_OPEN.getPreferredName(), AnalysisConfig.BUCKET_SPAN.getPreferredName(), AnalysisConfig.CATEGORIZATION_FIELD_NAME.getPreferredName(), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditorField.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java similarity index 83% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditorField.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java index 307ff01fa45..3535d33b8c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditorField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.ml.notifications; -public final class AuditorField { +public final class NotificationsIndex { public static final String NOTIFICATIONS_INDEX = ".ml-notifications-000001"; - private AuditorField() {} + private NotificationsIndex() {} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java index 9d4a941a24c..181d3fea2f4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java @@ -17,12 +17,11 @@ import org.elasticsearch.xpack.core.template.TemplateUtils; import java.io.IOException; import java.time.Instant; import java.util.Locale; -import java.util.regex.Pattern; public final class MonitoringTemplateUtils { private static final String TEMPLATE_FILE = "/monitoring-%s.json"; - private static final String TEMPLATE_VERSION_PROPERTY = Pattern.quote("${monitoring.template.version}"); + private static final String TEMPLATE_VERSION_PROPERTY = "monitoring.template.version"; /** * The last version of X-Pack that updated the templates and pipelines. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index d8265601085..a6744dc9df7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -113,7 +113,9 @@ public class ReservedRolesStore implements BiConsumer, ActionListene .put(KibanaUser.ROLE_NAME, new RoleDescriptor(KibanaUser.ROLE_NAME, new String[] { "monitor", "manage_index_templates", MonitoringBulkAction.NAME, "manage_saml", "manage_token", "manage_oidc", - GetBuiltinPrivilegesAction.NAME, "delegate_pki", GetLifecycleAction.NAME, PutLifecycleAction.NAME + GetBuiltinPrivilegesAction.NAME, "delegate_pki", GetLifecycleAction.NAME, PutLifecycleAction.NAME, + // The symbolic constant for this one is in SecurityActionMapper, so not accessible from X-Pack core + "cluster:admin/analyze" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java index 6d1c7f087f9..74dc7ac3158 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.core.ssl.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -30,9 +28,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; */ public class RestGetCertificateInfoAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetCertificateInfoAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -40,7 +35,7 @@ public class RestGetCertificateInfoAction extends BaseRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/_ssl/certificates", GET, "/_xpack/ssl/certificates", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/_ssl/certificates", GET, "/_xpack/ssl/certificates")); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateConfig.java index 4565f7a93b1..315ea79af40 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateConfig.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.core.template; import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; import java.util.regex.Pattern; /** @@ -18,6 +21,7 @@ public class IndexTemplateConfig { private final String fileName; private final int version; private final String versionProperty; + private final Map variables; /** * Describes a template to be loaded from a resource file. Includes handling for substituting a version property into the template. @@ -38,10 +42,33 @@ public class IndexTemplateConfig { * @param versionProperty The property that will be replaced with the {@code version} string as described above. 
*/ public IndexTemplateConfig(String templateName, String fileName, int version, String versionProperty) { + this(templateName, fileName, version, versionProperty, Collections.emptyMap()); + } + + /** + * Describes a template to be loaded from a resource file. Includes handling for substituting a version property into the template. + * + * The {@code versionProperty} parameter will be used to substitute the value of {@code version} into the template. For example, + * this template: + * {@code {"myTemplateVersion": "${my.version.property}"}} + * With {@code version = "42"; versionProperty = "my.version.property"} will result in {@code {"myTemplateVersion": "42"}}. + * + * @param templateName The name that will be used for the index template. Literal, include the version in this string if + * it should be used. + * @param fileName The filename the template should be loaded from. Literal, should include leading {@literal /} and + * extension if necessary. + * @param version The version of the template. Substituted for {@code versionProperty} as described above. + * @param versionProperty The property that will be replaced with the {@code version} string as described above. + * @param variables A map of additional variable substitutions. The map's keys are the variable names. + * The corresponding values will replace the variable names. + */ + public IndexTemplateConfig(String templateName, String fileName, int version, String versionProperty, Map variables) + { this.templateName = templateName; this.fileName = fileName; this.version = version; this.versionProperty = versionProperty; + this.variables = Objects.requireNonNull(variables); } public String getFileName() { @@ -61,8 +88,7 @@ public class IndexTemplateConfig { * @return The template as a UTF-8 byte array. 
*/ public byte[] loadBytes() { - final String versionPattern = Pattern.quote("${" + versionProperty + "}"); - String template = TemplateUtils.loadTemplate(fileName, Integer.toString(version), versionPattern); + String template = TemplateUtils.loadTemplate(fileName, Integer.toString(version), versionProperty, variables); assert template != null && template.length() > 0; assert Pattern.compile("\"version\"\\s*:\\s*" + version).matcher(template).find() : "index template must have a version property set to the given version property"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java index f5d95204a79..79802d08ee4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java @@ -119,6 +119,12 @@ public abstract class IndexTemplateRegistry implements ClusterStateListener { return; } + // This registry requires to run on a master node. + // If not a master node, exit. + if (requiresMasterNode() && state.nodes().isLocalNodeElectedMaster() == false) { + return; + } + // if this node is newer than the master node, we probably need to add the template, which might be newer than the // template the master node has, so we need potentially add new templates despite being not the master node DiscoveryNode localNode = event.state().getNodes().getLocalNode(); @@ -130,6 +136,15 @@ public abstract class IndexTemplateRegistry implements ClusterStateListener { } } + /** + * Whether the registry should only apply changes when running on the master node. + * This is useful for plugins where certain actions are performed on master nodes + * and the templates should match the respective version. 
+ */ + protected boolean requiresMasterNode() { + return false; + } + private void addTemplatesIfMissing(ClusterState state) { final List indexTemplates = getTemplateConfigs(); for (IndexTemplateConfig newTemplate : indexTemplates) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java index 3c7e9cdfb90..57e0cce1d1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java @@ -11,11 +11,11 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.compress.NotXContentException; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -23,8 +23,10 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; import java.io.IOException; +import java.util.Collections; import java.util.Map; import java.util.function.Predicate; import java.util.regex.Pattern; @@ -57,12 +59,18 @@ public class TemplateUtils { * Loads a built-in template and returns its source. 
*/ public static String loadTemplate(String resource, String version, String versionProperty) { - try { - BytesReference source = load(resource); - final String filteredJson = filter(source, version, versionProperty); - validate(new BytesArray(filteredJson)); + return loadTemplate(resource, version, versionProperty, Collections.emptyMap()); + } - return filteredJson; + /** + * Loads a built-in template and returns its source after replacing given variables. + */ + public static String loadTemplate(String resource, String version, String versionProperty, Map variables) { + try { + String source = load(resource); + source = replaceVariables(source, version, versionProperty, variables); + validate(source); + return source; } catch (Exception e) { throw new IllegalArgumentException("Unable to load template [" + resource + "]", e); } @@ -71,34 +79,43 @@ public class TemplateUtils { /** * Loads a resource from the classpath and returns it as a {@link BytesReference} */ - public static BytesReference load(String name) throws IOException { - return Streams.readFully(TemplateUtils.class.getResourceAsStream(name)); + public static String load(String name) throws IOException { + return Streams.readFully(TemplateUtils.class.getResourceAsStream(name)).utf8ToString(); } /** * Parses and validates that the source is not empty. 
*/ - public static void validate(BytesReference source) { + public static void validate(String source) { if (source == null) { throw new ElasticsearchParseException("Template must not be null"); } + if (Strings.isEmpty(source)) { + throw new ElasticsearchParseException("Template must not be empty"); + } try { - XContentHelper.convertToMap(source, false, XContentType.JSON).v2(); - } catch (NotXContentException e) { - throw new ElasticsearchParseException("Template must not be empty"); + XContentHelper.convertToMap(JsonXContent.jsonXContent, source, false); } catch (Exception e) { throw new ElasticsearchParseException("Invalid template", e); } } + private static String replaceVariables(String input, String version, String versionProperty, Map variables) { + String template = replaceVariable(input, versionProperty, version); + for (Map.Entry variable : variables.entrySet()) { + template = replaceVariable(template, variable.getKey(), variable.getValue()); + } + return template; + } + /** - * Filters the source: replaces any template version property with the version number + * Replaces all occurrences of given variable with the value */ - public static String filter(BytesReference source, String version, String versionProperty) { - return Pattern.compile(versionProperty) - .matcher(source.utf8ToString()) - .replaceAll(version); + public static String replaceVariable(String input, String variable, String value) { + return Pattern.compile("${" + variable + "}", Pattern.LITERAL) + .matcher(input) + .replaceAll(value); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java index 7a513bc4d5f..452ca902056 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.transform.transforms; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -36,23 +37,34 @@ public class TransformIndexerStats extends IndexerJobStats { public static ParseField SEARCH_TOTAL = new ParseField("search_total"); public static ParseField SEARCH_FAILURES = new ParseField("search_failures"); public static ParseField INDEX_FAILURES = new ParseField("index_failures"); - public static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = - new ParseField("exponential_avg_checkpoint_duration_ms"); - public static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = - new ParseField("exponential_avg_documents_indexed"); - public static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = - new ParseField("exponential_avg_documents_processed"); + public static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms"); + public static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed"); + public static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed"); // This changes how much "weight" past calculations have. // The shorter the window, the less "smoothing" will occur. 
private static final int EXP_AVG_WINDOW = 10; - private static final double ALPHA = 2.0/(EXP_AVG_WINDOW + 1); + private static final double ALPHA = 2.0 / (EXP_AVG_WINDOW + 1); private static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>( - NAME, true, - args -> new TransformIndexerStats( - (long) args[0], (long) args[1], (long) args[2], (long) args[3], (long) args[4], (long) args[5], (long) args[6], - (long) args[7], (long) args[8], (long) args[9], (Double) args[10], (Double) args[11], (Double) args[12])); + NAME, + true, + args -> new TransformIndexerStats( + (long) args[0], + (long) args[1], + (long) args[2], + (long) args[3], + (long) args[4], + (long) args[5], + (long) args[6], + (long) args[7], + (long) args[8], + (long) args[9], + (Double) args[10], + (Double) args[11], + (Double) args[12] + ) + ); static { LENIENT_PARSER.declareLong(constructorArg(), NUM_PAGES); @@ -73,6 +85,7 @@ public class TransformIndexerStats extends IndexerJobStats { private double expAvgCheckpointDurationMs; private double expAvgDocumentsIndexed; private double expAvgDocumentsProcessed; + /** * Create with all stats set to zero */ @@ -80,30 +93,54 @@ public class TransformIndexerStats extends IndexerJobStats { super(); } - public TransformIndexerStats(long numPages, long numInputDocuments, long numOutputDocuments, - long numInvocations, long indexTime, long searchTime, long indexTotal, long searchTotal, - long indexFailures, long searchFailures, Double expAvgCheckpointDurationMs, - Double expAvgDocumentsIndexed, Double expAvgDocumentsProcessed ) { - super(numPages, numInputDocuments, numOutputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal, - indexFailures, searchFailures); + public TransformIndexerStats( + long numPages, + long numInputDocuments, + long numOutputDocuments, + long numInvocations, + long indexTime, + long searchTime, + long indexTotal, + long searchTotal, + long indexFailures, + long searchFailures, + Double 
expAvgCheckpointDurationMs, + Double expAvgDocumentsIndexed, + Double expAvgDocumentsProcessed + ) { + super( + numPages, + numInputDocuments, + numOutputDocuments, + numInvocations, + indexTime, + searchTime, + indexTotal, + searchTotal, + indexFailures, + searchFailures + ); this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs == null ? 0.0 : expAvgCheckpointDurationMs; this.expAvgDocumentsIndexed = expAvgDocumentsIndexed == null ? 0.0 : expAvgDocumentsIndexed; this.expAvgDocumentsProcessed = expAvgDocumentsProcessed == null ? 0.0 : expAvgDocumentsProcessed; } - public TransformIndexerStats(long numPages, long numInputDocuments, long numOutputDocuments, - long numInvocations, long indexTime, long searchTime, long indexTotal, long searchTotal, - long indexFailures, long searchFailures) { - this(numPages, numInputDocuments, numOutputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal, - indexFailures, searchFailures, 0.0, 0.0, 0.0); - } - public TransformIndexerStats(TransformIndexerStats other) { - this(other.numPages, other.numInputDocuments, other.numOuputDocuments, other.numInvocations, - other.indexTime, other.searchTime, other.indexTotal, other.searchTotal, other.indexFailures, other.searchFailures); - this.expAvgCheckpointDurationMs = other.expAvgCheckpointDurationMs; - this.expAvgDocumentsIndexed = other.expAvgDocumentsIndexed; - this.expAvgDocumentsProcessed = other.expAvgDocumentsProcessed; + this( + other.numPages, + other.numInputDocuments, + other.numOuputDocuments, + other.numInvocations, + other.indexTime, + other.searchTime, + other.indexTotal, + other.searchTotal, + other.indexFailures, + other.searchFailures, + other.expAvgCheckpointDurationMs, + other.expAvgDocumentsIndexed, + other.expAvgDocumentsProcessed + ); } public TransformIndexerStats(StreamInput in) throws IOException { @@ -180,7 +217,7 @@ public class TransformIndexerStats extends IndexerJobStats { } private double calculateExpAvg(double previousExpValue, 
double alpha, long observedValue) { - return alpha * observedValue + (1-alpha) * previousExpValue; + return alpha * observedValue + (1 - alpha) * previousExpValue; } @Override @@ -212,9 +249,26 @@ public class TransformIndexerStats extends IndexerJobStats { @Override public int hashCode() { - return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations, - indexTime, searchTime, indexFailures, searchFailures, indexTotal, searchTotal, - expAvgCheckpointDurationMs, expAvgDocumentsIndexed, expAvgDocumentsProcessed); + return Objects.hash( + numPages, + numInputDocuments, + numOuputDocuments, + numInvocations, + indexTime, + searchTime, + indexFailures, + searchFailures, + indexTotal, + searchTotal, + expAvgCheckpointDurationMs, + expAvgDocumentsIndexed, + expAvgDocumentsProcessed + ); + } + + @Override + public String toString() { + return Strings.toString(this); } public static TransformIndexerStats fromXContent(XContentParser parser) { diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/annotations_index_mappings.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/annotations_index_mappings.json new file mode 100644 index 00000000000..41c26b0f83d --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/annotations_index_mappings.json @@ -0,0 +1,36 @@ +{ + "_doc": { + "_meta" : { + "version" : "${xpack.ml.version}" + }, + "properties" : { + "annotation" : { + "type" : "text" + }, + "create_time" : { + "type" : "date" + }, + "create_username" : { + "type" : "keyword" + }, + "end_timestamp" : { + "type" : "date" + }, + "job_id" : { + "type" : "keyword" + }, + "modified_time" : { + "type" : "date" + }, + "modified_username" : { + "type" : "keyword" + }, + "timestamp" : { + "type" : "date" + }, + "type" : { + "type" : "keyword" + } + } + } +} diff --git 
a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_mappings.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_mappings.json new file mode 100644 index 00000000000..8ff990d2e50 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_mappings.json @@ -0,0 +1,478 @@ +{ + "${xpack.ml.mapping_type}" : { + "_meta" : { + "version" : "${xpack.ml.version}" + }, + "dynamic_templates" : [ + { + "strings_as_keywords" : { + "match" : "*", + "mapping" : { + "type" : "keyword" + } + } + } + ], + "properties" : { + "actual" : { + "type" : "double" + }, + "all_field_values" : { + "type" : "text", + "analyzer" : "whitespace" + }, + "anomaly_score" : { + "type" : "double" + }, + "average_bucket_processing_time_ms" : { + "type" : "double" + }, + "bucket_allocation_failures_count" : { + "type" : "long" + }, + "bucket_count" : { + "type" : "long" + }, + "bucket_influencers" : { + "type" : "nested", + "properties" : { + "anomaly_score" : { + "type" : "double" + }, + "bucket_span" : { + "type" : "long" + }, + "influencer_field_name" : { + "type" : "keyword" + }, + "initial_anomaly_score" : { + "type" : "double" + }, + "is_interim" : { + "type" : "boolean" + }, + "job_id" : { + "type" : "keyword" + }, + "probability" : { + "type" : "double" + }, + "raw_anomaly_score" : { + "type" : "double" + }, + "result_type" : { + "type" : "keyword" + }, + "timestamp" : { + "type" : "date" + } + } + }, + "bucket_span" : { + "type" : "long" + }, + "by_field_name" : { + "type" : "keyword" + }, + "by_field_value" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "category_id" : { + "type" : "long" + }, + "causes" : { + "type" : "nested", + "properties" : { + "actual" : { + "type" : "double" + }, + "by_field_name" : { + "type" : "keyword" + }, + "by_field_value" : { + "type" : "keyword", + "copy_to" : [ + 
"all_field_values" + ] + }, + "correlated_by_field_value" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "field_name" : { + "type" : "keyword" + }, + "function" : { + "type" : "keyword" + }, + "function_description" : { + "type" : "keyword" + }, + "geo_results" : { + "properties" : { + "actual_point" : { + "type" : "geo_point" + }, + "typical_point" : { + "type" : "geo_point" + } + } + }, + "over_field_name" : { + "type" : "keyword" + }, + "over_field_value" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "partition_field_name" : { + "type" : "keyword" + }, + "partition_field_value" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "probability" : { + "type" : "double" + }, + "typical" : { + "type" : "double" + } + } + }, + "description" : { + "type" : "text" + }, + "detector_index" : { + "type" : "integer" + }, + "earliest_record_timestamp" : { + "type" : "date" + }, + "empty_bucket_count" : { + "type" : "long" + }, + "event_count" : { + "type" : "long" + }, + "examples" : { + "type" : "text" + }, + "exponential_average_bucket_processing_time_ms" : { + "type" : "double" + }, + "exponential_average_calculation_context" : { + "properties" : { + "incremental_metric_value_ms" : { + "type" : "double" + }, + "latest_timestamp" : { + "type" : "date" + }, + "previous_exponential_average_ms" : { + "type" : "double" + } + } + }, + "field_name" : { + "type" : "keyword" + }, + "forecast_create_timestamp" : { + "type" : "date" + }, + "forecast_end_timestamp" : { + "type" : "date" + }, + "forecast_expiry_timestamp" : { + "type" : "date" + }, + "forecast_id" : { + "type" : "keyword" + }, + "forecast_lower" : { + "type" : "double" + }, + "forecast_memory_bytes" : { + "type" : "long" + }, + "forecast_messages" : { + "type" : "keyword" + }, + "forecast_prediction" : { + "type" : "double" + }, + "forecast_progress" : { + "type" : "double" + }, + "forecast_start_timestamp" : { + "type" : "date" + }, + 
"forecast_status" : { + "type" : "keyword" + }, + "forecast_upper" : { + "type" : "double" + }, + "function" : { + "type" : "keyword" + }, + "function_description" : { + "type" : "keyword" + }, + "geo_results" : { + "properties" : { + "actual_point" : { + "type" : "geo_point" + }, + "typical_point" : { + "type" : "geo_point" + } + } + }, + "influencer_field_name" : { + "type" : "keyword" + }, + "influencer_field_value" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "influencer_score" : { + "type" : "double" + }, + "influencers" : { + "type" : "nested", + "properties" : { + "influencer_field_name" : { + "type" : "keyword" + }, + "influencer_field_values" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + } + } + }, + "initial_anomaly_score" : { + "type" : "double" + }, + "initial_influencer_score" : { + "type" : "double" + }, + "initial_record_score" : { + "type" : "double" + }, + "input_bytes" : { + "type" : "long" + }, + "input_field_count" : { + "type" : "long" + }, + "input_record_count" : { + "type" : "long" + }, + "invalid_date_count" : { + "type" : "long" + }, + "is_interim" : { + "type" : "boolean" + }, + "job_id" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "last_data_time" : { + "type" : "date" + }, + "latest_empty_bucket_timestamp" : { + "type" : "date" + }, + "latest_record_time_stamp" : { + "type" : "date" + }, + "latest_record_timestamp" : { + "type" : "date" + }, + "latest_result_time_stamp" : { + "type" : "date" + }, + "latest_sparse_bucket_timestamp" : { + "type" : "date" + }, + "log_time" : { + "type" : "date" + }, + "max_matching_length" : { + "type" : "long" + }, + "maximum_bucket_processing_time_ms" : { + "type" : "double" + }, + "memory_status" : { + "type" : "keyword" + }, + "min_version" : { + "type" : "keyword" + }, + "minimum_bucket_processing_time_ms" : { + "type" : "double" + }, + "missing_field_count" : { + "type" : "long" + }, + "model_bytes" : { + "type" : "long" 
+ }, + "model_feature" : { + "type" : "keyword" + }, + "model_lower" : { + "type" : "double" + }, + "model_median" : { + "type" : "double" + }, + "model_size_stats" : { + "properties" : { + "bucket_allocation_failures_count" : { + "type" : "long" + }, + "job_id" : { + "type" : "keyword" + }, + "log_time" : { + "type" : "date" + }, + "memory_status" : { + "type" : "keyword" + }, + "model_bytes" : { + "type" : "long" + }, + "result_type" : { + "type" : "keyword" + }, + "timestamp" : { + "type" : "date" + }, + "total_by_field_count" : { + "type" : "long" + }, + "total_over_field_count" : { + "type" : "long" + }, + "total_partition_field_count" : { + "type" : "long" + } + } + }, + "model_upper" : { + "type" : "double" + }, + "multi_bucket_impact" : { + "type" : "double" + }, + "out_of_order_timestamp_count" : { + "type" : "long" + }, + "over_field_name" : { + "type" : "keyword" + }, + "over_field_value" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "partition_field_name" : { + "type" : "keyword" + }, + "partition_field_value" : { + "type" : "keyword", + "copy_to" : [ + "all_field_values" + ] + }, + "probability" : { + "type" : "double" + }, + "processed_field_count" : { + "type" : "long" + }, + "processed_record_count" : { + "type" : "long" + }, + "processing_time_ms" : { + "type" : "long" + }, + "quantiles" : { + "type" : "object", + "enabled" : false + }, + "raw_anomaly_score" : { + "type" : "double" + }, + "record_score" : { + "type" : "double" + }, + "regex" : { + "type" : "keyword" + }, + "result_type" : { + "type" : "keyword" + }, + "retain" : { + "type" : "boolean" + }, + "scheduled_events" : { + "type" : "keyword" + }, + "search_count" : { + "type" : "long" + }, + "snapshot_doc_count" : { + "type" : "integer" + }, + "snapshot_id" : { + "type" : "keyword" + }, + "sparse_bucket_count" : { + "type" : "long" + }, + "terms" : { + "type" : "text" + }, + "timestamp" : { + "type" : "date" + }, + "total_by_field_count" : { + "type" : "long" + 
}, + "total_over_field_count" : { + "type" : "long" + }, + "total_partition_field_count" : { + "type" : "long" + }, + "total_search_time_ms" : { + "type" : "double" + }, + "typical" : { + "type" : "double" + } + } + } +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_template.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_template.json new file mode 100644 index 00000000000..37a759cba73 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_template.json @@ -0,0 +1,19 @@ +{ + "order" : 0, + "version" : ${xpack.ml.version.id}, + "index_patterns" : [ + ".ml-anomalies-*" + ], + "settings" : { + "index" : { + "translog" : { + "durability" : "async" + }, + "auto_expand_replicas" : "0-1", + "query" : { + "default_field" : "all_field_values" + } + } + }, + "mappings": ${xpack.ml.anomalydetection.results.mappings} +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/state_index_template.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/state_index_template.json new file mode 100644 index 00000000000..39211ae5b20 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/state_index_template.json @@ -0,0 +1,21 @@ +{ + "order" : 0, + "version" : ${xpack.ml.version.id}, + "index_patterns" : [ + ".ml-state*" + ], + "settings" : { + "index" : { + "auto_expand_replicas" : "0-1" + } + }, + "mappings" : { + "_doc": { + "_meta": { + "version": "${xpack.ml.version}" + }, + "enabled": false + } + }, + "aliases" : { } +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_mappings.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_mappings.json new file mode 100644 index 
00000000000..aa7211cf448 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_mappings.json @@ -0,0 +1,364 @@ +{ + "${xpack.ml.mapping_type}" : { + "_meta" : { + "version" : "${xpack.ml.version}" + }, + "dynamic_templates" : [ + { + "strings_as_keywords" : { + "match" : "*", + "mapping" : { + "type" : "keyword" + } + } + } + ], + "properties" : { + "aggregations" : { + "type" : "object", + "enabled" : false + }, + "allow_lazy_open" : { + "type" : "keyword" + }, + "analysis" : { + "properties" : { + "classification" : { + "properties" : { + "dependent_variable" : { + "type" : "keyword" + }, + "eta" : { + "type" : "double" + }, + "feature_bag_fraction" : { + "type" : "double" + }, + "gamma" : { + "type" : "double" + }, + "lambda" : { + "type" : "double" + }, + "maximum_number_trees" : { + "type" : "integer" + }, + "num_top_classes" : { + "type" : "integer" + }, + "num_top_feature_importance_values" : { + "type" : "integer" + }, + "prediction_field_name" : { + "type" : "keyword" + }, + "training_percent" : { + "type" : "double" + } + } + }, + "outlier_detection" : { + "properties" : { + "feature_influence_threshold" : { + "type" : "double" + }, + "method" : { + "type" : "keyword" + }, + "n_neighbors" : { + "type" : "integer" + } + } + }, + "regression" : { + "properties" : { + "dependent_variable" : { + "type" : "keyword" + }, + "eta" : { + "type" : "double" + }, + "feature_bag_fraction" : { + "type" : "double" + }, + "gamma" : { + "type" : "double" + }, + "lambda" : { + "type" : "double" + }, + "maximum_number_trees" : { + "type" : "integer" + }, + "num_top_feature_importance_values" : { + "type" : "integer" + }, + "prediction_field_name" : { + "type" : "keyword" + }, + "training_percent" : { + "type" : "double" + } + } + } + } + }, + "analysis_config" : { + "properties" : { + "bucket_span" : { + "type" : "keyword" + }, + "categorization_analyzer" : { + "type" : "object", + "enabled" : false + }, + 
"categorization_field_name" : { + "type" : "keyword" + }, + "categorization_filters" : { + "type" : "keyword" + }, + "detectors" : { + "properties" : { + "by_field_name" : { + "type" : "keyword" + }, + "custom_rules" : { + "type" : "nested", + "properties" : { + "actions" : { + "type" : "keyword" + }, + "conditions" : { + "type" : "nested", + "properties" : { + "applies_to" : { + "type" : "keyword" + }, + "operator" : { + "type" : "keyword" + }, + "value" : { + "type" : "double" + } + } + }, + "scope" : { + "type" : "object", + "enabled" : false + } + } + }, + "detector_description" : { + "type" : "text" + }, + "detector_index" : { + "type" : "integer" + }, + "exclude_frequent" : { + "type" : "keyword" + }, + "field_name" : { + "type" : "keyword" + }, + "function" : { + "type" : "keyword" + }, + "over_field_name" : { + "type" : "keyword" + }, + "partition_field_name" : { + "type" : "keyword" + }, + "use_null" : { + "type" : "boolean" + } + } + }, + "influencers" : { + "type" : "keyword" + }, + "latency" : { + "type" : "keyword" + }, + "multivariate_by_fields" : { + "type" : "boolean" + }, + "summary_count_field_name" : { + "type" : "keyword" + } + } + }, + "analysis_limits" : { + "properties" : { + "categorization_examples_limit" : { + "type" : "long" + }, + "model_memory_limit" : { + "type" : "keyword" + } + } + }, + "analyzed_fields" : { + "type" : "object", + "enabled" : false + }, + "background_persist_interval" : { + "type" : "keyword" + }, + "chunking_config" : { + "properties" : { + "mode" : { + "type" : "keyword" + }, + "time_span" : { + "type" : "keyword" + } + } + }, + "config_type" : { + "type" : "keyword" + }, + "create_time" : { + "type" : "date" + }, + "custom_settings" : { + "type" : "object", + "enabled" : false + }, + "data_description" : { + "properties" : { + "field_delimiter" : { + "type" : "keyword" + }, + "format" : { + "type" : "keyword" + }, + "quote_character" : { + "type" : "keyword" + }, + "time_field" : { + "type" : "keyword" + }, + 
"time_format" : { + "type" : "keyword" + } + } + }, + "datafeed_id" : { + "type" : "keyword" + }, + "delayed_data_check_config" : { + "properties" : { + "check_window" : { + "type" : "keyword" + }, + "enabled" : { + "type" : "boolean" + } + } + }, + "description" : { + "type" : "text" + }, + "dest" : { + "properties" : { + "index" : { + "type" : "keyword" + }, + "results_field" : { + "type" : "keyword" + } + } + }, + "finished_time" : { + "type" : "date" + }, + "frequency" : { + "type" : "keyword" + }, + "groups" : { + "type" : "keyword" + }, + "headers" : { + "type" : "object", + "enabled" : false + }, + "id" : { + "type" : "keyword" + }, + "indices" : { + "type" : "keyword" + }, + "job_id" : { + "type" : "keyword" + }, + "job_type" : { + "type" : "keyword" + }, + "job_version" : { + "type" : "keyword" + }, + "model_plot_config" : { + "properties" : { + "enabled" : { + "type" : "boolean" + }, + "terms" : { + "type" : "keyword" + } + } + }, + "model_snapshot_id" : { + "type" : "keyword" + }, + "model_snapshot_min_version" : { + "type" : "keyword" + }, + "model_snapshot_retention_days" : { + "type" : "long" + }, + "query" : { + "type" : "object", + "enabled" : false + }, + "query_delay" : { + "type" : "keyword" + }, + "renormalization_window_days" : { + "type" : "long" + }, + "results_index_name" : { + "type" : "keyword" + }, + "results_retention_days" : { + "type" : "long" + }, + "script_fields" : { + "type" : "object", + "enabled" : false + }, + "scroll_size" : { + "type" : "long" + }, + "source" : { + "properties" : { + "_source" : { + "type" : "object", + "enabled" : false + }, + "index" : { + "type" : "keyword" + }, + "query" : { + "type" : "object", + "enabled" : false + } + } + }, + "version" : { + "type" : "keyword" + } + } + } +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_template.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_template.json new file mode 100644 
index 00000000000..8c6c352ab24 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_template.json @@ -0,0 +1,15 @@ +{ + "order" : 0, + "version" : ${xpack.ml.version.id}, + "index_patterns" : [ + ".ml-config" + ], + "settings" : { + "index" : { + "max_result_window" : "${xpack.ml.config.max_result_window}", + "number_of_shards" : "1", + "auto_expand_replicas" : "0-1" + } + }, + "mappings": ${xpack.ml.config.mappings} +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/inference_index_template.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/inference_index_template.json new file mode 100644 index 00000000000..5cbee23ca80 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/inference_index_template.json @@ -0,0 +1,72 @@ +{ + "order" : 0, + "version" : ${xpack.ml.version.id}, + "index_patterns" : [ + ".ml-inference-000001" + ], + "settings" : { + "index" : { + "number_of_shards" : "1", + "auto_expand_replicas" : "0-1" + } + }, + "mappings" : { + "_doc": { + "_meta": { + "version": "${xpack.ml.version}" + }, + "dynamic": "false", + "properties": { + "doc_type": { + "type": "keyword" + }, + "model_id": { + "type": "keyword" + }, + "created_by": { + "type": "keyword" + }, + "input": { + "enabled": false + }, + "version": { + "type": "keyword" + }, + "description": { + "type": "text" + }, + "create_time": { + "type": "date" + }, + "tags": { + "type": "keyword" + }, + "metadata": { + "enabled": false + }, + "estimated_operations": { + "type": "long" + }, + "estimated_heap_memory_usage_bytes": { + "type": "long" + }, + "doc_num": { + "type": "long" + }, + "definition": { + "enabled": false + }, + "compression_version": { + "type": "long" + }, + "definition_length": { + "type": "long" + }, + "total_definition_length": { + "type": "long" + } + } + } + }, + "aliases" : { } +} diff --git 
a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/meta_index_template.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/meta_index_template.json new file mode 100644 index 00000000000..19df45c5250 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/meta_index_template.json @@ -0,0 +1,47 @@ +{ + "order" : 0, + "version" : ${xpack.ml.version.id}, + "index_patterns" : [ + ".ml-meta" + ], + "settings" : { + "index" : { + "number_of_shards" : "1", + "auto_expand_replicas" : "0-1" + } + }, + "mappings" : { + "_doc": { + "_meta": { + "version": "${xpack.ml.version}" + }, + "dynamic_templates": [ + { + "strings_as_keywords": { + "match": "*", + "mapping": { + "type": "keyword" + } + } + } + ], + "properties": { + "calendar_id": { + "type": "keyword" + }, + "job_ids": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "start_time": { + "type": "date" + }, + "end_time": { + "type": "date" + } + } + } + } +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/notifications_index_template.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/notifications_index_template.json new file mode 100644 index 00000000000..804707dfb18 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/notifications_index_template.json @@ -0,0 +1,46 @@ +{ + "order" : 0, + "version" : ${xpack.ml.version.id}, + "index_patterns" : [ + ".ml-notifications-000001" + ], + "settings" : { + "index" : { + "number_of_shards" : "1", + "auto_expand_replicas" : "0-1" + } + }, + "mappings" : { + "_doc": { + "_meta" : { + "version" : "${xpack.ml.version}" + }, + "dynamic" : "false", + "properties" : { + "job_id": { + "type": "keyword" + }, + "level": { + "type": "keyword" + }, + "message": { + "type": "text", + "fields": { + "raw": { + "type": "keyword" + } + } + }, + "timestamp": { + "type": "date" + }, + "node_name": { 
+ "type": "keyword" + }, + "job_type": { + "type": "keyword" + } + } + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java index 168dd24109d..74691050974 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java @@ -20,15 +20,13 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -38,7 +36,6 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeSta import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.TimingStats; -import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import 
org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.core.ml.job.results.ReservedFieldNames; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -56,8 +53,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; @@ -125,7 +120,6 @@ public class ElasticsearchMappingsTests extends ESTestCase { compareFields(expected, ReservedFieldNames.RESERVED_CONFIG_FIELD_NAMES); } - private void compareFields(Set expected, Set reserved) { if (reserved.size() != expected.size()) { Set diff = new HashSet<>(reserved); @@ -146,32 +140,6 @@ public class ElasticsearchMappingsTests extends ESTestCase { } } - @SuppressWarnings("unchecked") - public void testTermFieldMapping() throws IOException { - - XContentBuilder builder = ElasticsearchMappings.termFieldsMapping(Arrays.asList("apple", "strawberry", - AnomalyRecord.BUCKET_SPAN.getPreferredName())); - - XContentParser parser = createParser(builder); - Map mapping = (Map) parser.map().get(SINGLE_MAPPING_NAME); - Map properties = (Map) mapping.get(ElasticsearchMappings.PROPERTIES); - - Map instanceMapping = (Map) properties.get("apple"); - assertNotNull(instanceMapping); - String dataType = (String)instanceMapping.get(ElasticsearchMappings.TYPE); - assertEquals(ElasticsearchMappings.KEYWORD, dataType); - - instanceMapping = (Map) properties.get("strawberry"); - assertNotNull(instanceMapping); - dataType = (String)instanceMapping.get(ElasticsearchMappings.TYPE); - assertEquals(ElasticsearchMappings.KEYWORD, dataType); - - // check no mapping for the reserved field - instanceMapping = (Map) properties.get(AnomalyRecord.BUCKET_SPAN.getPreferredName()); - assertNull(instanceMapping); - } - - public 
void testMappingRequiresUpdateNoMapping() throws IOException { ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); ClusterState cs = csBuilder.build(); @@ -240,7 +208,7 @@ public class ElasticsearchMappingsTests extends ESTestCase { ClusterState clusterState = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("index-name", "0.0")); ElasticsearchMappings.addDocMappingIfMissing( "index-name", - ElasticsearchMappingsTests::fakeMapping, + mappingType -> "{\"_doc\":{\"properties\":{\"some-field\":{\"type\":\"long\"}}}}", client, clusterState, ActionListener.wrap( @@ -260,19 +228,6 @@ public class ElasticsearchMappingsTests extends ESTestCase { assertThat(request.source(), equalTo("{\"_doc\":{\"properties\":{\"some-field\":{\"type\":\"long\"}}}}")); } - private static XContentBuilder fakeMapping(String mappingType) throws IOException { - return jsonBuilder() - .startObject() - .startObject(mappingType) - .startObject(ElasticsearchMappings.PROPERTIES) - .startObject("some-field") - .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.LONG) - .endObject() - .endObject() - .endObject() - .endObject(); - } - private ClusterState getClusterStateWithMappingsWithMetaData(Map namesAndVersions) throws IOException { MetaData.Builder metaDataBuilder = MetaData.builder(); @@ -311,17 +266,17 @@ public class ElasticsearchMappingsTests extends ESTestCase { private Set collectResultsDocFieldNames() throws IOException { // Only the mappings for the results index should be added below. Do NOT add mappings for other indexes here. - return collectFieldNames(ElasticsearchMappings.resultsMapping("_doc")); + return collectFieldNames(AnomalyDetectorsIndex.resultsMapping()); } private Set collectConfigDocFieldNames() throws IOException { // Only the mappings for the config index should be added below. Do NOT add mappings for other indexes here. 
- return collectFieldNames(ElasticsearchMappings.configMapping()); + return collectFieldNames(MlConfigIndex.mapping()); } - private Set collectFieldNames(XContentBuilder mapping) throws IOException { + private Set collectFieldNames(String mapping) throws IOException { BufferedInputStream inputStream = - new BufferedInputStream(new ByteArrayInputStream(Strings.toString(mapping).getBytes(StandardCharsets.UTF_8))); + new BufferedInputStream(new ByteArrayInputStream(mapping.getBytes(StandardCharsets.UTF_8))); JsonParser parser = new JsonFactory().createParser(inputStream); Set fieldNames = new HashSet<>(); boolean isAfterPropertiesStart = false; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java index 281542af127..d859731b29a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java @@ -24,8 +24,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BitSet; import org.elasticsearch.client.Client; import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; @@ -522,7 +522,7 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase { final QueryShardContext shardContext = new QueryShardContext(shardId.id(), indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), - 
client, new IndexSearcher(directoryReader), () -> nowInMillis, null, null); + client, new IndexSearcher(directoryReader), () -> nowInMillis, null, null, () -> true); context = new TestIndexContext(directory, iw, directoryReader, shardContext, leaf); return context; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index 36102090efe..117fa4b2f49 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -84,7 +84,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT final long nowInMillis = randomNonNegativeLong(); QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), - client, null, () -> nowInMillis, null, null); + client, null, () -> nowInMillis, null, null, () -> true); QueryShardContext queryShardContext = spy(realQueryShardContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor()); XPackLicenseState licenseState = mock(XPackLicenseState.class); @@ -209,7 +209,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT final long nowInMillis = randomNonNegativeLong(); QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), - client, null, () -> nowInMillis, 
null, null); + client, null, () -> nowInMillis, null, null, () -> true); QueryShardContext queryShardContext = spy(realQueryShardContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 100b8feec89..edc8dec11e3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -108,7 +108,7 @@ import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkAction; import org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; @@ -1135,7 +1135,7 @@ public class ReservedRolesStoreTests extends ESTestCase { assertOnlyReadAllowed(role, MlMetaIndex.INDEX_NAME); assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); - assertOnlyReadAllowed(role, AuditorField.NOTIFICATIONS_INDEX); + assertOnlyReadAllowed(role, NotificationsIndex.NOTIFICATIONS_INDEX); 
assertReadWriteDocsButNotDeleteIndexAllowed(role, AnnotationIndex.INDEX_NAME); assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); @@ -1222,7 +1222,7 @@ public class ReservedRolesStoreTests extends ESTestCase { assertNoAccessAllowed(role, MlMetaIndex.INDEX_NAME); assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); - assertOnlyReadAllowed(role, AuditorField.NOTIFICATIONS_INDEX); + assertOnlyReadAllowed(role, NotificationsIndex.NOTIFICATIONS_INDEX); assertReadWriteDocsButNotDeleteIndexAllowed(role, AnnotationIndex.INDEX_NAME); assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TemplateUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TemplateUtilsTests.java index 57ba25af63f..09ce23e741f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TemplateUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TemplateUtilsTests.java @@ -8,15 +8,13 @@ package org.elasticsearch.xpack.core.template; import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.template.TemplateUtils; import org.hamcrest.Matcher; import java.io.IOException; +import java.util.HashMap; import java.util.Locale; -import java.util.regex.Pattern; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -25,12 +23,13 @@ import static org.hamcrest.core.Is.is; public class TemplateUtilsTests extends ESTestCase 
{ - private static final String TEST_TEMPLATE = "/monitoring-%s.json"; + private static final String SIMPLE_TEST_TEMPLATE = "/monitoring-%s.json"; + private static final String TEST_TEMPLATE_WITH_VARIABLES = "/template_with_variables-test.json"; - public void testLoadTemplate() throws IOException { + public void testLoadTemplate() { final int version = randomIntBetween(0, 10_000); - String resource = String.format(Locale.ROOT, TEST_TEMPLATE, "test"); - String source = TemplateUtils.loadTemplate(resource, String.valueOf(version), Pattern.quote("${monitoring.template.version}")); + String resource = String.format(Locale.ROOT, SIMPLE_TEST_TEMPLATE, "test"); + String source = TemplateUtils.loadTemplate(resource, String.valueOf(version), "monitoring.template.version"); assertThat(source, notNullValue()); assertThat(source.length(), greaterThan(0)); @@ -46,9 +45,36 @@ public class TemplateUtilsTests extends ESTestCase { "}\n")); } + public void testLoadTemplate_GivenTemplateWithVariables() { + final int version = randomIntBetween(0, 10_000); + Map variables = new HashMap<>(); + variables.put("test.template.field_1", "test_field_1"); + variables.put("test.template.field_2", "\"test_field_2\": {\"type\": \"long\"}"); + + String source = TemplateUtils.loadTemplate(TEST_TEMPLATE_WITH_VARIABLES, String.valueOf(version), + "test.template.version", variables); + + assertThat(source, notNullValue()); + assertThat(source.length(), greaterThan(0)); + assertTemplate(source, equalTo("{\n" + + " \"index_patterns\": \".test-" + version + "\",\n" + + " \"mappings\": {\n" + + " \"doc\": {\n" + + " \"_meta\": {\n" + + " \"template.version\": \"" + version + "\"\n" + + " },\n" + + " \"properties\": {\n" + + " \"test_field_1\": {\"type\": \"keyword\"},\n" + + " \"test_field_2\": {\"type\": \"long\"}\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n")); + } + public void testLoad() throws IOException { - String resource = String.format(Locale.ROOT, TEST_TEMPLATE, "test"); - BytesReference source 
= TemplateUtils.load(resource); + String resource = String.format(Locale.ROOT, SIMPLE_TEST_TEMPLATE, "test"); + String source = TemplateUtils.load(resource); assertThat(source, notNullValue()); assertThat(source.length(), greaterThan(0)); } @@ -60,35 +86,34 @@ public class TemplateUtilsTests extends ESTestCase { public void testValidateEmptySource() { ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class, - () -> TemplateUtils.validate(new BytesArray(""))); + () -> TemplateUtils.validate("")); assertThat(exception.getMessage(), is("Template must not be empty")); } public void testValidateInvalidSource() { ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class, - () -> TemplateUtils.validate(new BytesArray("{\"foo\": \"bar"))); + () -> TemplateUtils.validate("{\"foo\": \"bar")); assertThat(exception.getMessage(), is("Invalid template")); } public void testValidate() throws IOException { - String resource = String.format(Locale.ROOT, TEST_TEMPLATE, "test"); + String resource = String.format(Locale.ROOT, SIMPLE_TEST_TEMPLATE, "test"); TemplateUtils.validate(TemplateUtils.load(resource)); } - public void testFilter() { - assertTemplate(TemplateUtils.filter(new BytesArray("${monitoring.template.version}"), "0", - Pattern.quote("${monitoring.template.version}")), equalTo("0")); - assertTemplate(TemplateUtils.filter(new BytesArray("{\"template\": \"test-${monitoring.template.version}\"}"), "1", - Pattern.quote("${monitoring.template.version}")), equalTo("{\"template\": \"test-1\"}")); - assertTemplate(TemplateUtils.filter(new BytesArray("{\"template\": \"${monitoring.template.version}-test\"}"), "2", - Pattern.quote("${monitoring.template.version}")), equalTo("{\"template\": \"2-test\"}")); - assertTemplate(TemplateUtils.filter(new BytesArray("{\"template\": \"test-${monitoring.template.version}-test\"}"), "3", - Pattern.quote("${monitoring.template.version}")), equalTo("{\"template\": \"test-3-test\"}")); + 
public void testReplaceVariable() { + assertTemplate(TemplateUtils.replaceVariable("${monitoring.template.version}", + "monitoring.template.version", "0"), equalTo("0")); + assertTemplate(TemplateUtils.replaceVariable("{\"template\": \"test-${monitoring.template.version}\"}", + "monitoring.template.version", "1"), equalTo("{\"template\": \"test-1\"}")); + assertTemplate(TemplateUtils.replaceVariable("{\"template\": \"${monitoring.template.version}-test\"}", + "monitoring.template.version", "2"), equalTo("{\"template\": \"2-test\"}")); + assertTemplate(TemplateUtils.replaceVariable("{\"template\": \"test-${monitoring.template.version}-test\"}", + "monitoring.template.version", "3"), equalTo("{\"template\": \"test-3-test\"}")); final int version = randomIntBetween(0, 100); - assertTemplate(TemplateUtils.filter(new BytesArray("{\"foo-${monitoring.template.version}\": " + - "\"bar-${monitoring.template.version}\"}"), String.valueOf(version), - Pattern.quote("${monitoring.template.version}")), + assertTemplate(TemplateUtils.replaceVariable("{\"foo-${monitoring.template.version}\": " + + "\"bar-${monitoring.template.version}\"}", "monitoring.template.version", String.valueOf(version)), equalTo("{\"foo-" + version + "\": \"bar-" + version + "\"}")); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStatsTests.java index 05866c9c0b1..f83f820e194 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStatsTests.java @@ -31,13 +31,21 @@ public class TransformIndexerStatsTests extends AbstractSerializingTestCase replacedRoutes() { return unmodifiableList(asList( - new ReplacedRoute(GET, "/_migration/deprecations", GET, 
"/_xpack/migration/deprecations", deprecationLogger), - new ReplacedRoute( - GET, "/{index}/_migration/deprecations", GET, "/{index}/_xpack/migration/deprecations", deprecationLogger))); + new ReplacedRoute(GET, "/_migration/deprecations", GET, "/_xpack/migration/deprecations"), + new ReplacedRoute(GET, "/{index}/_migration/deprecations", GET, "/{index}/_xpack/migration/deprecations"))); } @Override diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Analyzer.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Analyzer.java index 9f18db3c920..82ce55ac74f 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Analyzer.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Analyzer.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.eql.analysis; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java index ac7800db056..b5f7fdab9d8 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java @@ -7,23 +7,14 @@ package org.elasticsearch.xpack.eql.analysis; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.eql.EqlClientException; -import org.elasticsearch.xpack.ql.tree.Location; -import org.elasticsearch.xpack.ql.util.StringUtils; +import org.elasticsearch.xpack.ql.common.Failure; import java.util.Collection; -import java.util.stream.Collectors; public class VerificationException extends EqlClientException { 
protected VerificationException(Collection sources) { - super(asMessage(sources)); - } - - private static String asMessage(Collection failures) { - return failures.stream().map(f -> { - Location l = f.node().source().source(); - return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message(); - }).collect(Collectors.joining(StringUtils.NEW_LINE, "Found " + failures.size() + " problem(s)\n", StringUtils.EMPTY)); + super(Failure.failMessage(sources)); } @Override diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java index 071bdb0cb00..0d526c4d052 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.eql.analysis; import org.elasticsearch.xpack.ql.capabilities.Unresolvable; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -22,7 +23,7 @@ import java.util.Map; import java.util.Set; import static java.util.stream.Collectors.toMap; -import static org.elasticsearch.xpack.eql.analysis.Failure.fail; +import static org.elasticsearch.xpack.ql.common.Failure.fail; /** * The verifier has the role of checking the analyzed tree for failures and build a list of failures following this check. 
diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java new file mode 100644 index 00000000000..c7451b44595 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.eql.querydsl.container.QueryContainer; +import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.eql.session.Results; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.Objects; + +public class EsQueryExec extends LeafExec { + + private final String index; + private final List output; + private final QueryContainer queryContainer; + + public EsQueryExec(Source source, String index, List output, QueryContainer queryContainer) { + super(source); + this.index = index; + this.output = output; + this.queryContainer = queryContainer; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, EsQueryExec::new, index, output, queryContainer); + } + + public EsQueryExec with(QueryContainer queryContainer) { + return new EsQueryExec(source(), index, output, queryContainer); + } + + public String index() { + return index; + } + + @Override + public List output() { + return output; + } + + @Override + public void execute(EqlSession session, ActionListener listener) { + throw new UnsupportedOperationException(); + } + + @Override + public 
int hashCode() { + return Objects.hash(index, queryContainer, output); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + EsQueryExec other = (EsQueryExec) obj; + return Objects.equals(index, other.index) + && Objects.equals(queryContainer, other.queryContainer) + && Objects.equals(output, other.output); + } + + @Override + public String nodeString() { + return nodeName() + "[" + index + "," + queryContainer + "]"; + } +} \ No newline at end of file diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/FilterExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/FilterExec.java new file mode 100644 index 00000000000..fc4b104d50a --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/FilterExec.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.Objects; + +public class FilterExec extends UnaryExec implements Unexecutable { + + private final Expression condition; + private final boolean onAggs; + + public FilterExec(Source source, PhysicalPlan child, Expression condition) { + this(source, child, condition, false); + } + + public FilterExec(Source source, PhysicalPlan child, Expression condition, boolean onAggs) { + super(source, child); + this.condition = condition; + this.onAggs = onAggs; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, FilterExec::new, child(), condition, onAggs); + } + + @Override + protected FilterExec replaceChild(PhysicalPlan newChild) { + return new FilterExec(source(), newChild, condition, onAggs); + } + + public Expression condition() { + return condition; + } + + public boolean onAggs() { + return onAggs; + } + + @Override + public List output() { + return child().output(); + } + + @Override + public int hashCode() { + return Objects.hash(condition, onAggs, child()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + FilterExec other = (FilterExec) obj; + return onAggs == other.onAggs + && Objects.equals(condition, other.condition) + && Objects.equals(child(), other.child()); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LeafExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LeafExec.java new file mode 100644 index 00000000000..9f39002e925 --- /dev/null +++ 
b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LeafExec.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.Collections; +import java.util.List; + +public abstract class LeafExec extends PhysicalPlan { + + protected LeafExec(Source source) { + super(source, Collections.emptyList()); + } + + @Override + public final LeafExec replaceChildren(List newChildren) { + throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LimitExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LimitExec.java new file mode 100644 index 00000000000..ff166851555 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LimitExec.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.Objects; + +public class LimitExec extends UnaryExec implements Unexecutable { + + private final Expression limit; + + public LimitExec(Source source, PhysicalPlan child, Expression limit) { + super(source, child); + this.limit = limit; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LimitExec::new, child(), limit); + } + + @Override + protected LimitExec replaceChild(PhysicalPlan newChild) { + return new LimitExec(source(), newChild, limit); + } + + public Expression limit() { + return limit; + } + + @Override + public int hashCode() { + return Objects.hash(limit, child()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + LimitExec other = (LimitExec) obj; + return Objects.equals(limit, other.limit) + && Objects.equals(child(), other.child()); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LocalExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LocalExec.java new file mode 100644 index 00000000000..48bd65d1a1c --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LocalExec.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.eql.session.EmptyExecutable; +import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.eql.session.Executable; +import org.elasticsearch.xpack.eql.session.Results; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.Objects; + +public class LocalExec extends LeafExec { + + private final Executable executable; + + public LocalExec(Source source, Executable executable) { + super(source); + this.executable = executable; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LocalExec::new, executable); + } + + public Executable executable() { + return executable; + } + + @Override + public List output() { + return executable.output(); + } + + public boolean isEmpty() { + return executable instanceof EmptyExecutable; + } + + @Override + public void execute(EqlSession session, ActionListener listener) { + executable.execute(session, listener); + } + + @Override + public int hashCode() { + return Objects.hash(executable); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + LocalExec other = (LocalExec) obj; + return Objects.equals(executable, other.executable); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LocalRelation.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LocalRelation.java new file mode 100644 index 00000000000..cb30beeb690 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/LocalRelation.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.eql.session.Executable; +import org.elasticsearch.xpack.eql.session.Results; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.Objects; + +import static java.util.Collections.emptyList; + +public class LocalRelation extends LogicalPlan implements Executable { + + private final Executable executable; + + public LocalRelation(Source source, Executable executable) { + super(source, emptyList()); + this.executable = executable; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LocalRelation::new, executable); + } + + @Override + public LogicalPlan replaceChildren(List newChildren) { + throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); + } + + public Executable executable() { + return executable; + } + + @Override + public boolean expressionsResolved() { + return true; + } + + @Override + public List output() { + return executable.output(); + } + + @Override + public void execute(EqlSession session, ActionListener listener) { + executable.execute(session, listener); + } + + @Override + public int hashCode() { + return executable.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + LocalRelation other = (LocalRelation) obj; + return Objects.equals(executable, 
other.executable); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/OrderExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/OrderExec.java new file mode 100644 index 00000000000..541c64070c3 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/OrderExec.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.xpack.ql.expression.Order; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.Objects; + +public class OrderExec extends UnaryExec implements Unexecutable { + + private final List order; + + public OrderExec(Source source, PhysicalPlan child, List order) { + super(source, child); + this.order = order; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, OrderExec::new, child(), order); + } + + @Override + protected OrderExec replaceChild(PhysicalPlan newChild) { + return new OrderExec(source(), newChild, order); + } + + public List order() { + return order; + } + + @Override + public int hashCode() { + return Objects.hash(order, child()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + OrderExec other = (OrderExec) obj; + + return Objects.equals(order, other.order) + && Objects.equals(child(), other.child()); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/UnaryExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/UnaryExec.java new file 
mode 100644 index 00000000000..26389c065d3 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/UnaryExec.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +public abstract class UnaryExec extends PhysicalPlan { + + private final PhysicalPlan child; + + protected UnaryExec(Source source, PhysicalPlan child) { + super(source, Collections.singletonList(child)); + this.child = child; + } + + @Override + public final PhysicalPlan replaceChildren(List newChildren) { + if (newChildren.size() != 1) { + throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); + } + return replaceChild(newChildren.get(0)); + } + + protected abstract UnaryExec replaceChild(PhysicalPlan newChild); + + public PhysicalPlan child() { + return child; + } + + @Override + public List output() { + return child.output(); + } + + @Override + public int hashCode() { + return Objects.hashCode(child()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnaryExec other = (UnaryExec) obj; + + return Objects.equals(child, other.child); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/Unexecutable.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/Unexecutable.java new file mode 100644 index 00000000000..69a6ca51271 --- /dev/null +++ 
b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/Unexecutable.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.eql.planner.PlanningException; +import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.eql.session.Executable; +import org.elasticsearch.xpack.eql.session.Results; + + +// this is mainly a marker interface to validate a plan before being executed +public interface Unexecutable extends Executable { + + @Override + default void execute(EqlSession session, ActionListener listener) { + throw new PlanningException("Current plan {} is not executable", this); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/UnplannedExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/UnplannedExec.java new file mode 100644 index 00000000000..45061b3f961 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/UnplannedExec.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.eql.plan.physical; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.eql.planner.PlanningException; +import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.eql.session.Results; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.Objects; + +public class UnplannedExec extends LeafExec implements Unexecutable { + + private final LogicalPlan plan; + + public UnplannedExec(Source source, LogicalPlan plan) { + super(source); + this.plan = plan; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, UnplannedExec::new, plan); + } + + public LogicalPlan plan() { + return plan; + } + + @Override + public List output() { + return plan.output(); + } + + @Override + public void execute(EqlSession session, ActionListener listener) { + throw new PlanningException("Current plan {} is not executable", this); + } + + @Override + public int hashCode() { + return plan.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnplannedExec other = (UnplannedExec) obj; + return Objects.equals(plan, other.plan); + } + + @Override + public String nodeString() { + return nodeName() + "[" + plan.nodeString() + "]"; + } +} \ No newline at end of file diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Mapper.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Mapper.java new file mode 100644 index 00000000000..225683a21d4 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Mapper.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.eql.planner; + +import org.elasticsearch.xpack.eql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.eql.plan.physical.FilterExec; +import org.elasticsearch.xpack.eql.plan.physical.LimitExec; +import org.elasticsearch.xpack.eql.plan.physical.LocalExec; +import org.elasticsearch.xpack.eql.plan.physical.LocalRelation; +import org.elasticsearch.xpack.eql.plan.physical.OrderExec; +import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.eql.plan.physical.UnplannedExec; +import org.elasticsearch.xpack.eql.querydsl.container.QueryContainer; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.plan.logical.EsRelation; +import org.elasticsearch.xpack.ql.plan.logical.Filter; +import org.elasticsearch.xpack.ql.plan.logical.Limit; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.plan.logical.OrderBy; +import org.elasticsearch.xpack.ql.rule.Rule; +import org.elasticsearch.xpack.ql.rule.RuleExecutor; +import org.elasticsearch.xpack.ql.util.ReflectionUtils; + +import java.util.Arrays; +import java.util.List; + +class Mapper extends RuleExecutor { + + PhysicalPlan map(LogicalPlan plan) { + return execute(planLater(plan)); + } + + @Override + protected Iterable.Batch> batches() { + Batch conversion = new Batch("Mapping", new SimpleExecMapper()); + + return Arrays.asList(conversion); + } + + private static PhysicalPlan planLater(LogicalPlan plan) { + return new UnplannedExec(plan.source(), plan); + } + + private static class SimpleExecMapper extends MapExecRule { + + @Override + protected PhysicalPlan map(LogicalPlan p) { + + if (p instanceof LocalRelation) { + return new LocalExec(p.source(), ((LocalRelation) 
p).executable()); + } + + if (p instanceof Filter) { + Filter fl = (Filter) p; + return new FilterExec(p.source(), map(fl.child()), fl.condition()); + } + + if (p instanceof OrderBy) { + OrderBy o = (OrderBy) p; + return new OrderExec(p.source(), map(o.child()), o.order()); + } + + if (p instanceof Limit) { + Limit l = (Limit) p; + return new LimitExec(p.source(), map(l.child()), l.limit()); + } + + if (p instanceof EsRelation) { + EsRelation c = (EsRelation) p; + List output = c.output(); + QueryContainer container = new QueryContainer(); + if (c.frozen()) { + container = container.withFrozen(); + } + return new EsQueryExec(p.source(), c.index().name(), output, container); + } + + return planLater(p); + } + } + + abstract static class MapExecRule extends Rule { + + private final Class subPlanToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass()); + + @Override + public final PhysicalPlan apply(PhysicalPlan plan) { + return plan.transformUp(this::rule, UnplannedExec.class); + } + + @SuppressWarnings("unchecked") + @Override + protected final PhysicalPlan rule(UnplannedExec plan) { + LogicalPlan subPlan = plan.plan(); + if (subPlanToken.isInstance(subPlan)) { + return map((SubPlan) subPlan); + } + return plan; + } + + protected abstract PhysicalPlan map(SubPlan plan); + } +} \ No newline at end of file diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Planner.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Planner.java index 0eb373d6809..e20b03533de 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Planner.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Planner.java @@ -7,11 +7,41 @@ package org.elasticsearch.xpack.eql.planner; import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import java.util.List; + public class 
Planner { + private final Mapper mapper = new Mapper(); + private final QueryFolder folder = new QueryFolder(); + public PhysicalPlan plan(LogicalPlan plan) { - throw new UnsupportedOperationException(); + return foldPlan(mapPlan(plan)); + } + + PhysicalPlan mapPlan(LogicalPlan plan) { + return verifyMappingPlan(mapper.map(plan)); + } + + PhysicalPlan foldPlan(PhysicalPlan mapped) { + return verifyExecutingPlan(folder.fold(mapped)); + } + + PhysicalPlan verifyMappingPlan(PhysicalPlan plan) { + List failures = Verifier.verifyMappingPlan(plan); + if (failures.isEmpty() == false) { + throw new PlanningException(failures); + } + return plan; + } + + PhysicalPlan verifyExecutingPlan(PhysicalPlan plan) { + List failures = Verifier.verifyExecutingPlan(plan); + if (failures.isEmpty() == false) { + throw new PlanningException(failures); + } + return plan; } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java new file mode 100644 index 00000000000..93c4c8638e2 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.planner; + +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.eql.EqlClientException; +import org.elasticsearch.xpack.ql.common.Failure; + +import java.util.Collection; + +public class PlanningException extends EqlClientException { + + public PlanningException(String message, Object... 
args) { + super(message, args); + } + + protected PlanningException(Collection sources) { + super(Failure.failMessage(sources)); + } + + @Override + public RestStatus status() { + return RestStatus.BAD_REQUEST; + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java new file mode 100644 index 00000000000..c43b71ac9e8 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.eql.planner; + +import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.ql.rule.RuleExecutor; + +import static java.util.Collections.emptyList; + +class QueryFolder extends RuleExecutor { + + PhysicalPlan fold(PhysicalPlan plan) { + return execute(plan); + } + + @Override + protected Iterable.Batch> batches() { + return emptyList(); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Verifier.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Verifier.java new file mode 100644 index 00000000000..82c09890b26 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/Verifier.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.eql.planner; + +import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.eql.plan.physical.Unexecutable; +import org.elasticsearch.xpack.eql.plan.physical.UnplannedExec; +import org.elasticsearch.xpack.ql.common.Failure; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.ql.common.Failure.fail; + +abstract class Verifier { + + static List verifyMappingPlan(PhysicalPlan plan) { + List failures = new ArrayList<>(); + + plan.forEachUp(p -> { + if (p instanceof UnplannedExec) { + failures.add(fail(p, "Unplanned item")); + } + p.forEachExpressionsUp(e -> { + if (e.childrenResolved() && !e.resolved()) { + failures.add(fail(e, "Unresolved expression")); + } + }); + }); + return failures; + } + + static List verifyExecutingPlan(PhysicalPlan plan) { + List failures = new ArrayList<>(); + + plan.forEachUp(p -> { + if (p instanceof Unexecutable) { + failures.add(fail(p, "Unexecutable item")); + } + p.forEachExpressionsUp(e -> { + if (e.childrenResolved() && !e.resolved()) { + failures.add(fail(e, "Unresolved expression")); + } + }); + }); + + return failures; + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/QueryContainer.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/QueryContainer.java new file mode 100644 index 00000000000..05a54cfd86e --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/QueryContainer.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.eql.querydsl.container; + +public class QueryContainer { + + public QueryContainer withFrozen() { + throw new UnsupportedOperationException(); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/EmptyExecutable.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/EmptyExecutable.java new file mode 100644 index 00000000000..e32cf3c34c2 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/EmptyExecutable.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.session; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.ql.expression.Attribute; + +import java.util.List; +import java.util.Objects; + +public class EmptyExecutable implements Executable { + + private final List output; + + public EmptyExecutable(List output) { + this.output = output; + } + + @Override + public List output() { + return output; + } + + @Override + public void execute(EqlSession session, ActionListener listener) { + listener.onResponse(Results.EMPTY); + } + + @Override + public int hashCode() { + return output.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + EmptyExecutable other = (EmptyExecutable) obj; + return Objects.equals(output, other.output); + } + + @Override + public String toString() { + return output.toString(); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/Results.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/Results.java index b0277e3b793..53c2859313a 100644 --- 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/Results.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/Results.java @@ -7,13 +7,17 @@ package org.elasticsearch.xpack.eql.session; import org.apache.lucene.search.TotalHits; +import org.apache.lucene.search.TotalHits.Relation; import java.util.List; +import static java.util.Collections.emptyList; + public class Results { - private final TotalHits totalHits; + public static final Results EMPTY = new Results(new TotalHits(0, Relation.EQUAL_TO), emptyList()); + private final TotalHits totalHits; private final List results; public Results(TotalHits totalHits, List results) { diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java index 64f5328b5d1..ac339db8f1e 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java @@ -38,7 +38,7 @@ public class VerifierTests extends ESTestCase { private String error(IndexResolution resolution, String eql) { VerificationException e = expectThrows(VerificationException.class, () -> accept(resolution, eql)); assertTrue(e.getMessage().startsWith("Found ")); - String header = "Found 1 problem(s)\nline "; + String header = "Found 1 problem\nline "; return e.getMessage().substring(header.length()); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java new file mode 100644 index 00000000000..147cabbfdb5 --- /dev/null +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.eql.planner; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.eql.analysis.Analyzer; +import org.elasticsearch.xpack.eql.analysis.PreAnalyzer; +import org.elasticsearch.xpack.eql.analysis.Verifier; +import org.elasticsearch.xpack.eql.expression.function.EqlFunctionRegistry; +import org.elasticsearch.xpack.eql.optimizer.Optimizer; +import org.elasticsearch.xpack.eql.parser.EqlParser; +import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.ql.QlClientException; +import org.elasticsearch.xpack.ql.index.EsIndex; +import org.elasticsearch.xpack.ql.index.IndexResolution; + +import static org.elasticsearch.xpack.ql.type.TypesTests.loadMapping; + +public class QueryFolderTests extends ESTestCase { + + private EqlParser parser = new EqlParser(); + private PreAnalyzer preAnalyzer = new PreAnalyzer(); + private Analyzer analyzer = new Analyzer(new EqlFunctionRegistry(), new Verifier()); + private Optimizer optimizer = new Optimizer(); + private Planner planner = new Planner(); + + private IndexResolution index = IndexResolution.valid(new EsIndex("test", loadMapping("mapping-default.json"))); + + + private PhysicalPlan plan(IndexResolution resolution, String eql) { + return planner.plan(optimizer.optimize(analyzer.analyze(preAnalyzer.preAnalyze(parser.createStatement(eql), resolution)))); + } + + private PhysicalPlan plan(String eql) { + return plan(index, eql); + } + + public void testBasicPlan() throws Exception { + expectThrows(QlClientException.class, "not yet implemented", () -> plan("process where true")); + } +} diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java 
b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 8b50b69ae0d..e929764e152 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -73,14 +73,14 @@ public class RestGraphAction extends XPackRestHandler { @Override public List replacedRoutes() { return unmodifiableList(asList( - new ReplacedRoute(GET, "/{index}/_graph/explore", GET, "/{index}" + URI_BASE + "/graph/_explore", deprecationLogger), - new ReplacedRoute(POST, "/{index}/_graph/explore", POST, "/{index}" + URI_BASE + "/graph/_explore", deprecationLogger), + new ReplacedRoute(GET, "/{index}/_graph/explore", GET, "/{index}" + URI_BASE + "/graph/_explore"), + new ReplacedRoute(POST, "/{index}/_graph/explore", POST, "/{index}" + URI_BASE + "/graph/_explore"), new ReplacedRoute( GET, "/{index}/{type}/_graph/explore", - GET, "/{index}/{type}" + URI_BASE + "/graph/_explore", deprecationLogger), + GET, "/{index}/{type}" + URI_BASE + "/graph/_explore"), new ReplacedRoute( POST, "/{index}/{type}_graph/explore", - POST, "/{index}/{type}" + URI_BASE + "/graph/_explore", deprecationLogger))); + POST, "/{index}/{type}" + URI_BASE + "/graph/_explore"))); } @Override diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java index fcf22834a8e..2c8d2097828 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java @@ -24,7 +24,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.UnaryOperator; -import java.util.regex.Pattern; /** * This class activates/deactivates the logstash modules depending if we're running a 
node client or transport client @@ -35,8 +34,7 @@ public class Logstash extends Plugin implements SystemIndexPlugin { private static final String LOGSTASH_TEMPLATE_FILE_NAME = "logstash-management"; private static final String LOGSTASH_INDEX_TEMPLATE_NAME = ".logstash-management"; private static final String OLD_LOGSTASH_INDEX_NAME = "logstash-index-template"; - private static final String TEMPLATE_VERSION_PATTERN = - Pattern.quote("${logstash.template.version}"); + private static final String TEMPLATE_VERSION_VARIABLE = "logstash.template.version"; private final boolean enabled; private final boolean transportClientMode; @@ -66,7 +64,7 @@ public class Logstash extends Plugin implements SystemIndexPlugin { return templates -> { templates.keySet().removeIf(OLD_LOGSTASH_INDEX_NAME::equals); TemplateUtils.loadTemplateIntoMap("/" + LOGSTASH_TEMPLATE_FILE_NAME + ".json", templates, LOGSTASH_INDEX_TEMPLATE_NAME, - Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN, LogManager.getLogger(Logstash.class)); + Version.CURRENT.toString(), TEMPLATE_VERSION_VARIABLE, LogManager.getLogger(Logstash.class)); //internal representation of typeless templates requires the default "_doc" type, which is also required for internal templates assert templates.get(LOGSTASH_INDEX_TEMPLATE_NAME).mappings().get(MapperService.SINGLE_MAPPING_NAME) != null; return templates; diff --git a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java index 5928d9889c8..2cad8287117 100644 --- a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java +++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -34,6 +35,7 @@ import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; @@ -49,7 +51,10 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.Iterator; @@ -306,7 +311,7 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper { @Override public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, - boolean transpositions) { + boolean transpositions, QueryShardContext context) { throw new UnsupportedOperationException("[fuzzy] queries are not currently supported on keyed " + "[" + CONTENT_TYPE + "] fields."); } @@ -385,6 +390,12 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper { return new SortField(getFieldName(), source, reverse); } + @Override 
+ public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + @Override public void clear() { delegate.clear(); diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java index 46901035c8a..2a106d6a96c 100644 --- a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; @@ -98,7 +99,12 @@ public class KeyedFlatObjectFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); Query expected = new PrefixQuery(new Term("field", "key\0val")); - assertEquals(expected, ft.prefixQuery("val", MultiTermQuery.CONSTANT_SCORE_REWRITE, null)); + assertEquals(expected, ft.prefixQuery("val", MultiTermQuery.CONSTANT_SCORE_REWRITE, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.prefixQuery("val", MultiTermQuery.CONSTANT_SCORE_REWRITE, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. 
" + + "For optimised prefix queries on text fields please enable [index_prefixes].", ee.getMessage()); } public void testFuzzyQuery() { @@ -106,7 +112,7 @@ public class KeyedFlatObjectFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, - () -> ft.fuzzyQuery("valuee", Fuzziness.fromEdits(2), 1, 50, true)); + () -> ft.fuzzyQuery("value", Fuzziness.fromEdits(2), 1, 50, true, randomMockShardContext())); assertEquals("[fuzzy] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); } @@ -117,12 +123,12 @@ public class KeyedFlatObjectFieldTypeTests extends FieldTypeTestCase { TermRangeQuery expected = new TermRangeQuery("field", new BytesRef("key\0lower"), new BytesRef("key\0upper"), false, false); - assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, null)); + assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, MOCK_QSC)); expected = new TermRangeQuery("field", new BytesRef("key\0lower"), new BytesRef("key\0upper"), true, true); - assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, null)); + assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, MOCK_QSC)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.rangeQuery("lower", null, false, false, null)); @@ -130,9 +136,14 @@ public class KeyedFlatObjectFieldTypeTests extends FieldTypeTestCase { e.getMessage()); e = expectThrows(IllegalArgumentException.class, () -> - ft.rangeQuery(null, "upper", false, false, null)); + ft.rangeQuery(null, "upper", false, false, MOCK_QSC)); assertEquals("[range] queries on keyed [flattened] fields must include both an upper and a lower bound.", e.getMessage()); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.rangeQuery("lower", "upper", false, false, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[range] queries on [text] or [keyword] 
fields cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testRegexpQuery() { @@ -140,7 +151,7 @@ public class KeyedFlatObjectFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, - () -> ft.regexpQuery("valu*", 0, 10, null, null)); + () -> ft.regexpQuery("valu*", 0, 10, null, randomMockShardContext())); assertEquals("[regexp] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); } @@ -149,7 +160,7 @@ public class KeyedFlatObjectFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, - () -> ft.wildcardQuery("valu*", null, null)); + () -> ft.wildcardQuery("valu*", null, randomMockShardContext())); assertEquals("[wildcard] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); } } diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java index be297663c6e..e0afaf007a5 100644 --- a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.FieldTypeTestCase; @@ -78,8 +79,14 @@ public class 
RootFlatObjectFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); Query expected = new FuzzyQuery(new Term("field", "value"), 2, 1, 50, true); - Query actual = ft.fuzzyQuery("value", Fuzziness.fromEdits(2), 1, 50, true); + Query actual = ft.fuzzyQuery("value", Fuzziness.fromEdits(2), 1, 50, true, MOCK_QSC); assertEquals(expected, actual); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.fuzzyQuery("value", Fuzziness.AUTO, randomInt(10) + 1, randomInt(10) + 1, + randomBoolean(), MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[fuzzy] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testRangeQuery() { @@ -89,12 +96,17 @@ public class RootFlatObjectFieldTypeTests extends FieldTypeTestCase { TermRangeQuery expected = new TermRangeQuery("field", new BytesRef("lower"), new BytesRef("upper"), false, false); - assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, null)); + assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, MOCK_QSC)); expected = new TermRangeQuery("field", new BytesRef("lower"), new BytesRef("upper"), true, true); - assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, null)); + assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.rangeQuery("lower", "upper", true, true, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[range] queries on [text] or [keyword] fields cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testRegexpQuery() { @@ -102,8 +114,13 @@ public class RootFlatObjectFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); Query expected = new RegexpQuery(new Term("field", "val.*")); - Query actual = ft.regexpQuery("val.*", 0, 10, null, null); + Query actual = ft.regexpQuery("val.*", 0, 10, null, 
MOCK_QSC); assertEquals(expected, actual); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.regexpQuery("val.*", randomInt(10), randomInt(10) + 1, null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } public void testWildcardQuery() { @@ -111,6 +128,11 @@ public class RootFlatObjectFieldTypeTests extends FieldTypeTestCase { ft.setName("field"); Query expected = new WildcardQuery(new Term("field", new BytesRef("valu*"))); - assertEquals(expected, ft.wildcardQuery("valu*", null, null)); + assertEquals(expected, ft.wildcardQuery("valu*", null, MOCK_QSC)); + + ElasticsearchException ee = expectThrows(ElasticsearchException.class, + () -> ft.wildcardQuery("valu*", null, MOCK_QSC_DISALLOW_EXPENSIVE)); + assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + ee.getMessage()); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 8fc6bdeef55..3e6837affb0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import 
org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.ml.MachineLearning; import org.junit.After; @@ -749,7 +749,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase { // There should be a notification saying that there was a problem extracting data client().performRequest(new Request("POST", "/_refresh")); Response notificationsResponse = client().performRequest( - new Request("GET", AuditorField.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId)); + new Request("GET", NotificationsIndex.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId)); String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity()); assertThat(notificationsResponseAsString, containsString("\"message\":\"Datafeed is encountering errors extracting data: " + "action [indices:data/read/search] is unauthorized for user [ml_admin_plus_data]\"")); @@ -956,7 +956,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase { // There should be a notification saying that there was a problem extracting data client().performRequest(new Request("POST", "/_refresh")); Response notificationsResponse = client().performRequest( - new Request("GET", AuditorField.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId)); + new Request("GET", NotificationsIndex.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId)); String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity()); assertThat(notificationsResponseAsString, containsString("\"message\":\"Datafeed is encountering errors extracting data: " + "action [indices:admin/xpack/rollup/search] is unauthorized for user [ml_admin_plus_data]\"")); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 8bc6cdb69ec..6480eccc852 
100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.junit.After; import org.junit.Before; @@ -185,7 +185,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) .get().getHits().getTotalHits().value; long totalNotificationsCountBeforeDelete = - client().prepareSearch(AuditorField.NOTIFICATIONS_INDEX).get().getHits().getTotalHits().value; + client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).get().getHits().getTotalHits().value; assertThat(totalModelSizeStatsBeforeDelete, greaterThan(0L)); assertThat(totalNotificationsCountBeforeDelete, greaterThan(0L)); @@ -233,7 +233,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) .get().getHits().getTotalHits().value; long totalNotificationsCountAfterDelete = - client().prepareSearch(AuditorField.NOTIFICATIONS_INDEX).get().getHits().getTotalHits().value; + client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).get().getHits().getTotalHits().value; assertThat(totalModelSizeStatsAfterDelete, equalTo(totalModelSizeStatsBeforeDelete)); assertThat(totalNotificationsCountAfterDelete, greaterThanOrEqualTo(totalNotificationsCountBeforeDelete)); diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index fb1a4a6f004..2e6730674ef 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Operator; import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; import org.elasticsearch.xpack.core.ml.job.config.RuleScope; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.junit.After; import java.io.IOException; @@ -188,7 +188,7 @@ public class DetectionRulesIT extends MlNativeAutodetectIntegTestCase { // Wait until the notification that the filter was updated is indexed assertBusy(() -> { SearchResponse searchResponse = - client().prepareSearch(AuditorField.NOTIFICATIONS_INDEX) + client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) .setSize(1) .addSort("timestamp", SortOrder.DESC) .setQuery(QueryBuilders.boolQuery() diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index 2c586b34e28..665be4551df 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -39,7 +39,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.ml.utils.PhaseProgress; import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import org.elasticsearch.xpack.ml.dataframe.StoredProgress; @@ -232,7 +232,7 @@ abstract class MlNativeDataFrameAnalyticsIntegTestCase extends MlNativeIntegTest // Make sure we wrote to the audit // Since calls to write the AbstractAuditor are sent and forgot (async) we could have returned from the start, // finished the job (as this is a very short analytics job), all without the audit being fully written. 
- assertBusy(() -> assertTrue(indexExists(AuditorField.NOTIFICATIONS_INDEX))); + assertBusy(() -> assertTrue(indexExists(NotificationsIndex.NOTIFICATIONS_INDEX))); @SuppressWarnings("unchecked") Matcher[] itemMatchers = Arrays.stream(expectedAuditMessagePrefixes).map(Matchers::startsWith).toArray(Matcher[]::new); assertBusy(() -> { @@ -244,12 +244,12 @@ abstract class MlNativeDataFrameAnalyticsIntegTestCase extends MlNativeIntegTest } private static List fetchAllAuditMessages(String dataFrameAnalyticsId) { - RefreshRequest refreshRequest = new RefreshRequest(AuditorField.NOTIFICATIONS_INDEX); + RefreshRequest refreshRequest = new RefreshRequest(NotificationsIndex.NOTIFICATIONS_INDEX); RefreshResponse refreshResponse = client().execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); assertThat(refreshResponse.getStatus().getStatus(), anyOf(equalTo(200), equalTo(201))); SearchRequest searchRequest = new SearchRequestBuilder(client(), SearchAction.INSTANCE) - .setIndices(AuditorField.NOTIFICATIONS_INDEX) + .setIndices(NotificationsIndex.NOTIFICATIONS_INDEX) .addSort("timestamp", SortOrder.ASC) .setQuery(QueryBuilders.termQuery("job_id", dataFrameAnalyticsId)) .request(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java index fd728f39545..cfc7b9c1408 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; 
-import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.junit.After; import java.io.IOException; @@ -225,7 +225,7 @@ public class ScheduledEventsIT extends MlNativeAutodetectIntegTestCase { // Wait until the notification that the process was updated is indexed assertBusy(() -> { SearchResponse searchResponse = - client().prepareSearch(AuditorField.NOTIFICATIONS_INDEX) + client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) .setSize(1) .addSort("timestamp", SortOrder.DESC) .setQuery(QueryBuilders.boolQuery() @@ -301,7 +301,7 @@ public class ScheduledEventsIT extends MlNativeAutodetectIntegTestCase { // Wait until the notification that the job was updated is indexed assertBusy(() -> { SearchResponse searchResponse = - client().prepareSearch(AuditorField.NOTIFICATIONS_INDEX) + client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) .setSize(1) .addSort("timestamp", SortOrder.DESC) .setQuery(QueryBuilders.boolQuery() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 4ba2840dcd3..5dd7d8f8331 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -9,21 +9,18 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.OriginSettingClient; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; @@ -37,10 +34,8 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; @@ -135,8 +130,7 @@ import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvide import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.template.TemplateUtils; import org.elasticsearch.xpack.ml.action.TransportCloseJobAction; import org.elasticsearch.xpack.ml.action.TransportDeleteCalendarAction; @@ -215,7 +209,6 @@ import 
org.elasticsearch.xpack.ml.dataframe.process.results.AnalyticsResult; import org.elasticsearch.xpack.ml.dataframe.process.results.MemoryUsageEstimationResult; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; -import org.elasticsearch.xpack.ml.inference.persistence.InferenceInternalIndex; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.JobManagerHolder; @@ -254,6 +247,7 @@ import org.elasticsearch.xpack.ml.rest.calendar.RestGetCalendarsAction; import org.elasticsearch.xpack.ml.rest.calendar.RestPostCalendarEventAction; import org.elasticsearch.xpack.ml.rest.calendar.RestPutCalendarAction; import org.elasticsearch.xpack.ml.rest.calendar.RestPutCalendarJobAction; +import org.elasticsearch.xpack.ml.rest.cat.RestCatDataFrameAnalyticsAction; import org.elasticsearch.xpack.ml.rest.cat.RestCatDatafeedsAction; import org.elasticsearch.xpack.ml.rest.cat.RestCatJobsAction; import org.elasticsearch.xpack.ml.rest.cat.RestCatTrainedModelsAction; @@ -320,7 +314,6 @@ import java.util.function.Supplier; import java.util.function.UnaryOperator; import static java.util.Collections.emptyList; -import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; public class MachineLearning extends Plugin implements SystemIndexPlugin, AnalysisPlugin, IngestPlugin, PersistentTaskPlugin { public static final String NAME = "ml"; @@ -524,6 +517,8 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin, Analys return Collections.singletonList(new JobManagerHolder()); } + new MlIndexTemplateRegistry(settings, clusterService, threadPool, client, xContentRegistry); + AnomalyDetectionAuditor anomalyDetectionAuditor = new AnomalyDetectionAuditor(client, clusterService.getNodeName()); DataFrameAnalyticsAuditor dataFrameAnalyticsAuditor = new 
DataFrameAnalyticsAuditor(client, clusterService.getNodeName()); InferenceAuditor inferenceAuditor = new InferenceAuditor(client, clusterService.getNodeName()); @@ -788,7 +783,8 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin, Analys // CAT Handlers new RestCatJobsAction(), new RestCatTrainedModelsAction(), - new RestCatDatafeedsAction() + new RestCatDatafeedsAction(), + new RestCatDataFrameAnalyticsAction() ); } @@ -896,112 +892,14 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin, Analys @Override public UnaryOperator> getIndexTemplateMetaDataUpgrader() { - return templates -> { - - try (XContentBuilder auditMapping = ElasticsearchMappings.auditMessageMapping()) { - IndexTemplateMetaData notificationMessageTemplate = - IndexTemplateMetaData.builder(AuditorField.NOTIFICATIONS_INDEX) - .putMapping(SINGLE_MAPPING_NAME, Strings.toString(auditMapping)) - .patterns(Collections.singletonList(AuditorField.NOTIFICATIONS_INDEX)) - .version(Version.CURRENT.id) - .settings(Settings.builder() - // Our indexes are small and one shard puts the - // least possible burden on Elasticsearch - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")) - .build(); - templates.put(AuditorField.NOTIFICATIONS_INDEX, notificationMessageTemplate); - } catch (IOException e) { - logger.warn("Error loading the template for the notification message index", e); - } - - try (XContentBuilder docMapping = MlMetaIndex.docMapping()) { - IndexTemplateMetaData metaTemplate = - IndexTemplateMetaData.builder(MlMetaIndex.INDEX_NAME) - .patterns(Collections.singletonList(MlMetaIndex.INDEX_NAME)) - .settings(Settings.builder() - // Our indexes are small and one shard puts the - // least possible burden on Elasticsearch - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")) - .version(Version.CURRENT.id) - .putMapping(SINGLE_MAPPING_NAME, 
Strings.toString(docMapping)) - .build(); - templates.put(MlMetaIndex.INDEX_NAME, metaTemplate); - } catch (IOException e) { - logger.warn("Error loading the template for the " + MlMetaIndex.INDEX_NAME + " index", e); - } - - try (XContentBuilder configMapping = ElasticsearchMappings.configMapping()) { - IndexTemplateMetaData configTemplate = - IndexTemplateMetaData.builder(AnomalyDetectorsIndex.configIndexName()) - .patterns(Collections.singletonList(AnomalyDetectorsIndex.configIndexName())) - .settings(Settings.builder() - // Our indexes are small and one shard puts the - // least possible burden on Elasticsearch - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") - .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), - AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)) - .version(Version.CURRENT.id) - .putMapping(SINGLE_MAPPING_NAME, Strings.toString(configMapping)) - .build(); - templates.put(AnomalyDetectorsIndex.configIndexName(), configTemplate); - } catch (IOException e) { - logger.warn("Error loading the template for the " + AnomalyDetectorsIndex.configIndexName() + " index", e); - } - - try (XContentBuilder stateMapping = ElasticsearchMappings.stateMapping()) { - IndexTemplateMetaData stateTemplate = - IndexTemplateMetaData.builder(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX) - .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobStateIndexPattern())) - // TODO review these settings - .settings(Settings.builder() - .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")) - .putMapping(SINGLE_MAPPING_NAME, Strings.toString(stateMapping)) - .version(Version.CURRENT.id) - .build(); - - templates.put(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, stateTemplate); - } catch (IOException e) { - logger.error("Error loading the template for the " + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + " index", e); - } - - try (XContentBuilder docMapping = 
ElasticsearchMappings.resultsMapping(SINGLE_MAPPING_NAME)) { - IndexTemplateMetaData jobResultsTemplate = - IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobResultsIndexPrefix()) - .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*")) - .settings(Settings.builder() - .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") - // Sacrifice durability for performance: in the event of power - // failure we can lose the last 5 seconds of changes, but it's - // much faster - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async") - // set the default all search field - .put(IndexSettings.DEFAULT_FIELD_SETTING.getKey(), ElasticsearchMappings.ALL_FIELD_VALUES)) - .putMapping(SINGLE_MAPPING_NAME, Strings.toString(docMapping)) - .version(Version.CURRENT.id) - .build(); - templates.put(AnomalyDetectorsIndex.jobResultsIndexPrefix(), jobResultsTemplate); - } catch (IOException e) { - logger.error("Error loading the template for the " + AnomalyDetectorsIndex.jobResultsIndexPrefix() + " indices", e); - } - - try { - templates.put(InferenceIndexConstants.LATEST_INDEX_NAME, InferenceInternalIndex.getIndexTemplateMetaData()); - } catch (IOException e) { - logger.error("Error loading the template for the " + InferenceIndexConstants.LATEST_INDEX_NAME + " index", e); - } - - return templates; - }; + return UnaryOperator.identity(); } public static boolean allTemplatesInstalled(ClusterState clusterState) { boolean allPresent = true; List templateNames = Arrays.asList( - AuditorField.NOTIFICATIONS_INDEX, + NotificationsIndex.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, AnomalyDetectorsIndex.jobResultsIndexPrefix(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java index d8407d67b9a..b2bcb43689b 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java @@ -32,9 +32,11 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; @@ -43,7 +45,6 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; @@ -494,7 +495,7 @@ public class MlConfigMigrator { .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) ); - createIndexRequest.mapping(SINGLE_MAPPING_NAME, ElasticsearchMappings.configMapping()); + createIndexRequest.mapping(SINGLE_MAPPING_NAME, MlConfigIndex.mapping(), XContentType.JSON); } catch (Exception e) { logger.error("error writing the .ml-config mappings", e); listener.onFailure(e); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java new file mode 100644 index 00000000000..be52cc9202c --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.Version; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.MlConfigIndex; +import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; +import org.elasticsearch.xpack.core.template.IndexTemplateConfig; +import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; +import org.elasticsearch.xpack.core.template.LifecyclePolicyConfig; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class MlIndexTemplateRegistry extends IndexTemplateRegistry { + + private static final String ROOT_RESOURCE_PATH = "/org/elasticsearch/xpack/core/ml/"; + private static final String ANOMALY_DETECTION_PATH = ROOT_RESOURCE_PATH 
+ "anomalydetection/"; + private static final String VERSION_PATTERN = "xpack.ml.version"; + private static final String VERSION_ID_PATTERN = "xpack.ml.version.id"; + + private static final IndexTemplateConfig ANOMALY_DETECTION_RESULTS_TEMPLATE = anomalyDetectionResultsTemplate(); + + private static final IndexTemplateConfig ANOMALY_DETECTION_STATE_TEMPLATE = new IndexTemplateConfig( + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX,ANOMALY_DETECTION_PATH + "state_index_template.json", + Version.CURRENT.id, VERSION_PATTERN, + Collections.singletonMap(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id))); + + private static final IndexTemplateConfig META_TEMPLATE = new IndexTemplateConfig(MlMetaIndex.INDEX_NAME, + ROOT_RESOURCE_PATH + "meta_index_template.json", Version.CURRENT.id, VERSION_PATTERN, + Collections.singletonMap(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id))); + + private static final IndexTemplateConfig NOTIFICATIONS_TEMPLATE = new IndexTemplateConfig(NotificationsIndex.NOTIFICATIONS_INDEX, + ROOT_RESOURCE_PATH + "notifications_index_template.json", Version.CURRENT.id, VERSION_PATTERN, + Collections.singletonMap(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id))); + + private static final IndexTemplateConfig CONFIG_TEMPLATE = configTemplate(); + + private static final IndexTemplateConfig INFERENCE_TEMPLATE = new IndexTemplateConfig(InferenceIndexConstants.LATEST_INDEX_NAME, + ROOT_RESOURCE_PATH + "inference_index_template.json", Version.CURRENT.id, VERSION_PATTERN, + Collections.singletonMap(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id))); + + private static IndexTemplateConfig configTemplate() { + Map variables = new HashMap<>(); + variables.put(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id)); + variables.put("xpack.ml.config.max_result_window", + String.valueOf(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)); + variables.put("xpack.ml.config.mappings", MlConfigIndex.mapping()); + + return new 
IndexTemplateConfig(AnomalyDetectorsIndex.configIndexName(), + ROOT_RESOURCE_PATH + "config_index_template.json", + Version.CURRENT.id, VERSION_PATTERN, + variables); + } + + private static IndexTemplateConfig anomalyDetectionResultsTemplate() { + Map variables = new HashMap<>(); + variables.put(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id)); + variables.put("xpack.ml.anomalydetection.results.mappings", AnomalyDetectorsIndex.resultsMapping()); + + return new IndexTemplateConfig(AnomalyDetectorsIndex.jobResultsIndexPrefix(), + ANOMALY_DETECTION_PATH + "results_index_template.json", + Version.CURRENT.id, VERSION_PATTERN, + variables); + } + + public MlIndexTemplateRegistry(Settings nodeSettings, ClusterService clusterService, ThreadPool threadPool, Client client, + NamedXContentRegistry xContentRegistry) { + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); + } + + @Override + protected boolean requiresMasterNode() { + return true; + } + + @Override + protected List getTemplateConfigs() { + return Arrays.asList( + ANOMALY_DETECTION_RESULTS_TEMPLATE, + ANOMALY_DETECTION_STATE_TEMPLATE, + CONFIG_TEMPLATE, + INFERENCE_TEMPLATE, + META_TEMPLATE, + NOTIFICATIONS_TEMPLATE + ); + } + + @Override + protected List getPolicyConfigs() { + return Collections.emptyList(); + } + + @Override + protected String getOrigin() { + return ClientHelper.ML_ORIGIN; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java index 89306c0a60c..2af917157ff 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.XPackField; import 
org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.common.validation.SourceDestValidator; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.action.PutDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -208,7 +209,7 @@ public class TransportPutDataFrameAnalyticsAction } ElasticsearchMappings.addDocMappingIfMissing( AnomalyDetectorsIndex.configIndexName(), - ElasticsearchMappings::configMapping, + MlConfigIndex::mapping, client, clusterState, ActionListener.wrap( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java index 1179268b152..8f00956949d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java @@ -36,6 +36,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -210,7 +211,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction createAliasListener = ActionListener.wrap(success -> { + ActionListener indexAndMappingsListener = ActionListener.wrap(success -> { final IndicesAliasesRequest request = client.admin().indices().prepareAliases() .addAlias(indexName, readAliasName, 
QueryBuilders.termQuery(Job.ID.getPreferredName(), job.getId())) .addAlias(indexName, writeAliasName).request(); @@ -293,54 +294,50 @@ public class JobResultsProvider { if (!state.getMetaData().hasIndex(indexName)) { LOGGER.trace("ES API CALL: create index {}", indexName); CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); - // This assumes the requested mapping will be merged with mappings from the template, - // and may need to be revisited if template merging is ever refactored - try (XContentBuilder termFieldsMapping = ElasticsearchMappings.termFieldsMapping(termFields)) { - createIndexRequest.mapping(SINGLE_MAPPING_NAME, termFieldsMapping); - } executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest, ActionListener.wrap( - r -> createAliasListener.onResponse(r.isAcknowledged()), + // Add the term field mappings and alias. The complication is that the state at the + // beginning of the operation doesn't have any knowledge of the index, as it's only + // just been created. So we need yet another operation to get the mappings for it. + r -> getLatestIndexMappingsAndAddTerms(indexName, termFields, indexAndMappingsListener), e -> { // Possible that the index was created while the request was executing, // so we need to handle that possibility if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException) { - LOGGER.info("Index already exists"); - // Add the term field mappings and alias. The complication is that the state at the - // beginning of the operation doesn't have any knowledge of the index, as it's only - // just been created. So we need yet another operation to get the mappings for it. - getLatestIndexMappings(indexName, ActionListener.wrap( - response -> { - // Expect one index and one type. If this is not the case then it means the - // index has been deleted almost immediately after being created, and this is - // so unlikely that it's reasonable to fail the whole operation. 
- ImmutableOpenMap indexMappings = - response.getMappings().iterator().next().value; - MappingMetaData typeMappings = indexMappings.iterator().next().value; - addTermsAndAliases(typeMappings, indexName, termFields, createAliasListener); - }, - finalListener::onFailure - )); + LOGGER.info("Index [{}] already exists", indexName); + getLatestIndexMappingsAndAddTerms(indexName, termFields, indexAndMappingsListener); } else { finalListener.onFailure(e); } } ), client.admin().indices()::create); } else { - MappingMetaData mapping = state.metaData().index(indexName).mapping(); - addTermsAndAliases(mapping, indexName, termFields, createAliasListener); + MappingMetaData indexMappings = state.metaData().index(indexName).mapping(); + addTermsMapping(indexMappings, indexName, termFields, indexAndMappingsListener); } } - private void getLatestIndexMappings(final String indexName, final ActionListener listener) { + private void getLatestIndexMappingsAndAddTerms(String indexName, Collection termFields, ActionListener listener) { + + ActionListener getMappingsListener = ActionListener.wrap( + getMappingsResponse -> { + // Expect one index and one type. If this is not the case then it means the + // index has been deleted almost immediately after being created, and this is + // so unlikely that it's reasonable to fail the whole operation. 
+ ImmutableOpenMap indexMappings = getMappingsResponse.getMappings().iterator().next().value; + MappingMetaData typeMappings = indexMappings.iterator().next().value; + addTermsMapping(typeMappings, indexName, termFields, listener); + }, + listener::onFailure + ); GetMappingsRequest getMappingsRequest = client.admin().indices().prepareGetMappings(indexName).request(); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getMappingsRequest, listener, + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getMappingsRequest, getMappingsListener, client.admin().indices()::getMappings); } - private void addTermsAndAliases(final MappingMetaData mapping, final String indexName, final Collection termFields, - final ActionListener listener) { + private void addTermsMapping(MappingMetaData mapping, String indexName, Collection termFields, + ActionListener listener) { long fieldCountLimit = MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.get(settings); if (violatedFieldCountLimit(termFields.size(), fieldCountLimit, mapping)) { @@ -380,8 +377,9 @@ public class JobResultsProvider { private void updateIndexMappingWithTermFields(String indexName, String mappingType, Collection termFields, ActionListener listener) { - // Put the whole mapping, not just the term fields, otherwise we'll wipe the _meta section of the mapping - try (XContentBuilder termFieldsMapping = ElasticsearchMappings.resultsMapping(mappingType, termFields)) { + + try (XContentBuilder termFieldsMapping = JsonXContent.contentBuilder()) { + createTermFieldsMapping(termFieldsMapping, mappingType, termFields); final PutMappingRequest request = client.admin().indices().preparePutMapping(indexName) .setType(mappingType) .setSource(termFieldsMapping).request(); @@ -401,6 +399,21 @@ public class JobResultsProvider { } } + // Visible for testing + static void createTermFieldsMapping(XContentBuilder builder, String mappingType, Collection termFields) throws IOException { + 
builder.startObject(); + builder.startObject(mappingType); + builder.startObject("properties"); + for (String fieldName : termFields) { + if (ReservedFieldNames.isValidFieldName(fieldName)) { + builder.startObject(fieldName).field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD).endObject(); + } + } + builder.endObject(); + builder.endObject(); + builder.endObject(); + } + /** * Get the job's data counts * diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 9ef73319811..9eef65bcbbe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -441,7 +441,7 @@ public class AutodetectProcessManager implements ClusterStateListener { // Try adding the results doc mapping - this updates to the latest version if an old mapping is present ElasticsearchMappings.addDocMappingIfMissing(AnomalyDetectorsIndex.jobResultsAliasedName(jobId), - ElasticsearchMappings::resultsMapping, client, clusterState, resultsMappingUpdateHandler); + AnomalyDetectorsIndex::resultsMapping, client, clusterState, resultsMappingUpdateHandler); } private boolean createProcessAndSetRunning(ProcessContext processContext, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java index 48c5872b057..6e3af2b8928 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java @@ -7,7 +7,7 @@ package 
org.elasticsearch.xpack.ml.notifications; import org.elasticsearch.client.Client; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.ml.notifications.AnomalyDetectionAuditMessage; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -15,6 +15,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; public class AnomalyDetectionAuditor extends AbstractAuditor { public AnomalyDetectionAuditor(Client client, String nodeName) { - super(client, nodeName, AuditorField.NOTIFICATIONS_INDEX, ML_ORIGIN, AnomalyDetectionAuditMessage::new); + super(client, nodeName, NotificationsIndex.NOTIFICATIONS_INDEX, ML_ORIGIN, AnomalyDetectionAuditMessage::new); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/DataFrameAnalyticsAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/DataFrameAnalyticsAuditor.java index 1c9be78d241..1acccaafcda 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/DataFrameAnalyticsAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/DataFrameAnalyticsAuditor.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ml.notifications; import org.elasticsearch.client.Client; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.ml.notifications.DataFrameAnalyticsAuditMessage; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -15,6 +15,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; public class DataFrameAnalyticsAuditor extends AbstractAuditor { public 
DataFrameAnalyticsAuditor(Client client, String nodeName) { - super(client, nodeName, AuditorField.NOTIFICATIONS_INDEX, ML_ORIGIN, DataFrameAnalyticsAuditMessage::new); + super(client, nodeName, NotificationsIndex.NOTIFICATIONS_INDEX, ML_ORIGIN, DataFrameAnalyticsAuditMessage::new); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/InferenceAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/InferenceAuditor.java index dfce44af7c9..2be3e76e85b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/InferenceAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/InferenceAuditor.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ml.notifications; import org.elasticsearch.client.Client; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.ml.notifications.InferenceAuditMessage; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -15,6 +15,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; public class InferenceAuditor extends AbstractAuditor { public InferenceAuditor(Client client, String nodeName) { - super(client, nodeName, AuditorField.NOTIFICATIONS_INDEX, ML_ORIGIN, InferenceAuditMessage::new); + super(client, nodeName, NotificationsIndex.NOTIFICATIONS_INDEX, ML_ORIGIN, InferenceAuditMessage::new); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java index 11ca99d9d3a..e6eb50f8b04 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -22,9 +20,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteExpiredDataAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteExpiredDataAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -35,8 +30,7 @@ public class RestDeleteExpiredDataAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "_delete_expired_data", - DELETE, MachineLearning.PRE_V7_BASE_PATH + "_delete_expired_data", - deprecationLogger) + DELETE, MachineLearning.PRE_V7_BASE_PATH + "_delete_expired_data") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java index 94e7ffc15c6..3ac75f7970a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.TimeValue; 
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -30,9 +28,6 @@ public class RestFindFileStructureAction extends BaseRestHandler { private static final TimeValue DEFAULT_TIMEOUT = new TimeValue(25, TimeUnit.SECONDS); - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestFindFileStructureAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -43,8 +38,7 @@ public class RestFindFileStructureAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "find_file_structure", - POST, MachineLearning.PRE_V7_BASE_PATH + "find_file_structure", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "find_file_structure") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java index c29b3ad6850..0d9c9fed19f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -22,9 +20,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestMlInfoAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestMlInfoAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -35,8 +30,7 @@ public 
class RestMlInfoAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(GET, MachineLearning.BASE_PATH + "info", - GET, MachineLearning.PRE_V7_BASE_PATH + "info", - deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "info") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java index 31bc22e99c4..7b9654db228 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -22,9 +20,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestSetUpgradeModeAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestSetUpgradeModeAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -35,8 +30,7 @@ public class RestSetUpgradeModeAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "set_upgrade_mode", - POST, MachineLearning.PRE_V7_BASE_PATH + "set_upgrade_mode", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "set_upgrade_mode") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java index 58f5fff7b44..c076ee44ce6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,9 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteCalendarAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -36,7 +31,7 @@ public class RestDeleteCalendarAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", - DELETE, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger) + DELETE, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java index 9fbc3b3ba50..4747f2d6405 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteCalendarEventAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarEventAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,7 +34,7 @@ public class RestDeleteCalendarEventAction extends BaseRestHandler { new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events/{" + ScheduledEvent.EVENT_ID.getPreferredName() + "}", DELETE, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events/{" + - ScheduledEvent.EVENT_ID.getPreferredName() + "}", deprecationLogger) + ScheduledEvent.EVENT_ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java index 61fbda4f3f2..e40f4b684a5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import 
org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteCalendarJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarJobAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,7 +34,7 @@ public class RestDeleteCalendarJobAction extends BaseRestHandler { new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" + Job.ID.getPreferredName() + "}", DELETE, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" + - Job.ID.getPreferredName() + "}", deprecationLogger) + Job.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java index c3baeebfbee..223bf7de175 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -26,9 +24,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetCalendarEventsAction extends BaseRestHandler { - 
private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetCalendarEventsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,7 +34,7 @@ public class RestGetCalendarEventsAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(GET, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", - GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java index eb2e74b309b..f0bb5ac9917 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -28,9 +26,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestGetCalendarsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetCalendarsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -41,13 +36,13 @@ public class RestGetCalendarsAction 
extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(GET, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", - GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "calendars/", - GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/", deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/"), new ReplacedRoute(POST, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", - POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger), + POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}"), new ReplacedRoute(POST, MachineLearning.BASE_PATH + "calendars/", - POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/", deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java index dfab91e85cb..6ff5332c455 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestPostCalendarEventAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPostCalendarEventAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,7 +32,7 @@ public class RestPostCalendarEventAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", - POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java index caba1a115cf..4d2fa79f93f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutCalendarAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new 
DeprecationLogger(LogManager.getLogger(RestPutCalendarAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,7 +32,7 @@ public class RestPutCalendarAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(PUT, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", - PUT, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger) + PUT, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java index 4c8709742aa..43d20b54314 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.calendar; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutCalendarJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPutCalendarJobAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -40,8 +35,7 @@ public class RestPutCalendarJobAction extends BaseRestHandler { MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" + 
Job.ID.getPreferredName() + "}", PUT, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" + - Job.ID.getPreferredName() + "}", - deprecationLogger) + Job.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDataFrameAnalyticsAction.java new file mode 100644 index 00000000000..87a503159b9 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDataFrameAnalyticsAction.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest.cat; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.Table; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.action.RestActionListener; +import org.elasticsearch.rest.action.RestResponseListener; +import org.elasticsearch.rest.action.cat.AbstractCatAction; +import org.elasticsearch.rest.action.cat.RestTable; +import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction; +import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsStatsAction; +import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsStatsAction.Response.Stats; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.utils.PhaseProgress; + +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import static 
java.util.Arrays.asList; +import static java.util.Collections.unmodifiableList; +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toMap; +import static org.elasticsearch.rest.RestRequest.Method.GET; + +public class RestCatDataFrameAnalyticsAction extends AbstractCatAction { + + @Override + public List routes() { + return unmodifiableList(asList( + new Route(GET, "_cat/ml/data_frame/analytics/{" + DataFrameAnalyticsConfig.ID.getPreferredName() + "}"), + new Route(GET, "_cat/ml/data_frame/analytics"))); + } + + @Override + public String getName() { + return "cat_ml_get_data_frame_analytics_action"; + } + + @Override + protected RestChannelConsumer doCatRequest(RestRequest restRequest, NodeClient client) { + String dataFrameAnalyticsId = restRequest.param(DataFrameAnalyticsConfig.ID.getPreferredName()); + if (Strings.isNullOrEmpty(dataFrameAnalyticsId)) { + dataFrameAnalyticsId = MetaData.ALL; + } + + GetDataFrameAnalyticsAction.Request getRequest = new GetDataFrameAnalyticsAction.Request(dataFrameAnalyticsId); + getRequest.setAllowNoResources( + restRequest.paramAsBoolean( + GetDataFrameAnalyticsAction.Request.ALLOW_NO_MATCH.getPreferredName(), getRequest.isAllowNoResources())); + + GetDataFrameAnalyticsStatsAction.Request getStatsRequest = new GetDataFrameAnalyticsStatsAction.Request(dataFrameAnalyticsId); + getStatsRequest.setAllowNoMatch(true); + + return channel -> client.execute( + GetDataFrameAnalyticsAction.INSTANCE, getRequest, new RestActionListener(channel) { + @Override + public void processResponse(GetDataFrameAnalyticsAction.Response getResponse) { + client.execute( + GetDataFrameAnalyticsStatsAction.INSTANCE, + getStatsRequest, + new RestResponseListener(channel) { + @Override + public RestResponse buildResponse(GetDataFrameAnalyticsStatsAction.Response getStatsResponse) throws Exception { + return RestTable.buildResponse(buildTable(getResponse, getStatsResponse), channel); + } + }); + } + }); + } + + 
@Override + protected void documentation(StringBuilder sb) { + sb.append("/_cat/ml/data_frame/analytics\n"); + sb.append("/_cat/ml/data_frame/analytics/{").append(DataFrameAnalyticsConfig.ID.getPreferredName()).append("}\n"); + } + + @Override + protected Table getTableWithHeader(RestRequest unused) { + return getTableWithHeader(); + } + + private static Table getTableWithHeader() { + return new Table() + .startHeaders() + // DFA config info + .addCell("id", TableColumnAttributeBuilder.builder("the id").build()) + .addCell("type", + TableColumnAttributeBuilder.builder("analysis type") + .setAliases("t") + .build()) + .addCell("create_time", + TableColumnAttributeBuilder.builder("job creation time") + .setAliases("ct", "createTime") + .build()) + .addCell("version", + TableColumnAttributeBuilder.builder("the version of Elasticsearch when the analytics was created", false) + .setAliases("v") + .build()) + .addCell("source_index", + TableColumnAttributeBuilder.builder("source index", false) + .setAliases("si", "sourceIndex") + .build()) + .addCell("dest_index", + TableColumnAttributeBuilder.builder("destination index", false) + .setAliases("di", "destIndex") + .build()) + .addCell("description", + TableColumnAttributeBuilder.builder("description", false) + .setAliases("d") + .build()) + .addCell("model_memory_limit", + TableColumnAttributeBuilder.builder("model memory limit", false) + .setAliases("mml", "modelMemoryLimit") + .build()) + // DFA stats info + .addCell("state", + TableColumnAttributeBuilder.builder("job state") + .setAliases("s") + .setTextAlignment(TableColumnAttributeBuilder.TextAlign.RIGHT) + .build()) + .addCell("failure_reason", + TableColumnAttributeBuilder.builder("failure reason", false) + .setAliases("fr", "failureReason") + .build()) + .addCell("progress", + TableColumnAttributeBuilder.builder("progress", false) + .setAliases("p") + .build()) + .addCell("assignment_explanation", + TableColumnAttributeBuilder.builder("why the job is or is not 
assigned to a node", false) + .setAliases("ae", "assignmentExplanation") + .build()) + // Node info + .addCell("node.id", + TableColumnAttributeBuilder.builder("id of the assigned node", false) + .setAliases("ni", "nodeId") + .build()) + .addCell("node.name", + TableColumnAttributeBuilder.builder("name of the assigned node", false) + .setAliases("nn", "nodeName") + .build()) + .addCell("node.ephemeral_id", + TableColumnAttributeBuilder.builder("ephemeral id of the assigned node", false) + .setAliases("ne", "nodeEphemeralId") + .build()) + .addCell("node.address", + TableColumnAttributeBuilder.builder("network address of the assigned node", false) + .setAliases("na", "nodeAddress") + .build()) + .endHeaders(); + } + + private static Table buildTable(GetDataFrameAnalyticsAction.Response getResponse, + GetDataFrameAnalyticsStatsAction.Response getStatsResponse) { + Map statsById = getStatsResponse.getResponse().results().stream().collect(toMap(Stats::getId, Function.identity())); + Table table = getTableWithHeader(); + for (DataFrameAnalyticsConfig config : getResponse.getResources().results()) { + Stats stats = statsById.get(config.getId()); + DiscoveryNode node = stats == null ? null : stats.getNode(); + table + .startRow() + .addCell(config.getId()) + .addCell(config.getAnalysis().getWriteableName()) + .addCell(config.getCreateTime()) + .addCell(config.getVersion()) + .addCell(String.join(",", config.getSource().getIndex())) + .addCell(config.getDest().getIndex()) + .addCell(config.getDescription()) + .addCell(config.getModelMemoryLimit()) + .addCell(stats == null ? null : stats.getState()) + .addCell(stats == null ? null : stats.getFailureReason()) + .addCell(stats == null ? null : progressToString(stats.getProgress())) + .addCell(stats == null ? null : stats.getAssignmentExplanation()) + .addCell(node == null ? null : node.getId()) + .addCell(node == null ? null : node.getName()) + .addCell(node == null ? null : node.getEphemeralId()) + .addCell(node == null ? 
null : node.getAddress().toString()) + .endRow(); + } + return table; + } + + private static String progressToString(List phases) { + return phases.stream().map(p -> p.getPhase() + ":" + p.getProgressPercent()).collect(joining(",")); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java index 00114f4d30d..0515bad3ce5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java @@ -70,12 +70,12 @@ public class RestCatDatafeedsAction extends AbstractCatAction { table.startHeaders(); // Datafeed Info - table.addCell("id", TableColumnAttributeBuilder.builder().setDescription("the datafeed_id").build()); - table.addCell("state", TableColumnAttributeBuilder.builder() - .setDescription("the datafeed state") - .setAliases("s") - .setTextAlignment(TableColumnAttributeBuilder.TextAlign.RIGHT) - .build()); + table.addCell("id", TableColumnAttributeBuilder.builder("the datafeed_id").build()); + table.addCell("state", + TableColumnAttributeBuilder.builder("the datafeed state") + .setAliases("s") + .setTextAlignment(TableColumnAttributeBuilder.TextAlign.RIGHT) + .build()); table.addCell("assignment_explanation", TableColumnAttributeBuilder.builder("why the datafeed is or is not assigned to a node", false) .setAliases("ae") diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java index 3e0eb12846c..64cd39edf30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatJobsAction.java @@ -75,17 +75,15 @@ public class 
RestCatJobsAction extends AbstractCatAction { table.startHeaders(); // Job Info - table.addCell("id", TableColumnAttributeBuilder.builder().setDescription("the job_id").build()); - table.addCell("state", TableColumnAttributeBuilder.builder() - .setDescription("the job state") - .setAliases("s") - .setTextAlignment(TableColumnAttributeBuilder.TextAlign.RIGHT) - .build()); + table.addCell("id", TableColumnAttributeBuilder.builder("the job_id").build()); + table.addCell("state", + TableColumnAttributeBuilder.builder("the job state") + .setAliases("s") + .setTextAlignment(TableColumnAttributeBuilder.TextAlign.RIGHT) + .build()); table.addCell("opened_time", - TableColumnAttributeBuilder.builder() - .setDescription("the amount of time the job has been opened") + TableColumnAttributeBuilder.builder("the amount of time the job has been opened", false) .setAliases("ot") - .setDisplayByDefault(false) .build()); table.addCell("assignment_explanation", TableColumnAttributeBuilder.builder("why the job is or is not assigned to a node", false) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java index 8a45b462ad0..fad00d3b0bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java @@ -130,17 +130,15 @@ public class RestCatTrainedModelsAction extends AbstractCatAction { table.startHeaders(); // Trained Model Info - table.addCell("id", TableColumnAttributeBuilder.builder().setDescription("the trained model id").build()); + table.addCell("id", TableColumnAttributeBuilder.builder("the trained model id").build()); table.addCell("created_by", TableColumnAttributeBuilder.builder("who created the model", false) .setAliases("c", "createdBy") 
.setTextAlignment(TableColumnAttributeBuilder.TextAlign.RIGHT) .build()); - table.addCell("heap_size", TableColumnAttributeBuilder.builder() - .setDescription("the estimated heap size to keep the model in memory") + table.addCell("heap_size", TableColumnAttributeBuilder.builder("the estimated heap size to keep the model in memory") .setAliases("hs","modelHeapSize") .build()); - table.addCell("operations", TableColumnAttributeBuilder.builder() - .setDescription("the estimated number of operations to use the model") + table.addCell("operations", TableColumnAttributeBuilder.builder("the estimated number of operations to use the model") .setAliases("o", "modelOperations") .build()); table.addCell("license", TableColumnAttributeBuilder.builder("The license level of the model", false) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java index e692688e30c..1294906f0dc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteDatafeedAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteDatafeedAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ 
-37,7 +32,7 @@ public class RestDeleteDatafeedAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", - DELETE, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", deprecationLogger) + DELETE, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java index 3a617808d4f..8bb02d280db 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -25,9 +23,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetDatafeedStatsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetDatafeedStatsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -38,10 +33,9 @@ public class RestGetDatafeedStatsAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(GET, MachineLearning.BASE_PATH + 
"datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", - GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "datafeeds/_stats", - GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/_stats", deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/_stats") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java index 19a4de3dd14..5f3f0ea4d98 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetDatafeedsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetDatafeedsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,9 +32,9 @@ public class RestGetDatafeedsAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + 
"}", - GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "datafeeds", - GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds", deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java index 85b28bbc143..7df1b79e266 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,9 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestPreviewDatafeedAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPreviewDatafeedAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -36,8 +31,7 @@ public class RestPreviewDatafeedAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_preview", - GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + 
DatafeedConfig.ID.getPreferredName() + "}/_preview", - deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_preview") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java index bff6f803ffb..8b71e137bea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutDatafeedAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPutDatafeedAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,8 +32,7 @@ public class RestPutDatafeedAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(PUT, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", - PUT, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", - deprecationLogger) + PUT, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java index 2e61d71b29c..23649cdf055 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -32,9 +30,6 @@ public class RestStartDatafeedAction extends BaseRestHandler { private static final String DEFAULT_START = "0"; - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestStartDatafeedAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -45,8 +40,7 @@ public class RestStartDatafeedAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", - POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java index efbc6d01a89..ea5035fbffe 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -30,9 +28,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestStopDatafeedAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestStopDatafeedAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -43,8 +38,7 @@ public class RestStopDatafeedAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stop", - POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stop", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stop") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java index 6c4e03f39f9..0375be2907b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java @@ -5,9 +5,7 @@ */ package 
org.elasticsearch.xpack.ml.rest.datafeeds; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestUpdateDatafeedAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestUpdateDatafeedAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,8 +32,7 @@ public class RestUpdateDatafeedAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_update", - POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_update", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_update") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java index bb94ffcda4f..9ada7999a9f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.filter; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.action.RestToXContentListener; @@ -23,9 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteFilterAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteFilterAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -36,8 +31,7 @@ public class RestDeleteFilterAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "filters/{" + Request.FILTER_ID.getPreferredName() + "}", - DELETE, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + Request.FILTER_ID.getPreferredName() + "}", - deprecationLogger) + DELETE, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + Request.FILTER_ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java index 421af0bdee6..2752adfdd5f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest.filter; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; @@ -26,9 +24,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetFiltersAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new 
DeprecationLogger(LogManager.getLogger(RestGetFiltersAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,11 +34,9 @@ public class RestGetFiltersAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(GET, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", - GET, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "filters/", - GET, MachineLearning.PRE_V7_BASE_PATH + "filters/", - deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "filters/") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java index 85a01ac0edf..2ec99070aae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.filter; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutFilterAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPutFilterAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,8 +32,7 @@ 
public class RestPutFilterAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(PUT, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", - PUT, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", - deprecationLogger) + PUT, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java index 6bf83acdc5e..9a5aa4b46e3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.filter; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestUpdateFilterAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestUpdateFilterAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,8 +32,7 @@ public class RestUpdateFilterAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", - POST, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + 
"}/_update", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java index 0b8ed5a9e37..9e343bebcc0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -25,9 +23,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestCloseJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestCloseJobAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -38,7 +33,7 @@ public class RestCloseJobAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close", deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java index 
093b062dda1..2478e8169a2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -25,9 +23,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteForecastAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteForecastAction.class)); - @Override public List routes() { return singletonList( @@ -41,7 +36,7 @@ public class RestDeleteForecastAction extends BaseRestHandler { new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_forecast/{" + Forecast.FORECAST_ID.getPreferredName() + "}", DELETE, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + - "}/_forecast/{" + Forecast.FORECAST_ID.getPreferredName() + "}", deprecationLogger) + "}/_forecast/{" + Forecast.FORECAST_ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java index 03515a40a7c..94b41cf7693 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java @@ -5,9 +5,7 @@ */ package 
org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -29,9 +27,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteJobAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -42,7 +37,7 @@ public class RestDeleteJobAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", - DELETE, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", deprecationLogger) + DELETE, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java index 17c3ef60144..606e8d341e5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -30,9 +28,6 @@ 
public class RestFlushJobAction extends BaseRestHandler { private static final String DEFAULT_ADVANCE_TIME = ""; private static final String DEFAULT_SKIP_TIME = ""; - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestFlushJobAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -43,8 +38,7 @@ public class RestFlushJobAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java index 5d0aee2f4db..38e10409c5a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestForecastJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestForecastJobAction.class)); - @Override public List routes() { 
return Collections.emptyList(); @@ -37,8 +32,7 @@ public class RestForecastJobAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_forecast", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_forecast", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_forecast") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java index caf3402db06..22060cab8cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java @@ -5,11 +5,9 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -26,9 +24,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetJobStatsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetJobStatsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,10 +34,9 @@ public class RestGetJobStatsAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new 
ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors/_stats", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/_stats", deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/_stats") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java index b980646a045..0b0d8f41811 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java @@ -5,11 +5,9 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -26,9 +24,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetJobsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetJobsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,10 +34,9 @@ public class RestGetJobsAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new 
ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors", deprecationLogger) + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java index 21b30539ade..631c1aa526a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -29,9 +27,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestOpenJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestOpenJobAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -42,8 +37,7 @@ public class RestOpenJobAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + 
Job.ID.getPreferredName() + "}/_open", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_open", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_open") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java index 26d79114fa4..01ac45eb4f2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; @@ -26,9 +24,6 @@ public class RestPostDataAction extends BaseRestHandler { private static final String DEFAULT_RESET_START = ""; private static final String DEFAULT_RESET_END = ""; - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPostDataAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,8 +34,7 @@ public class RestPostDataAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data") ); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java index a3e69048f72..14756178ea9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestPostJobUpdateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPostJobUpdateAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,8 +32,7 @@ public class RestPostJobUpdateAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_update", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_update", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_update") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java index 20667e299e4..af0f7cc5102 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java 
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.job; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestPutJobAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -37,8 +32,7 @@ public class RestPutJobAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(PUT, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", - PUT, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", - deprecationLogger) + PUT, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java index 75fb5b474b5..02920345e84 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.modelsnapshots; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import 
org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -24,9 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteModelSnapshotAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteModelSnapshotAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -39,8 +34,7 @@ public class RestDeleteModelSnapshotAction extends BaseRestHandler { new ReplacedRoute(DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID.getPreferredName() + "}", DELETE, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + - "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID.getPreferredName() + "}", - deprecationLogger) + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID.getPreferredName() + "}") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java index 24b9e32492a..b9e510af4fc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.modelsnapshots; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestRequest; @@ -38,9 +36,6 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler { private static final String DEFAULT_END = null; private static final boolean DEFAULT_DESC_ORDER = true; - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetModelSnapshotsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -53,23 +48,19 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler { new ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/{" + Request.SNAPSHOT_ID.getPreferredName() + "}", GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" - + Job.ID.getPreferredName() + "}/model_snapshots/{" + Request.SNAPSHOT_ID.getPreferredName() + "}", - deprecationLogger), + + Job.ID.getPreferredName() + "}/model_snapshots/{" + Request.SNAPSHOT_ID.getPreferredName() + "}"), new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/{" + Request.SNAPSHOT_ID.getPreferredName() + "}", POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" - + Job.ID.getPreferredName() + "}/model_snapshots/{" + Request.SNAPSHOT_ID.getPreferredName() + "}", - deprecationLogger), + + Job.ID.getPreferredName() + "}/model_snapshots/{" + Request.SNAPSHOT_ID.getPreferredName() + "}"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots", GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" - + Job.ID.getPreferredName() + "}/model_snapshots", - deprecationLogger), + + Job.ID.getPreferredName() + "}/model_snapshots"), new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots", POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" - + Job.ID.getPreferredName() + 
"}/model_snapshots", - deprecationLogger) + + Job.ID.getPreferredName() + "}/model_snapshots") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java index 2fe0c0eecd0..d01ba85bcce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.modelsnapshots; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -26,9 +24,6 @@ public class RestRevertModelSnapshotAction extends BaseRestHandler { private static final boolean DELETE_INTERVENING_DEFAULT = false; - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestRevertModelSnapshotAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -42,8 +37,7 @@ public class RestRevertModelSnapshotAction extends BaseRestHandler { POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/{" + RevertModelSnapshotAction.Request.SNAPSHOT_ID.getPreferredName() + "}/_revert", POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/{" + - RevertModelSnapshotAction.Request.SNAPSHOT_ID.getPreferredName() + "}/_revert", - deprecationLogger) + RevertModelSnapshotAction.Request.SNAPSHOT_ID.getPreferredName() + "}/_revert") ); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java index 1a3b43a03b3..9f1db32b327 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.modelsnapshots; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -25,9 +23,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestUpdateModelSnapshotAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestUpdateModelSnapshotAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -40,8 +35,7 @@ public class RestUpdateModelSnapshotAction extends BaseRestHandler { new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID +"}/_update", POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" - + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID +"}/_update", - deprecationLogger) + + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID +"}/_update") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java index ecf94c1fe22..6a776eb111c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.ml.rest.results; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -29,9 +27,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestGetBucketsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetBucketsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -44,23 +39,19 @@ public class RestGetBucketsAction extends BaseRestHandler { new ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/buckets/{" + Result.TIMESTAMP.getPreferredName() + "}", GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() - + "}/results/buckets/{" + Result.TIMESTAMP.getPreferredName() + "}", - deprecationLogger), + + "}/results/buckets/{" + Result.TIMESTAMP.getPreferredName() + "}"), new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/buckets/{" + Result.TIMESTAMP.getPreferredName() + "}", POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() - + "}/results/buckets/{" + Result.TIMESTAMP.getPreferredName() + "}", - deprecationLogger), + + "}/results/buckets/{" + 
Result.TIMESTAMP.getPreferredName() + "}"), new ReplacedRoute(GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/buckets", GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() - + "}/results/buckets", - deprecationLogger), + + "}/results/buckets"), new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/buckets", POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() - + "}/results/buckets", - deprecationLogger) + + "}/results/buckets") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java index 55a4fbffd73..2cb15f8a49e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.results; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -28,9 +26,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestGetCategoriesAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetCategoriesAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -44,21 +39,17 @@ public class RestGetCategoriesAction extends BaseRestHandler { GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories/{" 
+ Request.CATEGORY_ID.getPreferredName() + "}", GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories/{" - + Request.CATEGORY_ID.getPreferredName() + "}", - deprecationLogger), + + Request.CATEGORY_ID.getPreferredName() + "}"), new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories/{" + Request.CATEGORY_ID.getPreferredName() + "}", POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + - "}/results/categories/{" + Request.CATEGORY_ID.getPreferredName() + "}", - deprecationLogger), + "}/results/categories/{" + Request.CATEGORY_ID.getPreferredName() + "}"), new ReplacedRoute( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories"), new ReplacedRoute( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java index 639ba3f17f3..e191f935bd4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java @@ -5,9 +5,7 @@ */ 
package org.elasticsearch.xpack.ml.rest.results; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -27,9 +25,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestGetInfluencersAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetInfluencersAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -41,12 +36,10 @@ public class RestGetInfluencersAction extends BaseRestHandler { return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers"), new ReplacedRoute( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java index 541fac368e1..03d17d1a2e2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.results; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -27,9 +25,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestGetOverallBucketsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetOverallBucketsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -41,12 +36,10 @@ public class RestGetOverallBucketsAction extends BaseRestHandler { return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/overall_buckets", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/overall_buckets", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/overall_buckets"), new ReplacedRoute( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/overall_buckets", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/overall_buckets", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/overall_buckets") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java index 
738da300e7d..c8207b1dc08 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.results; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -27,9 +25,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestGetRecordsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetRecordsAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -41,12 +36,10 @@ public class RestGetRecordsAction extends BaseRestHandler { return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/records", - GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/records", - deprecationLogger), + GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/records"), new ReplacedRoute( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/records", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/records", - deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/records") )); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java index 07dda46e4ca..0f6e86c0b66 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.validate; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -23,9 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestValidateDetectorAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestValidateDetectorAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -36,7 +31,7 @@ public class RestValidateDetectorAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/_validate/detector", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/_validate/detector", deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/_validate/detector") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java index aa96b3c591f..c0665ffc469 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ml.rest.validate; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -23,9 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestValidateJobConfigAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestValidateJobConfigAction.class)); - @Override public List routes() { return Collections.emptyList(); @@ -36,7 +31,7 @@ public class RestValidateJobConfigAction extends BaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, MachineLearning.BASE_PATH + "anomaly_detectors/_validate", - POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/_validate", deprecationLogger) + POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/_validate") ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 2809d7e57ef..94968422708 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -47,7 +47,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Operator; import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import 
org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.JobNodeSelector; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; @@ -235,7 +235,7 @@ public class TransportOpenJobActionTests extends ESTestCase { indices.add(AnomalyDetectorsIndex.configIndexName()); indices.add(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); indices.add(MlMetaIndex.INDEX_NAME); - indices.add(AuditorField.NOTIFICATIONS_INDEX); + indices.add(NotificationsIndex.NOTIFICATIONS_INDEX); indices.add(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); for (String indexName : indices) { IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index c7b7faa8175..3fa6a8721bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -5,8 +5,11 @@ */ package org.elasticsearch.xpack.ml.integration; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; @@ -15,6 +18,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.OriginSettingClient; +import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -32,10 +36,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.PutJobAction; -import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; @@ -78,8 +82,13 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.not; @@ -116,6 +125,73 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase { waitForMlTemplates(); } + public void testPutJob_CreatesResultsIndex() { + + Job.Builder job1 = new 
Job.Builder("first_job"); + job1.setAnalysisConfig(createAnalysisConfig("by_field_1", Collections.emptyList())); + job1.setDataDescription(new DataDescription.Builder()); + + // Put first job. This should create the results index as it's the first job. + client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job1)).actionGet(); + + String sharedResultsIndex = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; + Map mappingProperties = getIndexMappingProperties(sharedResultsIndex); + + // Assert mappings have a few fields from the template + assertThat(mappingProperties.keySet(), hasItems("anomaly_score", "bucket_count")); + // Assert mappings have the by field + assertThat(mappingProperties.keySet(), hasItem("by_field_1")); + + // Check aliases have been created + assertThat(getAliases(sharedResultsIndex), containsInAnyOrder(AnomalyDetectorsIndex.jobResultsAliasedName(job1.getId()), + AnomalyDetectorsIndex.resultsWriteAlias(job1.getId()))); + + // Now let's create a second job to test things work when the index exists already + assertThat(mappingProperties.keySet(), not(hasItem("by_field_2"))); + + Job.Builder job2 = new Job.Builder("second_job"); + job2.setAnalysisConfig(createAnalysisConfig("by_field_2", Collections.emptyList())); + job2.setDataDescription(new DataDescription.Builder()); + + client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job2)).actionGet(); + + mappingProperties = getIndexMappingProperties(sharedResultsIndex); + + // Assert mappings have a few fields from the template + assertThat(mappingProperties.keySet(), hasItems("anomaly_score", "bucket_count")); + // Assert mappings have the by field + assertThat(mappingProperties.keySet(), hasItems("by_field_1", "by_field_2")); + + // Check aliases have been created + assertThat(getAliases(sharedResultsIndex), containsInAnyOrder( + AnomalyDetectorsIndex.jobResultsAliasedName(job1.getId()), + 
AnomalyDetectorsIndex.resultsWriteAlias(job1.getId()), + AnomalyDetectorsIndex.jobResultsAliasedName(job2.getId()), + AnomalyDetectorsIndex.resultsWriteAlias(job2.getId()) + )); + } + + public void testPutJob_WithCustomResultsIndex() { + Job.Builder job = new Job.Builder("foo"); + job.setResultsIndexName("bar"); + job.setAnalysisConfig(createAnalysisConfig("by_field", Collections.emptyList())); + job.setDataDescription(new DataDescription.Builder()); + + client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet(); + + String customIndex = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-bar"; + Map mappingProperties = getIndexMappingProperties(customIndex); + + // Assert mappings have a few fields from the template + assertThat(mappingProperties.keySet(), hasItems("anomaly_score", "bucket_count")); + // Assert mappings have the by field + assertThat(mappingProperties.keySet(), hasItem("by_field")); + + // Check aliases have been created + assertThat(getAliases(customIndex), containsInAnyOrder(AnomalyDetectorsIndex.jobResultsAliasedName(job.getId()), + AnomalyDetectorsIndex.resultsWriteAlias(job.getId()))); + } + @AwaitsFix(bugUrl ="https://github.com/elastic/elasticsearch/issues/40134") public void testMultipleSimultaneousJobCreations() { @@ -268,6 +344,39 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase { } } + private Map getIndexMappingProperties(String index) { + GetMappingsRequest request = new GetMappingsRequest().indices(index); + GetMappingsResponse response = client().execute(GetMappingsAction.INSTANCE, request).actionGet(); + ImmutableOpenMap> indexMappings = response.getMappings(); + assertNotNull(indexMappings); + ImmutableOpenMap typeMappings = indexMappings.get(index); + assertNotNull("expected " + index + " in " + indexMappings, typeMappings); + assertEquals("expected 1 type in " + typeMappings, 1, typeMappings.size()); + Map mappings = typeMappings.iterator().next().value.getSourceAsMap(); + 
assertNotNull(mappings); + + // Assert _meta info is present + assertThat(mappings.keySet(), hasItem("_meta")); + @SuppressWarnings("unchecked") + Map meta = (Map) mappings.get("_meta"); + assertThat(meta.keySet(), hasItem("version")); + assertThat(meta.get("version"), equalTo(Version.CURRENT.toString())); + + @SuppressWarnings("unchecked") + Map properties = (Map) mappings.get("properties"); + assertNotNull("expected 'properties' field in " + mappings, properties); + return properties; + } + + private Set getAliases(String index) { + GetAliasesResponse getAliasesResponse = client().admin().indices().getAliases( + new GetAliasesRequest().indices(index)).actionGet(); + ImmutableOpenMap> aliases = getAliasesResponse.getAliases(); + assertThat(aliases.containsKey(index), is(true)); + List aliasMetaData = aliases.get(index); + return aliasMetaData.stream().map(AliasMetaData::alias).collect(Collectors.toSet()); + } + private List getCalendars(String jobId) throws Exception { CountDownLatch latch = new CountDownLatch(1); AtomicReference exceptionHolder = new AtomicReference<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index bc2358e0f86..c2619f56e51 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -9,8 +9,6 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.search.MultiSearchAction; import 
org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchRequestBuilder; @@ -19,26 +17,19 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.Index; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; @@ -47,17 +38,14 @@ import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.job.config.Job; import 
org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.TimingStats; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.core.ml.job.results.Influencer; -import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.core.ml.utils.ExponentialAverageCalculationContext; import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; -import org.mockito.ArgumentCaptor; import java.io.IOException; import java.time.Instant; @@ -68,183 +56,20 @@ import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; -import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.mockito.Matchers.any; -import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class JobResultsProviderTests extends ESTestCase { - private static final String CLUSTER_NAME = "myCluster"; - - @SuppressWarnings("unchecked") - public void testCreateJobResultsIndex() { - String resultsIndexName = 
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; - QueryBuilder jobFilter = QueryBuilders.termQuery("job_id", "foo"); - - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - ArgumentCaptor captor = ArgumentCaptor.forClass(CreateIndexRequest.class); - clientBuilder.createIndexRequest(captor, resultsIndexName); - clientBuilder.prepareAlias(resultsIndexName, AnomalyDetectorsIndex.jobResultsAliasedName("foo"), jobFilter); - clientBuilder.prepareAlias(resultsIndexName, AnomalyDetectorsIndex.resultsWriteAlias("foo")); - - Job.Builder job = buildJobBuilder("foo"); - JobResultsProvider provider = createProvider(clientBuilder.build()); - AtomicReference resultHolder = new AtomicReference<>(); - - ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().indices(ImmutableOpenMap.of())) - .build(); - - ClusterService clusterService = mock(ClusterService.class); - - doAnswer(invocationOnMock -> { - AckedClusterStateUpdateTask task = (AckedClusterStateUpdateTask) invocationOnMock.getArguments()[1]; - task.execute(cs); - return null; - }).when(clusterService).submitStateUpdateTask(eq("put-job-foo"), any(AckedClusterStateUpdateTask.class)); - - provider.createJobResultIndex(job.build(), cs, new ActionListener() { - @Override - public void onResponse(Boolean aBoolean) { - CreateIndexRequest request = captor.getValue(); - assertNotNull(request); - assertEquals(resultsIndexName, request.index()); - clientBuilder.verifyIndexCreated(resultsIndexName); - resultHolder.set(aBoolean); - } - - @Override - public void onFailure(Exception e) { - fail(e.toString()); - } - }); - - assertNotNull(resultHolder.get()); - assertTrue(resultHolder.get()); - } - - @SuppressWarnings("unchecked") - public void testCreateJobWithExistingIndex() { - QueryBuilder jobFilter = QueryBuilders.termQuery("job_id", "foo"); - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - 
clientBuilder.prepareAlias(AnomalyDetectorsIndex.jobResultsAliasedName("foo"), - AnomalyDetectorsIndex.jobResultsAliasedName("foo123"), jobFilter); - clientBuilder.preparePutMapping(mock(AcknowledgedResponse.class), Result.TYPE.getPreferredName()); - - GetMappingsResponse getMappingsResponse = mock(GetMappingsResponse.class); - ImmutableOpenMap typeMappings = ImmutableOpenMap.of(); - - ImmutableOpenMap> mappings = - ImmutableOpenMap.>builder() - .fPut(AnomalyDetectorsIndex.jobResultsAliasedName("foo"), typeMappings).build(); - when(getMappingsResponse.mappings()).thenReturn(mappings); - clientBuilder.prepareGetMapping(getMappingsResponse); - - Job.Builder job = buildJobBuilder("foo123"); - job.setResultsIndexName("foo"); - JobResultsProvider provider = createProvider(clientBuilder.build()); - - Index index = mock(Index.class); - when(index.getName()).thenReturn(AnomalyDetectorsIndex.jobResultsAliasedName("foo")); - IndexMetaData indexMetaData = mock(IndexMetaData.class); - when(indexMetaData.getIndex()).thenReturn(index); - - ImmutableOpenMap aliases = ImmutableOpenMap.of(); - when(indexMetaData.getAliases()).thenReturn(aliases); - when(indexMetaData.getSettings()).thenReturn(Settings.EMPTY); - - ImmutableOpenMap indexMap = ImmutableOpenMap.builder() - .fPut(AnomalyDetectorsIndex.jobResultsAliasedName("foo"), indexMetaData).build(); - - ClusterState cs2 = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().indices(indexMap)).build(); - - ClusterService clusterService = mock(ClusterService.class); - - doAnswer(invocationOnMock -> { - AckedClusterStateUpdateTask task = (AckedClusterStateUpdateTask) invocationOnMock.getArguments()[1]; - task.execute(cs2); - return null; - }).when(clusterService).submitStateUpdateTask(eq("put-job-foo123"), any(AckedClusterStateUpdateTask.class)); - - doAnswer(invocationOnMock -> { - AckedClusterStateUpdateTask task = (AckedClusterStateUpdateTask) invocationOnMock.getArguments()[1]; - task.execute(cs2); - 
return null; - }).when(clusterService).submitStateUpdateTask(eq("index-aliases"), any(AckedClusterStateUpdateTask.class)); - - provider.createJobResultIndex(job.build(), cs2, new ActionListener() { - @Override - public void onResponse(Boolean aBoolean) { - assertTrue(aBoolean); - verify(clientBuilder.build().admin().indices(), times(1)).preparePutMapping(any()); - } - - @Override - public void onFailure(Exception e) { - fail(e.toString()); - } - }); - } - - @SuppressWarnings("unchecked") - public void testCreateJobRelatedIndicies_createsAliasBecauseIndexNameIsSet() { - String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-bar"; - String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName("foo"); - String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias("foo"); - QueryBuilder jobFilter = QueryBuilders.termQuery("job_id", "foo"); - - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - ArgumentCaptor captor = ArgumentCaptor.forClass(CreateIndexRequest.class); - clientBuilder.createIndexRequest(captor, indexName); - clientBuilder.prepareAlias(indexName, readAliasName, jobFilter); - clientBuilder.prepareAlias(indexName, writeAliasName); - clientBuilder.preparePutMapping(mock(AcknowledgedResponse.class), Result.TYPE.getPreferredName()); - - Job.Builder job = buildJobBuilder("foo"); - job.setResultsIndexName("bar"); - Client client = clientBuilder.build(); - JobResultsProvider provider = createProvider(client); - - ImmutableOpenMap indexMap = ImmutableOpenMap.builder().build(); - - ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().indices(indexMap)).build(); - - ClusterService clusterService = mock(ClusterService.class); - - doAnswer(invocationOnMock -> { - AckedClusterStateUpdateTask task = (AckedClusterStateUpdateTask) invocationOnMock.getArguments()[1]; - task.execute(cs); - return null; - }).when(clusterService).submitStateUpdateTask(eq("put-job-foo"), 
any(AckedClusterStateUpdateTask.class)); - - provider.createJobResultIndex(job.build(), cs, new ActionListener() { - @Override - public void onResponse(Boolean aBoolean) { - verify(client.admin().indices(), times(1)).prepareAliases(); - verify(client.admin().indices().prepareAliases(), times(1)).addAlias(indexName, readAliasName, jobFilter); - verify(client.admin().indices().prepareAliases(), times(1)).addAlias(indexName, writeAliasName); - } - - @Override - public void onFailure(Exception e) { - fail(e.toString()); - } - }); - } public void testBuckets_OneBucketNoInterim() throws IOException { String jobId = "TestJobIdentification"; @@ -853,7 +678,7 @@ public class JobResultsProviderTests extends ESTestCase { contextMap.put(ExponentialAverageCalculationContext.LATEST_TIMESTAMP.getPreferredName(), Instant.ofEpochMilli(1000_000_000)); contextMap.put(ExponentialAverageCalculationContext.PREVIOUS_EXPONENTIAL_AVERAGE_MS.getPreferredName(), 200.0); timingStatsMap.put(DatafeedTimingStats.EXPONENTIAL_AVG_CALCULATION_CONTEXT.getPreferredName(), contextMap); - + List> source = Arrays.asList(timingStatsMap); SearchResponse response = createSearchResponse(source); Client client = getMockedClient( @@ -1041,6 +866,32 @@ public class JobResultsProviderTests extends ESTestCase { verifyNoMoreInteractions(client); } + @SuppressWarnings("unchecked") + public void testCreateTermFieldsMapping() throws IOException { + + XContentBuilder termFieldsMapping = JsonXContent.contentBuilder(); + JobResultsProvider.createTermFieldsMapping(termFieldsMapping, "_doc", Arrays.asList("apple", "strawberry", + AnomalyRecord.BUCKET_SPAN.getPreferredName())); + + XContentParser parser = createParser(termFieldsMapping); + Map typeMappings = (Map) parser.map().get("_doc"); + Map properties = (Map) typeMappings.get("properties"); + + Map instanceMapping = (Map) properties.get("apple"); + assertNotNull(instanceMapping); + String dataType = (String)instanceMapping.get("type"); + assertEquals("keyword", 
dataType); + + instanceMapping = (Map) properties.get("strawberry"); + assertNotNull(instanceMapping); + dataType = (String)instanceMapping.get("type"); + assertEquals("keyword", dataType); + + // check no mapping for the reserved field + instanceMapping = (Map) properties.get(AnomalyRecord.BUCKET_SPAN.getPreferredName()); + assertNull(instanceMapping); + } + private JobResultsProvider createProvider(Client client) { return new JobResultsProvider(client, Settings.EMPTY); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java index 75a99e2899d..a2b628df858 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java @@ -10,37 +10,22 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; -import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder; -import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequestBuilder; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -54,22 +39,14 @@ import org.mockito.ArgumentCaptor; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyInt; -import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static 
org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class MockClientBuilder { @@ -79,14 +56,11 @@ public class MockClientBuilder { private ClusterAdminClient clusterAdminClient; private IndicesAdminClient indicesAdminClient; - private IndicesAliasesRequestBuilder aliasesRequestBuilder; - public MockClientBuilder(String clusterName) { client = mock(Client.class); adminClient = mock(AdminClient.class); clusterAdminClient = mock(ClusterAdminClient.class); indicesAdminClient = mock(IndicesAdminClient.class); - aliasesRequestBuilder = mock(IndicesAliasesRequestBuilder.class); when(client.admin()).thenReturn(adminClient); when(adminClient.cluster()).thenReturn(clusterAdminClient); @@ -99,7 +73,7 @@ public class MockClientBuilder { } @SuppressWarnings({ "unchecked" }) - public MockClientBuilder addClusterStatusYellowResponse() throws InterruptedException, ExecutionException { + public MockClientBuilder addClusterStatusYellowResponse() { PlainActionFuture actionFuture = mock(PlainActionFuture.class); ClusterHealthRequestBuilder clusterHealthRequestBuilder = mock(ClusterHealthRequestBuilder.class); @@ -110,64 +84,6 @@ public class MockClientBuilder { return this; } - @SuppressWarnings({ "unchecked" }) - public MockClientBuilder addClusterStatusYellowResponse(String index) throws InterruptedException, ExecutionException { - PlainActionFuture actionFuture = mock(PlainActionFuture.class); - ClusterHealthRequestBuilder clusterHealthRequestBuilder = mock(ClusterHealthRequestBuilder.class); - - when(clusterAdminClient.prepareHealth(index)).thenReturn(clusterHealthRequestBuilder); - when(clusterHealthRequestBuilder.setWaitForYellowStatus()).thenReturn(clusterHealthRequestBuilder); - when(clusterHealthRequestBuilder.execute()).thenReturn(actionFuture); - when(actionFuture.actionGet()).thenReturn(mock(ClusterHealthResponse.class)); - 
return this; - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - public MockClientBuilder addIndicesExistsResponse(String index, boolean exists) throws InterruptedException, ExecutionException { - ActionFuture actionFuture = mock(ActionFuture.class); - ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(IndicesExistsRequest.class); - - when(indicesAdminClient.exists(requestCaptor.capture())).thenReturn(actionFuture); - doAnswer(invocation -> { - IndicesExistsRequest request = (IndicesExistsRequest) invocation.getArguments()[0]; - return request.indices()[0].equals(index) ? actionFuture : null; - }).when(indicesAdminClient).exists(any(IndicesExistsRequest.class)); - when(actionFuture.get()).thenReturn(new IndicesExistsResponse(exists)); - when(actionFuture.actionGet()).thenReturn(new IndicesExistsResponse(exists)); - return this; - } - - @SuppressWarnings({ "unchecked" }) - public MockClientBuilder addIndicesDeleteResponse(String index, boolean exists, boolean exception, - ActionListener actionListener) throws InterruptedException, ExecutionException, IOException { - StreamInput si = mock(StreamInput.class); - // this looks complicated but Mockito can't mock the final method - // DeleteIndexResponse.isAcknowledged() and the only way to create - // one with a true response is reading from a stream. 
- when(si.readByte()).thenReturn((byte) 0x01); - AcknowledgedResponse response = DeleteIndexAction.INSTANCE.getResponseReader().read(si); - - doAnswer(invocation -> { - DeleteIndexRequest deleteIndexRequest = (DeleteIndexRequest) invocation.getArguments()[0]; - assertArrayEquals(new String[] { index }, deleteIndexRequest.indices()); - if (exception) { - actionListener.onFailure(new InterruptedException()); - } else { - actionListener.onResponse(new AcknowledgedResponse(true)); - } - return null; - }).when(indicesAdminClient).delete(any(DeleteIndexRequest.class), any(ActionListener.class)); - return this; - } - - public MockClientBuilder prepareGet(String index, String type, String id, GetResponse response) { - GetRequestBuilder getRequestBuilder = mock(GetRequestBuilder.class); - when(getRequestBuilder.get()).thenReturn(response); - when(getRequestBuilder.setFetchSource(false)).thenReturn(getRequestBuilder); - when(client.prepareGet(index, type, id)).thenReturn(getRequestBuilder); - return this; - } - @SuppressWarnings("unchecked") public MockClientBuilder get(GetResponse response) { doAnswer(new Answer() { @@ -192,64 +108,6 @@ public class MockClientBuilder { return this; } - @SuppressWarnings({ "rawtypes", "unchecked" }) - public MockClientBuilder createIndexRequest(ArgumentCaptor requestCapture, final String index) { - - doAnswer(invocation -> { - CreateIndexResponse response = new CreateIndexResponse(true, true, index) {}; - ((ActionListener) invocation.getArguments()[1]).onResponse(response); - return null; - }).when(indicesAdminClient).create(requestCapture.capture(), any(ActionListener.class)); - return this; - } - - @SuppressWarnings("unchecked") - public MockClientBuilder prepareSearchExecuteListener(String index, SearchResponse response) { - SearchRequestBuilder builder = mock(SearchRequestBuilder.class); - when(builder.setTypes(anyString())).thenReturn(builder); - when(builder.addSort(any(SortBuilder.class))).thenReturn(builder); - 
when(builder.setFetchSource(anyBoolean())).thenReturn(builder); - when(builder.setScroll(anyString())).thenReturn(builder); - when(builder.addDocValueField(any(String.class))).thenReturn(builder); - when(builder.addDocValueField(any(String.class), any(String.class))).thenReturn(builder); - when(builder.addSort(any(String.class), any(SortOrder.class))).thenReturn(builder); - when(builder.setQuery(any())).thenReturn(builder); - when(builder.setSize(anyInt())).thenReturn(builder); - - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[0]; - listener.onResponse(response); - return null; - } - }).when(builder).execute(any()); - - when(client.prepareSearch(eq(index))).thenReturn(builder); - - return this; - } - - @SuppressWarnings("unchecked") - public MockClientBuilder prepareSearchScrollExecuteListener(SearchResponse response) { - SearchScrollRequestBuilder builder = mock(SearchScrollRequestBuilder.class); - when(builder.setScroll(anyString())).thenReturn(builder); - when(builder.setScrollId(anyString())).thenReturn(builder); - - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[0]; - listener.onResponse(response); - return null; - } - }).when(builder).execute(any()); - - when(client.prepareSearchScroll(anyString())).thenReturn(builder); - - return this; - } - public MockClientBuilder prepareSearch(String index, String type, int from, int size, SearchResponse response, ArgumentCaptor filter) { SearchRequestBuilder builder = mock(SearchRequestBuilder.class); @@ -352,38 +210,6 @@ public class MockClientBuilder { return this; } - @SuppressWarnings("unchecked") - public MockClientBuilder prepareAlias(String indexName, String alias, QueryBuilder filter) { - 
when(aliasesRequestBuilder.addAlias(eq(indexName), eq(alias), eq(filter))).thenReturn(aliasesRequestBuilder); - when(indicesAdminClient.prepareAliases()).thenReturn(aliasesRequestBuilder); - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = - (ActionListener) invocationOnMock.getArguments()[0]; - listener.onResponse(mock(AcknowledgedResponse.class)); - return null; - } - }).when(aliasesRequestBuilder).execute(any()); - return this; - } - - @SuppressWarnings("unchecked") - public MockClientBuilder prepareAlias(String indexName, String alias) { - when(aliasesRequestBuilder.addAlias(eq(indexName), eq(alias))).thenReturn(aliasesRequestBuilder); - when(indicesAdminClient.prepareAliases()).thenReturn(aliasesRequestBuilder); - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = - (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(mock(AcknowledgedResponse.class)); - return null; - } - }).when(indicesAdminClient).aliases(any(IndicesAliasesRequest.class), any(ActionListener.class)); - return this; - } - @SuppressWarnings("unchecked") public MockClientBuilder prepareBulk(BulkResponse response) { PlainActionFuture actionFuture = mock(PlainActionFuture.class); @@ -402,70 +228,7 @@ public class MockClientBuilder { return this; } - @SuppressWarnings("unchecked") - public MockClientBuilder preparePutMapping(AcknowledgedResponse response, String type) { - PutMappingRequestBuilder requestBuilder = mock(PutMappingRequestBuilder.class); - when(requestBuilder.setType(eq(type))).thenReturn(requestBuilder); - when(requestBuilder.setSource(any(XContentBuilder.class))).thenReturn(requestBuilder); - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = - (ActionListener) invocationOnMock.getArguments()[0]; - 
listener.onResponse(response); - return null; - } - }).when(requestBuilder).execute(any()); - - when(indicesAdminClient.preparePutMapping(any())).thenReturn(requestBuilder); - return this; - } - - @SuppressWarnings("unchecked") - public MockClientBuilder prepareGetMapping(GetMappingsResponse response) { - GetMappingsRequestBuilder builder = mock(GetMappingsRequestBuilder.class); - - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = - (ActionListener) invocationOnMock.getArguments()[0]; - listener.onResponse(response); - return null; - } - }).when(builder).execute(any()); - - when(indicesAdminClient.prepareGetMappings(any())).thenReturn(builder); - return this; - } - - @SuppressWarnings("unchecked") - public MockClientBuilder putTemplate(ArgumentCaptor requestCaptor) { - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = - (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(mock(AcknowledgedResponse.class)); - return null; - } - }).when(indicesAdminClient).putTemplate(requestCaptor.capture(), any(ActionListener.class)); - return this; - } - - public Client build() { return client; } - - public void verifyIndexCreated(String index) { - ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); - verify(indicesAdminClient).create(requestCaptor.capture(), any()); - assertEquals(index, requestCaptor.getValue().index()); - } - - public void resetIndices() { - reset(indicesAdminClient); - } - } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java index 639f0e2156e..13abbbb7d80 100644 --- 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java @@ -5,12 +5,9 @@ */ package org.elasticsearch.xpack.monitoring.rest.action; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; @@ -40,8 +37,6 @@ public class RestMonitoringBulkAction extends XPackRestHandler { public static final String MONITORING_ID = "system_id"; public static final String MONITORING_VERSION = "system_api_version"; public static final String INTERVAL = "interval"; - private static final Logger logger = LogManager.getLogger(RestMonitoringBulkAction.class); - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); private static final List ALL_VERSIONS = asList( MonitoringTemplateUtils.TEMPLATE_VERSION, @@ -65,10 +60,10 @@ public class RestMonitoringBulkAction extends XPackRestHandler { return unmodifiableList(asList( new ReplacedRoute( POST, "/_monitoring/bulk", - POST, "/_xpack/monitoring/_bulk", deprecationLogger), + POST, "/_xpack/monitoring/_bulk"), new ReplacedRoute( PUT, "/_monitoring/bulk", - PUT, "/_xpack/monitoring/_bulk", deprecationLogger))); + PUT, "/_xpack/monitoring/_bulk"))); } @Override diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Failure.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/common/Failure.java similarity index 51% rename from x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Failure.java rename 
to x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/common/Failure.java index d4dadb5be64..3c3e95aada2 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Failure.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/common/Failure.java @@ -4,35 +4,39 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.eql.analysis; +package org.elasticsearch.xpack.ql.common; +import org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Node; +import org.elasticsearch.xpack.ql.util.StringUtils; +import java.util.Collection; import java.util.Objects; +import java.util.stream.Collectors; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -class Failure { +public class Failure { private final Node node; private final String message; - Failure(Node node, String message) { + public Failure(Node node, String message) { this.node = node; this.message = message; } - Node node() { + public Node node() { return node; } - String message() { + public String message() { return message; } @Override public int hashCode() { - return Objects.hash(message, node); + return Objects.hash(node); } @Override @@ -46,7 +50,7 @@ class Failure { } Failure other = (Failure) obj; - return Objects.equals(message, other.message) && Objects.equals(node, other.node); + return Objects.equals(node, other.node); } @Override @@ -54,7 +58,16 @@ class Failure { return message; } - static Failure fail(Node source, String message, Object... args) { + public static Failure fail(Node source, String message, Object... 
args) { return new Failure(source, format(message, args)); } + + public static String failMessage(Collection failures) { + return failures.stream().map(f -> { + Location l = f.node().source().source(); + return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message(); + }).collect(Collectors.joining(StringUtils.NEW_LINE, + format("Found {} problem{}\n", failures.size(), failures.size() > 1 ? "s" : StringUtils.EMPTY), + StringUtils.EMPTY)); + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java index 06bc216b812..c8e9d06d5f4 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ql.execution.search; import org.elasticsearch.common.Strings; import org.elasticsearch.script.Script; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import java.util.LinkedHashMap; import java.util.LinkedHashSet; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java index 9bad837f5c1..4e7c4484eb3 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java @@ -6,10 +6,8 @@ package org.elasticsearch.xpack.rollup.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; 
import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; @@ -24,8 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteRollupJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteRollupJobAction.class)); - public static final ParseField ID = new ParseField("id"); @Override @@ -35,7 +31,7 @@ public class RestDeleteRollupJobAction extends BaseRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(DELETE, "/_rollup/job/{id}", DELETE, "/_xpack/rollup/job/{id}/", deprecationLogger)); + return singletonList(new ReplacedRoute(DELETE, "/_rollup/job/{id}", DELETE, "/_xpack/rollup/job/{id}/")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java index a0c6bda82ba..b62d8d57019 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java @@ -6,10 +6,8 @@ package org.elasticsearch.xpack.rollup.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,8 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetRollupCapsAction extends BaseRestHandler { - private static 
final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetRollupCapsAction.class)); - public static final ParseField ID = new ParseField("id"); @Override @@ -34,7 +30,7 @@ public class RestGetRollupCapsAction extends BaseRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/_rollup/data/{id}", GET, "/_xpack/rollup/data/{id}/", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/_rollup/data/{id}", GET, "/_xpack/rollup/data/{id}/")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java index fea3d123585..c5ef6d909ae 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java @@ -6,12 +6,10 @@ package org.elasticsearch.xpack.rollup.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -25,9 +23,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetRollupIndexCapsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetRollupIndexCapsAction.class)); - static final ParseField INDEX = new ParseField("index"); @Override @@ -37,7 +32,7 @@ public class RestGetRollupIndexCapsAction extends BaseRestHandler { 
@Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/{index}/_rollup/data", GET, "/{index}/_xpack/rollup/data", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/{index}/_rollup/data", GET, "/{index}/_xpack/rollup/data")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java index 1d060d29e03..7ba3e7a535a 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java @@ -6,10 +6,8 @@ package org.elasticsearch.xpack.rollup.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,8 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetRollupJobsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetRollupJobsAction.class)); - public static final ParseField ID = new ParseField("id"); @Override @@ -34,7 +30,7 @@ public class RestGetRollupJobsAction extends BaseRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/_rollup/job/{id}", GET, "/_xpack/rollup/job/{id}/", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/_rollup/job/{id}", GET, "/_xpack/rollup/job/{id}/")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java index 3e8edd6d013..fc75716f43d 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java @@ -6,9 +6,7 @@ package org.elasticsearch.xpack.rollup.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,8 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutRollupJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutRollupJobAction.class)); - @Override public List routes() { return emptyList(); @@ -32,7 +28,7 @@ public class RestPutRollupJobAction extends BaseRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(PUT, "/_rollup/job/{id}", PUT, "/_xpack/rollup/job/{id}", deprecationLogger)); + return singletonList(new ReplacedRoute(PUT, "/_rollup/job/{id}", PUT, "/_xpack/rollup/job/{id}")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java index 25728e64f6d..9f83b667cf2 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java @@ -6,9 +6,7 @@ package org.elasticsearch.xpack.rollup.rest; -import org.apache.logging.log4j.LogManager; import 
org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -23,8 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestStartRollupJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestStartRollupJobAction.class)); - @Override public List routes() { return emptyList(); @@ -33,7 +29,7 @@ public class RestStartRollupJobAction extends BaseRestHandler { @Override public List replacedRoutes() { return singletonList( - new ReplacedRoute(POST, "/_rollup/job/{id}/_start", POST, "/_xpack/rollup/job/{id}/_start", deprecationLogger)); + new ReplacedRoute(POST, "/_rollup/job/{id}/_start", POST, "/_xpack/rollup/job/{id}/_start")); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java index b818c590ac3..c93c10142e8 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java @@ -6,9 +6,7 @@ package org.elasticsearch.xpack.rollup.rest; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -24,8 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestStopRollupJobAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new 
DeprecationLogger(LogManager.getLogger(RestStopRollupJobAction.class)); - @Override public List routes() { return emptyList(); @@ -34,7 +30,7 @@ public class RestStopRollupJobAction extends BaseRestHandler { @Override public List replacedRoutes() { return singletonList( - new ReplacedRoute(POST, "/_rollup/job/{id}/_stop", POST, "/_xpack/rollup/job/{id}/_stop", deprecationLogger)); + new ReplacedRoute(POST, "/_rollup/job/{id}/_stop", POST, "/_xpack/rollup/job/{id}/_stop")); } @Override diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 98f4d9e4283..8809bfc8761 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -93,7 +93,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { settings = createIndexSettings(); queryShardContext = new QueryShardContext(0, settings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - null, null, null, null, () -> 0L, null, null); + null, null, null, null, () -> 0L, null, null, () -> true); } public void testSimpleDateHisto() throws Exception { diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index be9ad73c1e9..9b666bb4104 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -45,7 +45,7 @@ dependencies { compile "net.shibboleth.utilities:java-support:7.5.1" compile "org.apache.santuario:xmlsec:2.1.4" compile "io.dropwizard.metrics:metrics-core:3.2.2" - compile ("org.cryptacular:cryptacular:1.2.3") { + compile ("org.cryptacular:cryptacular:1.2.4") { exclude group: 'org.bouncycastle' } compile "org.slf4j:slf4j-api:${versions.slf4j}" diff --git 
a/x-pack/plugin/security/licenses/cryptacular-1.2.3.jar.sha1 b/x-pack/plugin/security/licenses/cryptacular-1.2.3.jar.sha1 deleted file mode 100644 index 9b5ebbac8b6..00000000000 --- a/x-pack/plugin/security/licenses/cryptacular-1.2.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b0398d04a68ff7f58657938b3bdc5f2799b4b49 \ No newline at end of file diff --git a/x-pack/plugin/security/licenses/cryptacular-1.2.4.jar.sha1 b/x-pack/plugin/security/licenses/cryptacular-1.2.4.jar.sha1 new file mode 100644 index 00000000000..19095bb5dff --- /dev/null +++ b/x-pack/plugin/security/licenses/cryptacular-1.2.4.jar.sha1 @@ -0,0 +1 @@ +4994c015d87886212683245d13e87f6fb903a760 \ No newline at end of file diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java index 33dcc311c48..48826055845 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -32,7 +30,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestAuthenticateAction extends SecurityBaseRestHandler { private final SecurityContext securityContext; - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestAuthenticateAction.class)); public RestAuthenticateAction(Settings settings, SecurityContext securityContext, 
XPackLicenseState licenseState) { super(settings, licenseState); @@ -48,7 +45,7 @@ public class RestAuthenticateAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList(new ReplacedRoute(GET, "/_security/_authenticate", GET, - "/_xpack/security/_authenticate", deprecationLogger)); + "/_xpack/security/_authenticate")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java index d2efb784a76..e97045e695a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.rest.action.oauth2; -import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; @@ -13,7 +12,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -48,7 +46,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public final class RestGetTokenAction extends TokenBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetTokenAction.class)); static final ConstructingObjectParser PARSER = 
new ConstructingObjectParser<>("token_request", a -> new CreateTokenRequest((String) a[0], (String) a[1], (SecureString) a[2], (SecureString) a[3], (String) a[4], (String) a[5])); @@ -78,7 +75,7 @@ public final class RestGetTokenAction extends TokenBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( - new ReplacedRoute(POST, "/_security/oauth2/token", POST, "/_xpack/security/oauth2/token", deprecationLogger) + new ReplacedRoute(POST, "/_security/oauth2/token", POST, "/_xpack/security/oauth2/token") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java index 26caf487950..d9a16453229 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java @@ -5,11 +5,9 @@ */ package org.elasticsearch.xpack.security.rest.action.oauth2; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,7 +33,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; */ public final class RestInvalidateTokenAction extends TokenBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestInvalidateTokenAction.class)); static final ConstructingObjectParser PARSER = new 
ConstructingObjectParser<>("invalidate_token", a -> { final String token = (String) a[0]; @@ -77,7 +74,7 @@ public final class RestInvalidateTokenAction extends TokenBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( - new ReplacedRoute(DELETE, "/_security/oauth2/token", DELETE, "/_xpack/security/oauth2/token", deprecationLogger) + new ReplacedRoute(DELETE, "/_security/oauth2/token", DELETE, "/_xpack/security/oauth2/token") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java index 1ac737c9f72..0add93b5127 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.privilege; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -33,9 +31,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; */ public class RestDeletePrivilegesAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeletePrivilegesAction.class)); - public RestDeletePrivilegesAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -49,7 +44,7 @@ public class RestDeletePrivilegesAction extends SecurityBaseRestHandler { public List 
replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList(new ReplacedRoute(DELETE, "/_security/privilege/{application}/{privilege}", DELETE, - "/_xpack/security/privilege/{application}/{privilege}", deprecationLogger)); + "/_xpack/security/privilege/{application}/{privilege}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java index 0cf4bc6d4b8..3f35b616195 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action.privilege; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -38,8 +36,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; */ public class RestGetPrivilegesAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetPrivilegesAction.class)); - public RestGetPrivilegesAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -53,11 +49,11 @@ public class RestGetPrivilegesAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( - new ReplacedRoute(GET, "/_security/privilege/", GET, 
"/_xpack/security/privilege/", deprecationLogger), + new ReplacedRoute(GET, "/_security/privilege/", GET, "/_xpack/security/privilege/"), new ReplacedRoute(GET, "/_security/privilege/{application}", - GET, "/_xpack/security/privilege/{application}", deprecationLogger), + GET, "/_xpack/security/privilege/{application}"), new ReplacedRoute(GET, "/_security/privilege/{application}/{privilege}", - GET, "/_xpack/security/privilege/{application}/{privilege}", deprecationLogger) + GET, "/_xpack/security/privilege/{application}/{privilege}") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java index 38bde90e1f2..f657bd2ba25 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.privilege; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -37,7 +35,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; * Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index */ public class RestPutPrivilegesAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutPrivilegesAction.class)); public RestPutPrivilegesAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); @@ 
-52,8 +49,8 @@ public class RestPutPrivilegesAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( - new ReplacedRoute(PUT, "/_security/privilege/", PUT, "/_xpack/security/privilege/", deprecationLogger), - new ReplacedRoute(POST, "/_security/privilege/", POST, "/_xpack/security/privilege/", deprecationLogger) + new ReplacedRoute(PUT, "/_security/privilege/", PUT, "/_xpack/security/privilege/"), + new ReplacedRoute(POST, "/_security/privilege/", POST, "/_xpack/security/privilege/") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java index df2f8c89d2b..1ddc00a3fa1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/realm/RestClearRealmCacheAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.realm; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -24,8 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public final class RestClearRealmCacheAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestClearRealmCacheAction.class)); - public RestClearRealmCacheAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -40,7 +36,7 @@ public final class 
RestClearRealmCacheAction extends SecurityBaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, "/_security/realm/{realms}/_clear_cache", - POST, "/_xpack/security/realm/{realms}/_clear_cache", deprecationLogger) + POST, "/_xpack/security/realm/{realms}/_clear_cache") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java index af62e8d5ccc..d4fdec1610e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestClearRolesCacheAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.role; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -24,8 +22,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public final class RestClearRolesCacheAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestClearRolesCacheAction.class)); - public RestClearRolesCacheAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -39,7 +35,7 @@ public final class RestClearRolesCacheAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList(new ReplacedRoute(POST, "/_security/role/{name}/_clear_cache", POST, - "/_xpack/security/role/{name}/_clear_cache", 
deprecationLogger)); + "/_xpack/security/role/{name}/_clear_cache")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java index fa92846d256..119aa8fbcef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.role; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -31,8 +29,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; */ public class RestDeleteRoleAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteRoleAction.class)); - public RestDeleteRoleAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -46,7 +42,7 @@ public class RestDeleteRoleAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList(new ReplacedRoute(DELETE, "/_security/role/{name}", DELETE, - "/_xpack/security/role/{name}", deprecationLogger)); + "/_xpack/security/role/{name}")); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java index 
e935c6c837e..76c042862c0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestGetRolesAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action.role; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -34,8 +32,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; */ public class RestGetRolesAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetRolesAction.class)); - public RestGetRolesAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -49,8 +45,8 @@ public class RestGetRolesAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( - new ReplacedRoute(GET, "/_security/role/", GET, "/_xpack/security/role/", deprecationLogger), - new ReplacedRoute(GET, "/_security/role/{name}", GET, "/_xpack/security/role/{name}", deprecationLogger) + new ReplacedRoute(GET, "/_security/role/", GET, "/_xpack/security/role/"), + new ReplacedRoute(GET, "/_security/role/{name}", GET, "/_xpack/security/role/{name}") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java index 83cd63714ce..54f47c93d1b 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.role; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -34,8 +32,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; */ public class RestPutRoleAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutRoleAction.class)); - public RestPutRoleAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -49,8 +45,8 @@ public class RestPutRoleAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( - new ReplacedRoute(POST, "/_security/role/{name}", POST, "/_xpack/security/role/{name}", deprecationLogger), - new ReplacedRoute(PUT, "/_security/role/{name}", PUT, "/_xpack/security/role/{name}", deprecationLogger) + new ReplacedRoute(POST, "/_security/role/{name}", POST, "/_xpack/security/role/{name}"), + new ReplacedRoute(PUT, "/_security/role/{name}", PUT, "/_xpack/security/role/{name}") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java index f2415f8a2cc..aaae64ae68f 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestDeleteRoleMappingAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.rolemapping; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -31,9 +29,6 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; */ public class RestDeleteRoleMappingAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestDeleteRoleMappingAction.class)); - public RestDeleteRoleMappingAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -47,7 +42,7 @@ public class RestDeleteRoleMappingAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( - new ReplacedRoute(DELETE, "/_security/role_mapping/{name}", DELETE, "/_xpack/security/role_mapping/{name}", deprecationLogger) + new ReplacedRoute(DELETE, "/_security/role_mapping/{name}", DELETE, "/_xpack/security/role_mapping/{name}") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java index 76da6cc46c7..79ab0fe573e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.rolemapping; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -33,8 +31,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; */ public class RestGetRoleMappingsAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetRoleMappingsAction.class)); - public RestGetRoleMappingsAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -48,8 +44,8 @@ public class RestGetRoleMappingsAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( - new ReplacedRoute(GET, "/_security/role_mapping/", GET, "/_xpack/security/role_mapping/", deprecationLogger), - new ReplacedRoute(GET, "/_security/role_mapping/{name}", GET, "/_xpack/security/role_mapping/{name}", deprecationLogger) + new ReplacedRoute(GET, "/_security/role_mapping/", GET, "/_xpack/security/role_mapping/"), + new ReplacedRoute(GET, "/_security/role_mapping/{name}", GET, "/_xpack/security/role_mapping/{name}") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index bb4b21144a9..22227eb0dbb 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.rolemapping; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -36,8 +34,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; */ public class RestPutRoleMappingAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutRoleMappingAction.class)); - public RestPutRoleMappingAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -51,8 +47,8 @@ public class RestPutRoleMappingAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( - new ReplacedRoute(POST, "/_security/role_mapping/{name}", POST, "/_xpack/security/role_mapping/{name}", deprecationLogger), - new ReplacedRoute(PUT, "/_security/role_mapping/{name}", PUT, "/_xpack/security/role_mapping/{name}", deprecationLogger) + new ReplacedRoute(POST, "/_security/role_mapping/{name}", POST, "/_xpack/security/role_mapping/{name}"), + new ReplacedRoute(PUT, "/_security/role_mapping/{name}", PUT, "/_xpack/security/role_mapping/{name}") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java index e210e9ac8a3..3dbdb950d18 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlAuthenticateAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -37,7 +36,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestSamlAuthenticateAction extends SamlBaseRestHandler { private static final Logger logger = LogManager.getLogger(); - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); static class Input { String content; @@ -77,7 +75,7 @@ public class RestSamlAuthenticateAction extends SamlBaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, "/_security/saml/authenticate", - POST, "/_xpack/security/saml/authenticate", deprecationLogger) + POST, "/_xpack/security/saml/authenticate") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java index 0d5c0e3a027..8b69562aa64 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlInvalidateSessionAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action.saml; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,8 +33,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestSamlInvalidateSessionAction extends SamlBaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestSamlInvalidateSessionAction.class)); static final ObjectParser PARSER = new ObjectParser<>("saml_invalidate_session", SamlInvalidateSessionRequest::new); @@ -60,7 +56,7 @@ public class RestSamlInvalidateSessionAction extends SamlBaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, "/_security/saml/invalidate", - POST, "/_xpack/security/saml/invalidate", deprecationLogger) + POST, "/_xpack/security/saml/invalidate") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java index 0e947c9eb03..a7836f9ce05 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlLogoutAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action.saml; -import org.apache.logging.log4j.LogManager; import 
org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -37,7 +35,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestSamlLogoutAction extends SamlBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSamlLogoutAction.class)); static final ObjectParser PARSER = new ObjectParser<>("saml_logout", SamlLogoutRequest::new); static { @@ -59,7 +56,7 @@ public class RestSamlLogoutAction extends SamlBaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, "/_security/saml/logout", - POST, "/_xpack/security/saml/logout", deprecationLogger) + POST, "/_xpack/security/saml/logout") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java index 13d2f771ce6..758f9cb2c98 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action.saml; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import 
org.elasticsearch.common.xcontent.XContentBuilder; @@ -37,8 +35,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestSamlPrepareAuthenticationAction extends SamlBaseRestHandler { - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestSamlPrepareAuthenticationAction.class)); static final ObjectParser PARSER = new ObjectParser<>("saml_prepare_authn", SamlPrepareAuthenticationRequest::new); @@ -62,7 +58,7 @@ public class RestSamlPrepareAuthenticationAction extends SamlBaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( new ReplacedRoute(POST, "/_security/saml/prepare", - POST, "/_xpack/security/saml/prepare", deprecationLogger) + POST, "/_xpack/security/saml/prepare") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java index de9f588fb4f..e1f41a9bb63 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -36,7 +34,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestChangePasswordAction extends SecurityBaseRestHandler implements RestRequestFilter { - private static final DeprecationLogger deprecationLogger = new 
DeprecationLogger(LogManager.getLogger(RestChangePasswordAction.class)); private final SecurityContext securityContext; private final Hasher passwordHasher; @@ -56,13 +53,13 @@ public class RestChangePasswordAction extends SecurityBaseRestHandler implements // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(PUT, "/_security/user/{username}/_password", - PUT, "/_xpack/security/user/{username}/_password", deprecationLogger), + PUT, "/_xpack/security/user/{username}/_password"), new ReplacedRoute(POST, "/_security/user/{username}/_password", - POST, "/_xpack/security/user/{username}/_password", deprecationLogger), + POST, "/_xpack/security/user/{username}/_password"), new ReplacedRoute(PUT, "/_security/user/_password", - PUT, "/_xpack/security/user/_password", deprecationLogger), + PUT, "/_xpack/security/user/_password"), new ReplacedRoute(POST, "/_security/user/_password", - POST, "/_xpack/security/user/_password", deprecationLogger) + POST, "/_xpack/security/user/_password") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java index ec32528a954..560a1f3b21c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -31,8 +29,6 @@ import static 
org.elasticsearch.rest.RestRequest.Method.DELETE; */ public class RestDeleteUserAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteUserAction.class)); - public RestDeleteUserAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -46,7 +42,7 @@ public class RestDeleteUserAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( - new ReplacedRoute(DELETE, "/_security/user/{username}", DELETE, "/_xpack/security/user/{username}", deprecationLogger) + new ReplacedRoute(DELETE, "/_security/user/{username}", DELETE, "/_xpack/security/user/{username}") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java index 6a0cf3e2972..6e603f0b7bd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -41,8 +39,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetUserPrivilegesAction extends SecurityBaseRestHandler { private final SecurityContext securityContext; - 
private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(RestGetUserPrivilegesAction.class)); public RestGetUserPrivilegesAction(Settings settings, SecurityContext securityContext, XPackLicenseState licenseState) { super(settings, licenseState); @@ -58,7 +54,7 @@ public class RestGetUserPrivilegesAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.singletonList( - new ReplacedRoute(GET, "/_security/user/_privileges", GET, "/_xpack/security/user/_privileges", deprecationLogger) + new ReplacedRoute(GET, "/_security/user/_privileges", GET, "/_xpack/security/user/_privileges") ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java index 675036e6721..c2a5e1995dc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUsersAction.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -34,8 +32,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; */ public class RestGetUsersAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetUsersAction.class)); - public RestGetUsersAction(Settings settings, 
XPackLicenseState licenseState) { super(settings, licenseState); } @@ -49,8 +45,8 @@ public class RestGetUsersAction extends SecurityBaseRestHandler { public List replacedRoutes() { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( - new ReplacedRoute(GET, "/_security/user/", GET, "/_xpack/security/user/", deprecationLogger), - new ReplacedRoute(GET, "/_security/user/{username}", GET, "/_xpack/security/user/{username}", deprecationLogger) + new ReplacedRoute(GET, "/_security/user/", GET, "/_xpack/security/user/"), + new ReplacedRoute(GET, "/_security/user/{username}", GET, "/_xpack/security/user/{username}") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java index ea7a680a261..1697792e6bc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java @@ -5,12 +5,10 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -44,7 +42,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestHasPrivilegesAction extends SecurityBaseRestHandler { private final SecurityContext securityContext; - private static final DeprecationLogger 
deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestHasPrivilegesAction.class)); public RestHasPrivilegesAction(Settings settings, SecurityContext securityContext, XPackLicenseState licenseState) { super(settings, licenseState); @@ -61,13 +58,13 @@ public class RestHasPrivilegesAction extends SecurityBaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(GET, "/_security/user/{username}/_has_privileges", - GET, "/_xpack/security/user/{username}/_has_privileges", deprecationLogger), + GET, "/_xpack/security/user/{username}/_has_privileges"), new ReplacedRoute(POST, "/_security/user/{username}/_has_privileges", - POST, "/_xpack/security/user/{username}/_has_privileges", deprecationLogger), + POST, "/_xpack/security/user/{username}/_has_privileges"), new ReplacedRoute(GET, "/_security/user/_has_privileges", - GET, "/_xpack/security/user/_has_privileges", deprecationLogger), + GET, "/_xpack/security/user/_has_privileges"), new ReplacedRoute(POST, "/_security/user/_has_privileges", - POST, "/_xpack/security/user/_has_privileges", deprecationLogger) + POST, "/_xpack/security/user/_has_privileges") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java index e8ebea5342a..397a9c634c9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -40,7 +38,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutUserAction extends SecurityBaseRestHandler implements RestRequestFilter { private final Hasher passwordHasher; - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutUserAction.class)); public RestPutUserAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); @@ -57,9 +54,9 @@ public class RestPutUserAction extends SecurityBaseRestHandler implements RestRe // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(POST, "/_security/user/{username}", - POST, "/_xpack/security/user/{username}", deprecationLogger), + POST, "/_xpack/security/user/{username}"), new ReplacedRoute(PUT, "/_security/user/{username}", - PUT, "/_xpack/security/user/{username}", deprecationLogger) + PUT, "/_xpack/security/user/{username}") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java index 38c88625f65..4b953b7856c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; @@ -34,8 
+32,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; */ public class RestSetEnabledAction extends SecurityBaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSetEnabledAction.class)); - public RestSetEnabledAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); } @@ -50,13 +46,13 @@ public class RestSetEnabledAction extends SecurityBaseRestHandler { // TODO: remove deprecated endpoint in 8.0.0 return Collections.unmodifiableList(Arrays.asList( new ReplacedRoute(POST, "/_security/user/{username}/_enable", - POST, "/_xpack/security/user/{username}/_enable", deprecationLogger), + POST, "/_xpack/security/user/{username}/_enable"), new ReplacedRoute(PUT, "/_security/user/{username}/_enable", - PUT, "/_xpack/security/user/{username}/_enable", deprecationLogger), + PUT, "/_xpack/security/user/{username}/_enable"), new ReplacedRoute(POST, "/_security/user/{username}/_disable", - POST, "/_xpack/security/user/{username}/_disable", deprecationLogger), + POST, "/_xpack/security/user/{username}/_disable"), new ReplacedRoute(PUT, "/_security/user/{username}/_disable", - PUT, "/_xpack/security/user/{username}/_disable", deprecationLogger) + PUT, "/_xpack/security/user/{username}/_disable") )); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 2c6f3902b38..f53fa59fde9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -66,7 +66,6 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.function.Supplier; -import 
java.util.regex.Pattern; import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; @@ -85,7 +84,7 @@ public class SecurityIndexManager implements ClusterStateListener { public static final String SECURITY_MAIN_TEMPLATE_7 = "security-index-template-7"; public static final String SECURITY_TOKENS_TEMPLATE_7 = "security-tokens-index-template-7"; public static final String SECURITY_VERSION_STRING = "security-version"; - public static final String TEMPLATE_VERSION_PATTERN = Pattern.quote("${security.template.version}"); + public static final String TEMPLATE_VERSION_VARIABLE = "security.template.version"; private static final Logger logger = LogManager.getLogger(SecurityIndexManager.class); @@ -434,7 +433,7 @@ public class SecurityIndexManager implements ClusterStateListener { private static byte[] readTemplateAsBytes(String templateName) { return TemplateUtils.loadTemplate("/" + templateName + ".json", Version.CURRENT.toString(), - SecurityIndexManager.TEMPLATE_VERSION_PATTERN).getBytes(StandardCharsets.UTF_8); + SecurityIndexManager.TEMPLATE_VERSION_VARIABLE).getBytes(StandardCharsets.UTF_8); } private static Tuple parseMappingAndSettingsFromTemplateBytes(byte[] template) throws IOException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 6396757eb48..daa33a9e7a6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -19,10 +19,10 @@ import java.util.function.Supplier; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionType; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; @@ -61,16 +61,16 @@ import org.hamcrest.Matchers; import org.junit.Before; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_MAIN_TEMPLATE_7; -import static org.elasticsearch.xpack.security.support.SecurityIndexManager.TEMPLATE_VERSION_PATTERN; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.TEMPLATE_VERSION_VARIABLE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class SecurityIndexManagerTests extends ESTestCase { @@ -444,7 +444,7 @@ public class SecurityIndexManagerTests extends ESTestCase { private static String loadTemplate(String templateName) { final String resource = "/" + templateName + ".json"; - return TemplateUtils.loadTemplate(resource, Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN); + return TemplateUtils.loadTemplate(resource, Version.CURRENT.toString(), TEMPLATE_VERSION_VARIABLE); } public void testMappingVersionMatching() throws IOException { @@ -535,7 +535,7 @@ public class SecurityIndexManagerTests extends ESTestCase { private static IndexMetaData.Builder createIndexMetadata(String indexName, String templateString) throws IOException { String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), - 
SecurityIndexManager.TEMPLATE_VERSION_PATTERN); + SecurityIndexManager.TEMPLATE_VERSION_VARIABLE); PutIndexTemplateRequest request = new PutIndexTemplateRequest(); request.source(template, XContentType.JSON); IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); @@ -574,7 +574,7 @@ public class SecurityIndexManagerTests extends ESTestCase { private static IndexTemplateMetaData.Builder getIndexTemplateMetaData(String templateName, String templateString) throws IOException { String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), - SecurityIndexManager.TEMPLATE_VERSION_PATTERN); + SecurityIndexManager.TEMPLATE_VERSION_VARIABLE); PutIndexTemplateRequest request = new PutIndexTemplateRequest(); request.source(template, XContentType.JSON); IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder(templateName) diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java index 516ab28c80f..3765c455621 100644 --- a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java @@ -39,4 +39,20 @@ public class RestSqlIT extends RestSqlTestCase { containsString("Cannot generate a query DSL for a special SQL command " + "(e.g.: DESCRIBE, SHOW), sql statement: [SHOW FUNCTIONS]")); } + + public void testErrorMessageForInvalidParamDataType() throws IOException { + expectBadRequest(() -> runTranslateSql( + "{\"query\":\"SELECT null WHERE 0 = ? 
\", \"mode\": \"odbc\", \"params\":[{\"type\":\"invalid\", \"value\":\"irrelevant\"}]}" + ), + containsString("Invalid parameter data type [invalid]") + ); + } + + public void testErrorMessageForInvalidParamSpec() throws IOException { + expectBadRequest(() -> runTranslateSql( + "{\"query\":\"SELECT null WHERE 0 = ? \", \"mode\": \"odbc\", \"params\":[{\"type\":\"SHAPE\", \"value\":false}]}" + ), + containsString("Cannot cast value [false] of type [BOOLEAN] to parameter type [SHAPE]") + ); + } } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java index bd295d09029..2bef0cd60ae 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java @@ -106,6 +106,6 @@ public abstract class ErrorsTestCase extends CliIntegrationTestCase implements o } public static void assertFoundOneProblem(String commandResult) { - assertEquals(START + "Bad request [[3;33;22mFound 1 problem(s)", commandResult); + assertEquals(START + "Bad request [[3;33;22mFound 1 problem", commandResult); } } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java index d4f1cb5b305..87de389e9be 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java @@ -20,7 +20,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast public void testSelectInvalidSql() throws Exception { try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FRO").executeQuery()); - 
assertEquals("Found 1 problem(s)\nline 1:8: Cannot determine columns for [*]", e.getMessage()); + assertEquals("Found 1 problem\nline 1:8: Cannot determine columns for [*]", e.getMessage()); } } @@ -28,7 +28,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast public void testSelectFromMissingIndex() throws SQLException { try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FROM test").executeQuery()); - assertEquals("Found 1 problem(s)\nline 1:15: Unknown index [test]", e.getMessage()); + assertEquals("Found 1 problem\nline 1:15: Unknown index [test]", e.getMessage()); } } @@ -42,8 +42,8 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FROM test").executeQuery()); // see https://github.com/elastic/elasticsearch/issues/34719 - //assertEquals("Found 1 problem(s)\nline 1:15: [test] doesn't have any types so it is incompatible with sql", e.getMessage()); - assertEquals("Found 1 problem(s)\nline 1:15: Unknown index [test]", e.getMessage()); + //assertEquals("Found 1 problem\nline 1:15: [test] doesn't have any types so it is incompatible with sql", e.getMessage()); + assertEquals("Found 1 problem\nline 1:15: Unknown index [test]", e.getMessage()); } } @@ -52,7 +52,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast index("test", body -> body.field("test", "test")); try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT missing FROM test").executeQuery()); - assertEquals("Found 1 problem(s)\nline 1:8: Unknown column [missing]", e.getMessage()); + assertEquals("Found 1 problem\nline 1:8: Unknown column [missing]", e.getMessage()); } } @@ -61,7 +61,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast 
index("test", body -> body.field("foo", 1)); try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT missing(foo) FROM test").executeQuery()); - assertEquals("Found 1 problem(s)\nline 1:8: Unknown function [missing]", e.getMessage()); + assertEquals("Found 1 problem\nline 1:8: Unknown function [missing]", e.getMessage()); } } @@ -71,7 +71,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT foo, SCORE(), COUNT(*) FROM test GROUP BY foo").executeQuery()); - assertEquals("Found 1 problem(s)\nline 1:13: Cannot use non-grouped column [SCORE()], expected [foo]", e.getMessage()); + assertEquals("Found 1 problem\nline 1:13: Cannot use non-grouped column [SCORE()], expected [foo]", e.getMessage()); } } @@ -82,7 +82,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT foo, COUNT(*) FROM test GROUP BY foo ORDER BY SCORE()").executeQuery()); assertEquals( - "Found 1 problem(s)\nline 1:54: Cannot order by non-grouped column [SCORE()], expected [foo] or an aggregate function", + "Found 1 problem\nline 1:54: Cannot order by non-grouped column [SCORE()], expected [foo] or an aggregate function", e.getMessage()); } } @@ -93,7 +93,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT COUNT(*) FROM test GROUP BY SCORE()").executeQuery()); - assertEquals("Found 1 problem(s)\nline 1:36: Cannot use [SCORE()] for grouping", e.getMessage()); + assertEquals("Found 1 problem\nline 1:36: Cannot use [SCORE()] for grouping", e.getMessage()); } } @@ -113,7 +113,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase 
implements org.elast try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT SIN(SCORE()) FROM test").executeQuery()); - assertThat(e.getMessage(), startsWith("Found 1 problem(s)\nline 1:12: [SCORE()] cannot be an argument to a function")); + assertThat(e.getMessage(), startsWith("Found 1 problem\nline 1:12: [SCORE()] cannot be an argument to a function")); } } diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.csv-spec index ce96c34344a..070abdb68b3 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.csv-spec @@ -23,4 +23,50 @@ g:s | gender:s | s3:i | SUM(salary):i | s5:i M |M |2671054|2671054 |2671054 F |F |1666196|1666196 |1666196 null |null |487605 |487605 |487605 -; \ No newline at end of file +; + +histogramDateTimeWithCountAndOrder_1 +schema::h:ts|c:l +SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC, c ASC; + + h | c +------------------------+--------------- +1965-01-01T00:00:00.000Z|1 +1964-01-01T00:00:00.000Z|4 +1963-01-01T00:00:00.000Z|7 +1962-01-01T00:00:00.000Z|6 +1961-01-01T00:00:00.000Z|8 +1960-01-01T00:00:00.000Z|8 +1959-01-01T00:00:00.000Z|9 +1958-01-01T00:00:00.000Z|7 +1957-01-01T00:00:00.000Z|4 +1956-01-01T00:00:00.000Z|5 +1955-01-01T00:00:00.000Z|4 +1954-01-01T00:00:00.000Z|8 +1953-01-01T00:00:00.000Z|11 +1952-01-01T00:00:00.000Z|8 +null |10 +; + +histogramDateTimeWithCountAndOrder_2 +schema::h:ts|c:l +SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY c DESC, h ASC; + + h | c +------------------------+--------------- +1953-01-01T00:00:00.000Z|11 +null |10 +1959-01-01T00:00:00.000Z|9 +1952-01-01T00:00:00.000Z|8 +1954-01-01T00:00:00.000Z|8 +1960-01-01T00:00:00.000Z|8 +1961-01-01T00:00:00.000Z|8 +1958-01-01T00:00:00.000Z|7 
+1963-01-01T00:00:00.000Z|7 +1962-01-01T00:00:00.000Z|6 +1956-01-01T00:00:00.000Z|5 +1955-01-01T00:00:00.000Z|4 +1957-01-01T00:00:00.000Z|4 +1964-01-01T00:00:00.000Z|4 +1965-01-01T00:00:00.000Z|1 +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.sql-spec index 087dbdad1d1..2937b34f0a5 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg-ordering.sql-spec @@ -80,7 +80,7 @@ aggNotSpecifiedInTheAggregateAndGroupWithHavingWithLimitAndDirection SELECT gender, MIN(salary) AS min, COUNT(*) AS c FROM test_emp GROUP BY gender HAVING c > 1 ORDER BY MAX(salary) ASC, c DESC LIMIT 5; groupAndAggNotSpecifiedInTheAggregateWithHaving -SELECT gender, MIN(salary) AS min, COUNT(*) AS c FROM test_emp GROUP BY gender HAVING c > 1 ORDER BY gender, MAX(salary); +SELECT gender, MIN(salary) AS min, COUNT(*) AS c FROM test_emp GROUP BY gender HAVING c > 1 ORDER BY gender NULLS FIRST, MAX(salary); multipleAggsThatGetRewrittenWithAliasOnAMediumGroupBy SELECT languages, MAX(salary) AS max, MIN(salary) AS min FROM test_emp GROUP BY languages ORDER BY max; @@ -136,5 +136,26 @@ SELECT gender AS g, first_name AS f, last_name AS l FROM test_emp GROUP BY f, ge multipleGroupingsAndOrderingByGroups_8 SELECT gender AS g, first_name, last_name FROM test_emp GROUP BY g, last_name, first_name ORDER BY gender ASC, first_name DESC, last_name ASC; -multipleGroupingsAndOrderingByGroupsWithFunctions -SELECT first_name f, last_name l, gender g, CONCAT(first_name, last_name) c FROM test_emp GROUP BY gender, l, f, c ORDER BY gender, c DESC, first_name, last_name ASC; +multipleGroupingsAndOrderingByGroupsAndAggs_1 +SELECT gender, MIN(salary) AS min, COUNT(*) AS c, MAX(salary) AS max FROM test_emp GROUP BY gender HAVING c > 1 ORDER BY gender ASC NULLS FIRST, MAX(salary) DESC; + +multipleGroupingsAndOrderingByGroupsAndAggs_2 +SELECT gender, MIN(salary) AS min, 
COUNT(*) AS c, MAX(salary) AS max FROM test_emp GROUP BY gender HAVING c > 1 ORDER BY gender DESC NULLS LAST, MAX(salary) ASC; + +multipleGroupingsAndOrderingByGroupsWithFunctions_1 +SELECT first_name f, last_name l, gender g, CONCAT(first_name, last_name) c FROM test_emp GROUP BY gender, l, f, c ORDER BY gender NULLS FIRST, c DESC, first_name, last_name ASC; + +multipleGroupingsAndOrderingByGroupsWithFunctions_2 +SELECT first_name f, last_name l, gender g, CONCAT(first_name, last_name) c FROM test_emp GROUP BY gender, l, f, c ORDER BY c DESC, gender DESC NULLS LAST, first_name, last_name ASC; + +multipleGroupingsAndOrderingByGroupsAndAggregatesWithFunctions_1 +SELECT CONCAT('foo', gender) g, MAX(salary) AS max, MIN(salary) AS min FROM test_emp GROUP BY g ORDER BY 1 NULLS FIRST, 2, 3; + +multipleGroupingsAndOrderingByGroupsAndAggregatesWithFunctions_2 +SELECT CONCAT('foo', gender) g, MAX(salary) AS max, MIN(salary) AS min FROM test_emp GROUP BY g ORDER BY 1 DESC NULLS LAST, 2, 3; + +multipleGroupingsAndOrderingByGroupsAndAggregatesWithFunctions_3 +SELECT CONCAT('foo', gender) g, MAX(salary) AS max, MIN(salary) AS min FROM test_emp GROUP BY g ORDER BY 2, 1 NULLS FIRST, 3; + +multipleGroupingsAndOrderingByGroupsAndAggregatesWithFunctions_4 +SELECT CONCAT('foo', gender) g, MAX(salary) AS max, MIN(salary) AS min FROM test_emp GROUP BY g ORDER BY 3 DESC, 1 NULLS FIRST, 2; diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec index 2c1e1126ee1..da9fe1cd7c8 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec @@ -534,6 +534,19 @@ SELECT HISTOGRAM(YEAR(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY null |10 ; +histogramDateTimeWithScalars +schema::h:ts|c:l +SELECT HISTOGRAM(birth_date, INTERVAL 20 MONTHS + INTERVAL 30 MONTHS) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY c DESC; + + h | c 
+------------------------+--------------- +1957-09-06T00:00:00.000Z|31 +1953-07-29T00:00:00.000Z|24 +1961-10-15T00:00:00.000Z|20 +1949-06-20T00:00:00.000Z|15 +null |10 +; + histogramYearOnDateTimeWithScalars schema::year:i|c:l SELECT YEAR(CAST(birth_date + INTERVAL 5 YEARS AS DATE) + INTERVAL 20 MONTHS) AS year, COUNT(*) as c FROM test_emp GROUP BY 1; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/AnalysisException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/AnalysisException.java deleted file mode 100644 index 4d3a799467e..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/AnalysisException.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.analysis; - -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.ql.tree.Location; -import org.elasticsearch.xpack.ql.tree.Node; -import org.elasticsearch.xpack.sql.SqlClientException; - -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; - -public class AnalysisException extends SqlClientException { - - private final int line; - private final int column; - - public AnalysisException(Node source, String message, Object... 
args) { - super(message, args); - - Location loc = Location.EMPTY; - if (source != null && source.source() != null) { - loc = source.source().source(); - } - this.line = loc.getLineNumber(); - this.column = loc.getColumnNumber(); - } - - public AnalysisException(Node source, String message, Throwable cause) { - super(message, cause); - - Location loc = Location.EMPTY; - if (source != null && source.source() != null) { - loc = source.source().source(); - } - this.line = loc.getLineNumber(); - this.column = loc.getColumnNumber(); - } - - public int getLineNumber() { - return line; - } - - public int getColumnNumber() { - return column; - } - - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - - @Override - public String getMessage() { - return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), super.getMessage()); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 64c5c6d61b2..bc6ac7eaf90 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.xpack.ql.capabilities.Resolvables; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.AttributeMap; @@ -45,7 +46,6 @@ import org.elasticsearch.xpack.ql.type.InvalidMappedField; import org.elasticsearch.xpack.ql.type.UnsupportedEsField; import org.elasticsearch.xpack.ql.util.CollectionUtils; import org.elasticsearch.xpack.ql.util.Holder; -import 
org.elasticsearch.xpack.sql.analysis.analyzer.Verifier.Failure; import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.expression.SubQueryExpression; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java index 0eba4126ba8..7a754065f3b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java @@ -5,31 +5,20 @@ */ package org.elasticsearch.xpack.sql.analysis.analyzer; -import org.elasticsearch.xpack.ql.tree.Location; -import org.elasticsearch.xpack.ql.util.StringUtils; -import org.elasticsearch.xpack.sql.analysis.AnalysisException; -import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier.Failure; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.ql.common.Failure; +import org.elasticsearch.xpack.sql.SqlClientException; import java.util.Collection; -import java.util.stream.Collectors; - -public class VerificationException extends AnalysisException { - - private final Collection failures; +public class VerificationException extends SqlClientException { protected VerificationException(Collection sources) { - super(null, StringUtils.EMPTY); - failures = sources; + super(Failure.failMessage(sources)); } @Override - public String getMessage() { - return failures.stream() - .map(f -> { - Location l = f.node().source().source(); - return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message(); - }) - .collect(Collectors.joining(StringUtils.NEW_LINE, "Found " + failures.size() + " problem(s)\n", StringUtils.EMPTY)); + public RestStatus status() { + return RestStatus.BAD_REQUEST; } } 
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 6c1df11f070..1dc1d84ba6d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.xpack.ql.capabilities.Unresolvable; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.AttributeMap; @@ -61,12 +62,11 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.function.Consumer; import static java.util.stream.Collectors.toMap; -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.ql.common.Failure.fail; import static org.elasticsearch.xpack.ql.util.CollectionUtils.combine; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.COMMAND; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.GROUPBY; @@ -89,52 +89,6 @@ public final class Verifier { this.metrics = metrics; } - static class Failure { - private final Node node; - private final String message; - - Failure(Node node, String message) { - this.node = node; - this.message = message; - } - - Node node() { - return node; - } - - String message() { - return message; - } - - @Override - public int hashCode() { - return Objects.hash(node); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - Verifier.Failure other = 
(Verifier.Failure) obj; - return Objects.equals(node, other.node); - } - - @Override - public String toString() { - return message; - } - } - - private static Failure fail(Node source, String message, Object... args) { - return new Failure(source, format(message, args)); - } - public Map, String> verifyFailures(LogicalPlan plan) { Collection failures = verify(plan); return failures.stream().collect(toMap(Failure::node, Failure::message)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 03769f725c9..d769ede2b50 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -593,36 +593,51 @@ public class Querier { this.sortingColumns = sortingColumns; } - // compare row based on the received attribute sort - // if a sort item is not in the list, it is assumed the sorting happened in ES - // and the results are left as is (by using the row ordering), otherwise it is sorted based on the given criteria. - // - // Take for example ORDER BY a, x, b, y - // a, b - are sorted in ES - // x, y - need to be sorted client-side - // sorting on x kicks in, only if the values for a are equal. - + /** + * Compare row based on the received attribute sort + *
    + *
  • + * If a tuple in {@code sortingColumns} has a null comparator, it is assumed the sorting + * happened in ES and the results are left as is (by using the row ordering), otherwise it is + * sorted based on the given criteria. + *
  • + *
  • + * If no tuple exists in {@code sortingColumns} for an output column, it means this column + * is not included at all in the ORDER BY + *
  • + *
+ * + * Take for example ORDER BY a, x, b, y + * a, b - are sorted in ES + * x, y - need to be sorted client-side + * sorting on x kicks in only if the values for a are equal. + * sorting on y kicks in only if the values for a, x and b are all equal + * + */ // thanks to @jpountz for the row ordering idea as a way to preserve ordering @SuppressWarnings("unchecked") @Override protected boolean lessThan(Tuple, Integer> l, Tuple, Integer> r) { for (Tuple tuple : sortingColumns) { - int i = tuple.v1().intValue(); + int columnIdx = tuple.v1().intValue(); Comparator comparator = tuple.v2(); - Object vl = l.v1().get(i); - Object vr = r.v1().get(i); + // Get the values for left and right rows at the current column index + Object vl = l.v1().get(columnIdx); + Object vr = r.v1().get(columnIdx); if (comparator != null) { int result = comparator.compare(vl, vr); - // if things are equals, move to the next comparator + // if things are not equal: return the comparison result, + // otherwise: move to the next comparator to solve the tie. if (result != 0) { return result > 0; } } - // no comparator means the existing order needs to be preserved + // no comparator means the rows are pre-ordered by ES for the column at + // the current index and the existing order needs to be preserved else { - // check the values - if they are equal move to the next comparator - // otherwise return the row order + // check the values - if they are not equal return the row order + // otherwise: move to the next comparator to solve the tie. 
if (Objects.equals(vl, vr) == false) { return l.v2().compareTo(r.v2()) > 0; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java index 453cdcde377..4fb4684e640 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java @@ -110,7 +110,7 @@ public abstract class SourceGenerator { source.sort("_doc"); return; } - for (Sort sortable : container.sort()) { + for (Sort sortable : container.sort().values()) { SortBuilder sortBuilder = null; if (sortable instanceof AttributeSort) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index fdb0f554ccd..a6aed7cbe44 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -129,7 +129,8 @@ import java.util.StringJoiner; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.converterFor; +import static org.elasticsearch.xpack.sql.type.SqlDataTypeConverter.canConvert; +import static org.elasticsearch.xpack.sql.type.SqlDataTypeConverter.converterFor; import static org.elasticsearch.xpack.sql.util.DateUtils.asTimeOnly; import static org.elasticsearch.xpack.sql.util.DateUtils.dateOfEscapedLiteral; import static org.elasticsearch.xpack.sql.util.DateUtils.dateTimeOfEscapedLiteral; @@ -700,6 +701,9 @@ abstract class ExpressionBuilder extends IdentifierBuilder { SqlTypedParamValue param = param(ctx.PARAM()); DataType dataType = 
SqlDataTypes.fromTypeName(param.type); Source source = source(ctx); + if (dataType == null) { + throw new ParsingException(source, "Invalid parameter data type [{}]", param.type); + } if (param.value == null) { // no conversion is required for null values return new Literal(source, null, dataType); @@ -717,6 +721,10 @@ abstract class ExpressionBuilder extends IdentifierBuilder { } // otherwise we need to make sure that xcontent-serialized value is converted to the correct type try { + if (canConvert(sourceType, dataType) == false) { + throw new ParsingException(source, "Cannot cast value [{}] of type [{}] to parameter type [{}]", param.value, sourceType, + dataType); + } return new Literal(source, converterFor(sourceType, dataType).convert(param.value), dataType); } catch (QlIllegalArgumentException ex) { throw new ParsingException(ex, source, "Unexpected actual parameter type [{}] for type [{}]", sourceType, param.type); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Planner.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Planner.java index ac293fd3384..e9ea5ae1bf0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Planner.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Planner.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.sql.planner; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.tree.Node; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; -import org.elasticsearch.xpack.sql.planner.Verifier.Failure; import java.util.List; import java.util.Map; @@ -49,7 +49,7 @@ public class Planner { public Map, String> verifyMappingPlanFailures(PhysicalPlan plan) { List failures = Verifier.verifyMappingPlan(plan); - return failures.stream().collect(toMap(Failure::source, Failure::message)); + return 
failures.stream().collect(toMap(Failure::node, Failure::message)); } public PhysicalPlan verifyExecutingPlan(PhysicalPlan plan) { @@ -62,6 +62,6 @@ public class Planner { public Map, String> verifyExecutingPlanFailures(PhysicalPlan plan) { List failures = Verifier.verifyExecutingPlan(plan); - return failures.stream().collect(toMap(Failure::source, Failure::message)); + return failures.stream().collect(toMap(Failure::node, Failure::message)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java index 8a013fee2f9..3552057b9a2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java @@ -6,12 +6,10 @@ package org.elasticsearch.xpack.sql.planner; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.ql.tree.Location; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.sql.SqlClientException; -import org.elasticsearch.xpack.sql.planner.Verifier.Failure; import java.util.Collection; -import java.util.stream.Collectors; public class PlanningException extends SqlClientException { public PlanningException(String message, Object... 
args) { @@ -19,20 +17,11 @@ public class PlanningException extends SqlClientException { } public PlanningException(Collection sources) { - super(extractMessage(sources)); + super(Failure.failMessage(sources)); } @Override public RestStatus status() { return RestStatus.BAD_REQUEST; } - - private static String extractMessage(Collection failures) { - return failures.stream() - .map(f -> { - Location l = f.source().source().source(); - return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message(); - }) - .collect(Collectors.joining("\n", "Found " + failures.size() + " problem(s)\n", "")); - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 4c314442a53..a2effbe34ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -67,6 +67,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef; import org.elasticsearch.xpack.sql.querydsl.container.GlobalCountRef; import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef; import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef.Property; +import org.elasticsearch.xpack.sql.querydsl.container.GroupingFunctionSort; import org.elasticsearch.xpack.sql.querydsl.container.MetricAggRef; import org.elasticsearch.xpack.sql.querydsl.container.PivotColumnRef; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; @@ -682,37 +683,36 @@ class QueryFolder extends RuleExecutor { // TODO: might need to validate whether the target field or group actually exist if (group != null && group != Aggs.IMPLICIT_GROUP_KEY) { - // check whether the lookup matches a group - if (group.id().equals(lookup)) { - qContainer = qContainer.updateGroup(group.with(direction)); - } - // else it's a leafAgg - else { - 
qContainer = qContainer.updateGroup(group.with(direction)); - } + qContainer = qContainer.updateGroup(group.with(direction)); } + + // field + if (orderExpression instanceof FieldAttribute) { + qContainer = qContainer.addSort(lookup, + new AttributeSort((FieldAttribute) orderExpression, direction, missing)); + } + // scalar functions typically require script ordering + else if (orderExpression instanceof ScalarFunction) { + ScalarFunction sf = (ScalarFunction) orderExpression; + // nope, use scripted sorting + qContainer = qContainer.addSort(lookup, new ScriptSort(sf.asScript(), direction, missing)); + } + // histogram + else if (orderExpression instanceof Histogram) { + qContainer = qContainer.addSort(lookup, new GroupingFunctionSort(direction, missing)); + } + // score + else if (orderExpression instanceof Score) { + qContainer = qContainer.addSort(lookup, new ScoreSort(direction, missing)); + } + // agg function + else if (orderExpression instanceof AggregateFunction) { + qContainer = qContainer.addSort(lookup, + new AggregateSort((AggregateFunction) orderExpression, direction, missing)); + } + // unknown else { - // scalar functions typically require script ordering - if (orderExpression instanceof ScalarFunction) { - ScalarFunction sf = (ScalarFunction) orderExpression; - // nope, use scripted sorting - qContainer = qContainer.addSort(new ScriptSort(sf.asScript(), direction, missing)); - } - // score - else if (orderExpression instanceof Score) { - qContainer = qContainer.addSort(new ScoreSort(direction, missing)); - } - // field - else if (orderExpression instanceof FieldAttribute) { - qContainer = qContainer.addSort(new AttributeSort((FieldAttribute) orderExpression, direction, missing)); - } - // agg function - else if (orderExpression instanceof AggregateFunction) { - qContainer = qContainer.addSort(new AggregateSort((AggregateFunction) orderExpression, direction, missing)); - } else { - // unknown - throw new SqlIllegalArgumentException("unsupported 
sorting expression {}", orderExpression); - } + throw new SqlIllegalArgumentException("unsupported sorting expression {}", orderExpression); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java index 6b9683d439d..fcd2d03b8c8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java @@ -5,8 +5,8 @@ */ package org.elasticsearch.xpack.sql.planner; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.function.aggregate.InnerAggregate; -import org.elasticsearch.xpack.ql.tree.Node; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.sql.plan.physical.PivotExec; import org.elasticsearch.xpack.sql.plan.physical.Unexecutable; @@ -14,53 +14,11 @@ import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec; import java.util.ArrayList; import java.util.List; -import java.util.Objects; -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.ql.common.Failure.fail; abstract class Verifier { - static class Failure { - private final Node source; - private final String message; - - Failure(Node source, String message) { - this.source = source; - this.message = message; - } - - Node source() { - return source; - } - - String message() { - return message; - } - - @Override - public int hashCode() { - return source.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - Verifier.Failure other = (Verifier.Failure) obj; - return Objects.equals(source, other.source); - } - } - - private static Failure fail(Node source, String message, Object... 
args) { - return new Failure(source, format(null, message, args)); - } - static List verifyMappingPlan(PhysicalPlan plan) { List failures = new ArrayList<>(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java index fbb4eedc57c..abb83392078 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java @@ -6,9 +6,7 @@ package org.elasticsearch.xpack.sql.plugin; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -26,8 +24,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestSqlClearCursorAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlClearCursorAction.class)); - @Override public List routes() { return emptyList(); @@ -37,7 +33,7 @@ public class RestSqlClearCursorAction extends BaseRestHandler { public List replacedRoutes() { return singletonList(new ReplacedRoute( POST, Protocol.CLEAR_CURSOR_REST_ENDPOINT, - POST, Protocol.CLEAR_CURSOR_DEPRECATED_REST_ENDPOINT, deprecationLogger)); + POST, Protocol.CLEAR_CURSOR_DEPRECATED_REST_ENDPOINT)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 9c4f413c170..0ca10447689 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -6,9 +6,7 @@ package org.elasticsearch.xpack.sql.plugin; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -36,8 +34,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestSqlQueryAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlQueryAction.class)); - @Override public List routes() { return emptyList(); @@ -48,10 +44,10 @@ public class RestSqlQueryAction extends BaseRestHandler { return unmodifiableList(asList( new ReplacedRoute( GET, Protocol.SQL_QUERY_REST_ENDPOINT, - GET, Protocol.SQL_QUERY_DEPRECATED_REST_ENDPOINT, deprecationLogger), + GET, Protocol.SQL_QUERY_DEPRECATED_REST_ENDPOINT), new ReplacedRoute( POST, Protocol.SQL_QUERY_REST_ENDPOINT, - POST, Protocol.SQL_QUERY_DEPRECATED_REST_ENDPOINT, deprecationLogger))); + POST, Protocol.SQL_QUERY_DEPRECATED_REST_ENDPOINT))); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java index c4a0f5f570b..a549d407eee 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java @@ -6,9 +6,7 @@ package org.elasticsearch.xpack.sql.plugin; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; @@ -22,8 +20,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestSqlStatsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlStatsAction.class)); - @Override public List routes() { return emptyList(); @@ -33,7 +29,7 @@ public class RestSqlStatsAction extends BaseRestHandler { public List replacedRoutes() { return singletonList(new ReplacedRoute( GET, Protocol.SQL_STATS_REST_ENDPOINT, - GET, Protocol.SQL_STATS_DEPRECATED_REST_ENDPOINT, deprecationLogger)); + GET, Protocol.SQL_STATS_DEPRECATED_REST_ENDPOINT)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java index 0d4aeeedec8..da75f747ea1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.sql.plugin; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -30,8 +28,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestSqlTranslateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlTranslateAction.class)); - @Override public List routes() { return emptyList(); @@ -42,10 +38,10 @@ public class RestSqlTranslateAction extends BaseRestHandler { return unmodifiableList(asList( new 
ReplacedRoute( GET, Protocol.SQL_TRANSLATE_REST_ENDPOINT, - GET, Protocol.SQL_TRANSLATE_DEPRECATED_REST_ENDPOINT, deprecationLogger), + GET, Protocol.SQL_TRANSLATE_DEPRECATED_REST_ENDPOINT), new ReplacedRoute( POST, Protocol.SQL_TRANSLATE_REST_ENDPOINT, - POST, Protocol.SQL_TRANSLATE_DEPRECATED_REST_ENDPOINT, deprecationLogger))); + POST, Protocol.SQL_TRANSLATE_DEPRECATED_REST_ENDPOINT))); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupingFunctionSort.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupingFunctionSort.java new file mode 100644 index 00000000000..f32c3548d03 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupingFunctionSort.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.querydsl.container; + +import java.util.Objects; + +public class GroupingFunctionSort extends Sort { + + public GroupingFunctionSort(Direction direction, Missing missing) { + super(direction, missing); + } + + @Override + public int hashCode() { + return Objects.hash(direction(), missing()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + GroupingFunctionSort other = (GroupingFunctionSort) obj; + return Objects.equals(direction(), other.direction()) + && Objects.equals(missing(), other.missing()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 8d40dda7e5a..cc79dbe95d2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.FieldAttribute; -import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.gen.pipeline.ConstantInput; import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe; @@ -43,15 +42,12 @@ import java.util.BitSet; import java.util.Collection; import java.util.Comparator; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; import static java.util.Collections.emptyList; 
import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; import static java.util.Collections.singletonMap; import static org.elasticsearch.xpack.ql.util.CollectionUtils.combine; @@ -82,7 +78,7 @@ public class QueryContainer { // at scrolling, their inputs (leaves) get updated private final AttributeMap scalarFunctions; - private final Set sort; + private final Map sort; private final int limit; private final boolean trackHits; private final boolean includeFrozen; @@ -106,7 +102,7 @@ public class QueryContainer { AttributeMap aliases, Map pseudoFunctions, AttributeMap scalarFunctions, - Set sort, + Map sort, int limit, boolean trackHits, boolean includeFrozen, @@ -117,7 +113,7 @@ public class QueryContainer { this.aliases = aliases == null || aliases.isEmpty() ? AttributeMap.emptyAttributeMap() : aliases; this.pseudoFunctions = pseudoFunctions == null || pseudoFunctions.isEmpty() ? emptyMap() : pseudoFunctions; this.scalarFunctions = scalarFunctions == null || scalarFunctions.isEmpty() ? AttributeMap.emptyAttributeMap() : scalarFunctions; - this.sort = sort == null || sort.isEmpty() ? emptySet() : sort; + this.sort = sort == null || sort.isEmpty() ? 
emptyMap() : sort; this.limit = limit; this.trackHits = trackHits; this.includeFrozen = includeFrozen; @@ -134,45 +130,48 @@ public class QueryContainer { return emptyList(); } - List> sortingColumns = new ArrayList<>(sort.size()); - - boolean aggSort = false; - for (Sort s : sort) { - Tuple tuple = new Tuple<>(Integer.valueOf(-1), null); - + for (Sort s : sort.values()) { if (s instanceof AggregateSort) { - AggregateSort as = (AggregateSort) s; - // find the relevant column of each aggregate function - AggregateFunction af = as.agg(); - - aggSort = true; - int atIndex = -1; - String id = Expressions.id(af); - - for (int i = 0; i < fields.size(); i++) { - Tuple field = fields.get(i); - if (field.v2().equals(id)) { - atIndex = i; - break; - } - } - if (atIndex == -1) { - throw new SqlIllegalArgumentException("Cannot find backing column for ordering aggregation [{}]", s); - } - // assemble a comparator for it - Comparator comp = s.direction() == Sort.Direction.ASC ? Comparator.naturalOrder() : Comparator.reverseOrder(); - comp = s.missing() == Sort.Missing.FIRST ? 
Comparator.nullsFirst(comp) : Comparator.nullsLast(comp); - - tuple = new Tuple<>(Integer.valueOf(atIndex), comp); + customSort = Boolean.TRUE; + break; } - sortingColumns.add(tuple); + } + + // If no custom sort is used break early + if (customSort == null) { + customSort = Boolean.FALSE; + return emptyList(); + } + + List> sortingColumns = new ArrayList<>(sort.size()); + for (Map.Entry entry : sort.entrySet()) { + String expressionId = entry.getKey(); + Sort s = entry.getValue(); + + int atIndex = -1; + for (int i = 0; i < fields.size(); i++) { + Tuple field = fields.get(i); + if (field.v2().equals(expressionId)) { + atIndex = i; + break; + } + } + if (atIndex == -1) { + throw new SqlIllegalArgumentException("Cannot find backing column for ordering aggregation [{}]", s); + } + + // assemble a comparator for it, if it's not an AggregateSort + // then it's pre-sorted by ES so use null + Comparator comp = null; + if (s instanceof AggregateSort) { + comp = s.direction() == Sort.Direction.ASC ? Comparator.naturalOrder() : Comparator.reverseOrder(); + comp = s.missing() == Sort.Missing.FIRST ? Comparator.nullsFirst(comp) : Comparator.nullsLast(comp); + } + + sortingColumns.add(new Tuple<>(Integer.valueOf(atIndex), comp)); } - if (customSort == null) { - customSort = Boolean.valueOf(aggSort); - } - - return aggSort ? 
sortingColumns : emptyList(); + return sortingColumns; } /** @@ -230,7 +229,7 @@ public class QueryContainer { return pseudoFunctions; } - public Set sort() { + public Map sort() { return sort; } @@ -304,10 +303,10 @@ public class QueryContainer { return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits, includeFrozen, minPageSize); } - public QueryContainer addSort(Sort sortable) { - Set sort = new LinkedHashSet<>(this.sort); - sort.add(sortable); - return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, + public QueryContainer addSort(String expressionId, Sort sortable) { + Map newSort = new LinkedHashMap<>(this.sort); + newSort.put(expressionId, sortable); + return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, newSort, limit, trackHits, includeFrozen, minPageSize); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java index 9e56f46949f..ce861d8f81a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java @@ -17,7 +17,7 @@ import org.elasticsearch.license.License; import org.elasticsearch.license.License.OperationMode; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.transport.Netty4Plugin; @@ -154,7 +154,7 @@ public class SqlLicenseIT extends AbstractLicensesIntegrationTestCase { .query("SELECT * FROM 
test").get(); SearchSourceBuilder source = response.source(); assertThat(source.docValueFields(), Matchers.contains( - new DocValueFieldsContext.FieldAndFormat("count", null))); + new FetchDocValuesContext.FieldAndFormat("count", null))); FetchSourceContext fetchSource = source.fetchSource(); assertThat(fetchSource.includes(), Matchers.arrayContaining("data")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java index 742db4eca5d..251028be6b2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.action; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.SortBuilders; @@ -35,7 +35,7 @@ public class SqlTranslateActionIT extends AbstractSqlIntegTestCase { assertTrue(fetch.fetchSource()); assertArrayEquals(new String[] { "data", "count" }, fetch.includes()); assertEquals( - singletonList(new DocValueFieldsContext.FieldAndFormat("date", "epoch_millis")), + singletonList(new FetchDocValuesContext.FieldAndFormat("date", "epoch_millis")), source.docValueFields()); assertEquals(singletonList(SortBuilders.fieldSort("count").missing("_last").unmappedType("long")), source.sorts()); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java index 14b1da83825..9c5023ff60c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java @@ -144,17 +144,17 @@ public class FieldAttributeTests extends ESTestCase { } public void testDottedFieldPath() { - assertThat(error("some"), is("Found 1 problem(s)\nline 1:8: Cannot use field [some] type [object] only its subfields")); + assertThat(error("some"), is("Found 1 problem\nline 1:8: Cannot use field [some] type [object] only its subfields")); } public void testDottedFieldPathDeeper() { assertThat(error("some.dotted"), - is("Found 1 problem(s)\nline 1:8: Cannot use field [some.dotted] type [object] only its subfields")); + is("Found 1 problem\nline 1:8: Cannot use field [some.dotted] type [object] only its subfields")); } public void testDottedFieldPathTypo() { assertThat(error("some.dotted.fild"), - is("Found 1 problem(s)\nline 1:8: Unknown column [some.dotted.fild], did you mean [some.dotted.field]?")); + is("Found 1 problem\nline 1:8: Unknown column [some.dotted.fild], did you mean [some.dotted.field]?")); } public void testStarExpansionExcludesObjectAndUnsupportedTypes() { @@ -177,13 +177,13 @@ public class FieldAttributeTests extends ESTestCase { VerificationException ex = expectThrows(VerificationException.class, () -> plan("SELECT test.bar FROM test")); assertEquals( - "Found 1 problem(s)\nline 1:8: Reference [test.bar] is ambiguous (to disambiguate use quotes or qualifiers); " + "Found 1 problem\nline 1:8: Reference [test.bar] is ambiguous (to disambiguate use quotes or qualifiers); " + "matches any of [\"test\".\"bar\", \"test\".\"test.bar\"]", ex.getMessage()); ex = expectThrows(VerificationException.class, () -> plan("SELECT test.test FROM test")); assertEquals( - "Found 1 problem(s)\nline 
1:8: Reference [test.test] is ambiguous (to disambiguate use quotes or qualifiers); " + "Found 1 problem\nline 1:8: Reference [test.test] is ambiguous (to disambiguate use quotes or qualifiers); " + "matches any of [\"test\".\"test\", \"test\".\"test.test\"]", ex.getMessage()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index a3d55a19ec1..bb54f5c7c71 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -50,9 +50,11 @@ public class VerifierErrorMessagesTests extends ESTestCase { private String error(IndexResolution getIndexResult, String sql) { Analyzer analyzer = new Analyzer(SqlTestUtils.TEST_CFG, new SqlFunctionRegistry(), getIndexResult, new Verifier(new Metrics())); VerificationException e = expectThrows(VerificationException.class, () -> analyzer.analyze(parser.createStatement(sql), true)); - assertTrue(e.getMessage().startsWith("Found ")); - String header = "Found 1 problem(s)\nline "; - return e.getMessage().substring(header.length()); + String message = e.getMessage(); + assertTrue(message.startsWith("Found ")); + String pattern = "\nline "; + int index = message.indexOf(pattern); + return message.substring(index + pattern.length()); } private LogicalPlan accept(String sql) { @@ -455,6 +457,8 @@ public class VerifierErrorMessagesTests extends ESTestCase { public void testGroupByAggregate() { assertEquals("1:36: Cannot use an aggregate [AVG] for grouping", error("SELECT AVG(int) FROM test GROUP BY AVG(int)")); + assertEquals("1:65: Cannot use an aggregate [AVG] for grouping", + error("SELECT ROUND(AVG(int),2), AVG(int), COUNT(*) FROM test GROUP BY AVG(int) ORDER BY AVG(int)")); } 
public void testStarOnNested() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java index a30a89addb8..64153a92f12 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java @@ -66,6 +66,70 @@ public class QuerierTests extends ESTestCase { } } + @SuppressWarnings("rawtypes") + public void testAggSorting_TwoFields_One_Presorted() { + List> tuples = new ArrayList<>(2); + tuples.add(new Tuple<>(0, null)); + tuples.add(new Tuple<>(1, Comparator.reverseOrder())); + Querier.AggSortingQueue queue = new AggSortingQueue(20, tuples); + + for (int i = 1; i <= 100; i++) { + queue.insertWithOverflow(new Tuple<>(Arrays.asList(i <= 5 ? null : 100 - i + 1, i), i)); + } + List> results = queue.asList(); + + assertEquals(20, results.size()); + for (int i = 0; i < 20; i++) { + assertEquals(i < 5 ? null : 100 - i, results.get(i).get(0)); + assertEquals(i < 5 ? 5 - i : i + 1, results.get(i).get(1)); + } + } + + @SuppressWarnings({"rawtypes", "unchecked"}) + public void testAggSorting_FourFields() { + List comparators = Arrays. 
asList( + Comparator.naturalOrder(), + Comparator.naturalOrder(), + Comparator.reverseOrder(), + Comparator.naturalOrder() + ); + List> tuples = new ArrayList<>(4); + tuples.add(new Tuple<>(0, null)); + tuples.add(new Tuple<>(1, comparators.get(1))); + tuples.add(new Tuple<>(2, null)); + tuples.add(new Tuple<>(3, comparators.get(3))); + Querier.AggSortingQueue queue = new AggSortingQueue(35, tuples); + + List> expected = new ArrayList<>(128); + for (int i = 0; i < 128; i++) { + int col1 = i / 16; + int col2 = 15 - (i / 8); + int col3 = 32 - (i / 4); + int col4 = 127 - i; + + expected.add(Arrays.asList(col1, col2, col3, col4)); + queue.insertWithOverflow(new Tuple<>(Arrays.asList(col1, col2, col3, col4), i)); + } + + expected.sort((o1, o2) -> { + for (int i = 0; i < 4; i++) { + int result = comparators.get(i).compare(o1.get(i), o2.get(i)); + if (result != 0) { + return result; + } + } + return 0; + }); + List> results = queue.asList(); + + assertEquals(35, results.size()); + for (int i = 0; i < 35; i++) { + for (int j = 0; j < 4; j++) { + assertEquals(expected.get(i).get(j), results.get(i).get(j)); + } + } + } + @SuppressWarnings("rawtypes") public void testAggSorting_Randomized() { // Initialize comparators for fields (columns) @@ -76,7 +140,7 @@ public class QuerierTests extends ESTestCase { boolean order = randomBoolean(); ordering[j] = order; Comparator comp = order ? 
Comparator.naturalOrder() : Comparator.reverseOrder(); - tuples.add(new Tuple(j, comp)); + tuples.add(new Tuple<>(j, comp)); } // Insert random no of documents (rows) with random 0/1 values for each field diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java index 64155093b87..2c2052264a9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java @@ -97,7 +97,7 @@ public class SourceGeneratorTests extends ESTestCase { public void testSortScoreSpecified() { QueryContainer container = new QueryContainer() - .addSort(new ScoreSort(Direction.DESC, null)); + .addSort("id", new ScoreSort(Direction.DESC, null)); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(singletonList(scoreSort()), sourceBuilder.sorts()); } @@ -106,14 +106,14 @@ public class SourceGeneratorTests extends ESTestCase { FieldSortBuilder sortField = fieldSort("test").unmappedType("keyword"); QueryContainer container = new QueryContainer() - .addSort(new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.ASC, - Missing.LAST)); + .addSort("id", new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), + Direction.ASC, Missing.LAST)); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(singletonList(sortField.order(SortOrder.ASC).missing("_last")), sourceBuilder.sorts()); container = new QueryContainer() - .addSort(new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.DESC, - Missing.FIRST)); + .addSort("id", new 
AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), + Direction.DESC, Missing.FIRST)); sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(singletonList(sortField.order(SortOrder.DESC).missing("_first")), sourceBuilder.sorts()); } @@ -137,4 +137,4 @@ public class SourceGeneratorTests extends ESTestCase { SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertNull(sourceBuilder.sorts()); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java index dae2efb865b..0f1072f462f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java @@ -61,7 +61,7 @@ public class PostOptimizerVerifierTests extends ESTestCase { private String error(IndexResolution getIndexResult, String sql) { PlanningException e = expectThrows(PlanningException.class, () -> plan(sql)); assertTrue(e.getMessage().startsWith("Found ")); - String header = "Found 1 problem(s)\nline "; + String header = "Found 1 problem\nline "; return e.getMessage().substring(header.length()); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index b5138ae9e8c..aa405b2079e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -997,6 +997,21 @@ public class QueryTranslatorTests extends ESTestCase { + 
"\"fixed_interval\":\"62208000000ms\",\"time_zone\":\"Z\"}}}]}")); } + public void testGroupByHistogramWithScalarsQueryTranslator() { + PhysicalPlan p = optimizeAndPlan("SELECT MAX(int), HISTOGRAM(date, INTERVAL 5 YEARS - INTERVAL 6 MONTHS) AS h " + + "FROM test GROUP BY h"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec eqe = (EsQueryExec) p; + assertEquals(2, eqe.output().size()); + assertEquals("MAX(int)", eqe.output().get(0).qualifiedName()); + assertEquals(INTEGER, eqe.output().get(0).dataType()); + assertEquals("h", eqe.output().get(1).qualifiedName()); + assertEquals(DATETIME, eqe.output().get(1).dataType()); + assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), + containsString("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\"," + + "\"order\":\"asc\",\"fixed_interval\":\"139968000000ms\",\"time_zone\":\"Z\"}}}]}")); + } + public void testGroupByYearQueryTranslator() { PhysicalPlan p = optimizeAndPlan("SELECT YEAR(date) FROM test GROUP BY YEAR(date)"); assertEquals(EsQueryExec.class, p.getClass()); diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 3f133ee539e..5ed2b8c570a 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -26,7 +26,7 @@ import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import 
org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField; import org.junit.After; @@ -89,7 +89,7 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { List templates = new ArrayList<>(); templates.addAll( Arrays.asList( - AuditorField.NOTIFICATIONS_INDEX, + NotificationsIndex.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, AnomalyDetectorsIndex.jobResultsIndexPrefix(), diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json new file mode 100644 index 00000000000..f8e3f4c46ea --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json @@ -0,0 +1,89 @@ +{ + "cat.ml_data_frame_analytics":{ + "documentation":{ + "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics-stats.html" + }, + "stability":"stable", + "url":{ + "paths":[ + { + "path":"/_cat/ml/data_frame/analytics", + "methods":[ + "GET" + ] + }, + { + "path":"/_cat/ml/data_frame/analytics/{id}", + "methods":[ + "GET" + ], + "parts":{ + "id":{ + "type":"string", + "description":"The ID of the data frame analytics to fetch" + } + } + } + ] + }, + "params":{ + "allow_no_match":{ + "type":"boolean", + "required":false, + "description":"Whether to ignore if a wildcard expression matches no configs. (This includes `_all` string or when no configs have been specified)" + }, + "bytes":{ + "type":"enum", + "description":"The unit in which to display byte values", + "options":[ + "b", + "k", + "kb", + "m", + "mb", + "g", + "gb", + "t", + "tb", + "p", + "pb" + ] + }, + "format":{ + "type":"string", + "description":"a short version of the Accept header, e.g. 
json, yaml" + }, + "h":{ + "type":"list", + "description":"Comma-separated list of column names to display" + }, + "help":{ + "type":"boolean", + "description":"Return help information", + "default":false + }, + "s":{ + "type":"list", + "description":"Comma-separated list of column names or column aliases to sort by" + }, + "time":{ + "type":"enum", + "description":"The unit in which to display time values", + "options":[ + "d (Days)", + "h (Hours)", + "m (Minutes)", + "s (Seconds)", + "ms (Milliseconds)", + "micros (Microseconds)", + "nanos (Nanoseconds)" + ] + }, + "v":{ + "type":"boolean", + "description":"Verbose mode. Display column headers", + "default":false + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/eql.search.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/eql.search.json index a08dceaccbd..68c105a41d7 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/eql.search.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/eql.search.json @@ -1,7 +1,7 @@ { "eql.search":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search.html" + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/eql.html" }, "stability": "beta", "url":{ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.execute_lifecycle.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.execute_lifecycle.json index 6538dabd230..963a6e0577b 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.execute_lifecycle.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.execute_lifecycle.json @@ -1,7 +1,7 @@ { "slm.execute_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-execute-policy.html" + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-execute-lifecycle.html" }, "stability":"stable", "url":{ diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.get_status.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.get_status.json index 163ad5558c3..4ea6089ce43 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.get_status.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.get_status.json @@ -1,7 +1,7 @@ { "slm.get_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-get-status.html" + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-get-status.html" }, "stability":"stable", "url":{ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.start.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.start.json index 21ae3d50978..0ceb74e9279 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.start.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.start.json @@ -1,7 +1,7 @@ { "slm.start":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-start.html" + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-start.html" }, "stability":"stable", "url":{ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.stop.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.stop.json index 63b74ab9c2f..b052f4bf975 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.stop.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/slm.stop.json @@ -1,7 +1,7 @@ { "slm.stop":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-stop.html" + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-stop.html" }, "stability":"stable", "url":{ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/analytics/top_metrics.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/analytics/top_metrics.yml new file mode 100644 index 
00000000000..556026a50c6 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/analytics/top_metrics.yml @@ -0,0 +1,371 @@ +--- +"sort by long field": + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"s": 1, "v": 3.1415}' + - '{"index": {}}' + - '{"s": 2, "v": 1}' + - '{"index": {}}' + - '{"s": 3, "v": 2.71828}' + + - do: + search: + size: 0 + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + s: desc + - match: { aggregations.tm.top.0.metrics.v: 2.718280076980591 } + - match: { aggregations.tm.top.0.sort: [3] } + + - do: + search: + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + s: asc + - match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 } + - match: { aggregations.tm.top.0.sort: [1] } + + - do: + search: + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + s: + order: asc + numeric_type: date + - match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 } + - match: { aggregations.tm.top.0.sort: [1] } + +--- +"sort by double field": + - do: + indices.create: + index: test + body: + mappings: + properties: + s: + type: scaled_float + scaling_factor: 10 + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"s": 1.0, "v": 3.1415}' + - '{"index": {}}' + - '{"s": 2.0, "v": 1}' + - '{"index": {}}' + - '{"s": 3.0, "v": 2.71828}' + + - do: + search: + size: 0 + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + s: desc + - match: { aggregations.tm.top.0.metrics.v: 2.718280076980591 } + - match: { aggregations.tm.top.0.sort: [3.0] } + + - do: + search: + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + s: asc + - match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 } + - match: { aggregations.tm.top.0.sort: [1.0] } + +--- +"sort by scaled float field": + - do: + indices.create: + index: test + body: + mappings: + properties: + s: + type: scaled_float + scaling_factor: 10 + - do: + bulk: + index: 
test + refresh: true + body: + - '{"index": {}}' + - '{"s": 1, "v": 3.1415}' + - '{"index": {}}' + - '{"s": 2, "v": 1}' + - '{"index": {}}' + - '{"s": 3, "v": 2.71828}' + + - do: + search: + size: 0 + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + s: desc + - match: { aggregations.tm.top.0.metrics.v: 2.718280076980591 } + - match: { aggregations.tm.top.0.sort: [3.0] } + + - do: + search: + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + s: asc + - match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 } + - match: { aggregations.tm.top.0.sort: [1.0] } + +--- +"sort by keyword field fails": + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"s": "cow", "v": 3.1415}' + + - do: + catch: bad_request + search: + size: 0 + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: s.keyword + - match: { error.root_cause.0.reason: "error building sort for field [s.keyword] of type [keyword] in index [test]: only supported on numeric fields" } + +--- +"sort by score": + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"s": "big cat", "v": 3.1415}' + - '{"index": {}}' + - '{"s": "cat", "v": 1}' + - '{"index": {}}' + - '{"s": "the small dog", "v": 2.71828}' + + - do: + search: + size: 0 + body: + query: + match: + s: big cat + aggs: + tm: + top_metrics: + metric: + field: v + sort: _score + - match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 } + - match: { aggregations.tm.top.0.sort: [1.450832724571228] } + +--- +"sort by numeric script": + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"s": "cow", "v": 3.1415}' + + - do: + search: + size: 0 + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + _script: + type: number + script: + source: doc['s.keyword'].value.length() + - match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 } + - match: { aggregations.tm.top.0.sort: [3.0] } + +--- +"sort by string script 
fails": + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"s": "cow", "v": 3.1415}' + + - do: + catch: bad_request + search: + size: 0 + body: + aggs: + tm: + top_metrics: + metric: + field: v + sort: + _script: + type: string + script: + source: doc['s'].value + - match: { error.root_cause.0.reason: "error building sort for [_script]: script sorting only supported on [numeric] scripts but was [string]" } + +--- +"sort by geo_distance": + - do: + indices.create: + index: test + body: + mappings: + properties: + location: + type: geo_point + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"location": {"lat" : 40.7128, "lon" : -74.0060}, "name": "New York", "population": 8623000}' + - '{"index": {}}' + - '{"location": {"lat" : 34.0522, "lon" : -118.2437}, "name": "Los Angeles", "population": 4000000}' + - '{"index": {}}' + - '{"location": {"lat" : 41.8781, "lon" : -87.6298}, "name": "Chicago", "population": 2716000}' + + - do: + search: + size: 0 + body: + aggs: + pop: + top_metrics: + metric: + field: population + sort: + _geo_distance: + location: "35.7796, -78.6382" + - match: { aggregations.pop.top.0.metrics.population: 8623000 } + - match: { aggregations.pop.top.0.sort: [681335.0456554737] } + +--- +"inside terms": + - do: + indices.create: + index: test + body: + mappings: + properties: + ip: + type: ip + date: + type: date + - do: + bulk: + index: test + refresh: true + body: + - '{"index": {}}' + - '{"ip": "192.168.0.1", "date": "2020-01-01T01:01:01", "v": 1}' + - '{"index": {}}' + - '{"ip": "192.168.0.1", "date": "2020-01-01T02:01:01", "v": 2}' + - '{"index": {}}' + - '{"ip": "192.168.0.2", "date": "2020-01-01T02:01:01", "v": 3}' + + - do: + search: + size: 0 + body: + aggs: + ip: + terms: + field: ip + aggs: + tm: + top_metrics: + metric: + field: v + sort: + date: desc + - length: { aggregations.ip.buckets: 2 } + - match: { aggregations.ip.buckets.0.key: 192.168.0.1 } + - match: { 
aggregations.ip.buckets.0.tm.top.0.metrics.v: 2 } + - match: { aggregations.ip.buckets.0.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] } + - match: { aggregations.ip.buckets.1.key: 192.168.0.2 } + - match: { aggregations.ip.buckets.1.tm.top.0.metrics.v: 3 } + - match: { aggregations.ip.buckets.1.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] } + + - do: + search: + size: 0 + body: + aggs: + ip: + terms: + field: ip + order: + tm.v: desc + aggs: + tm: + top_metrics: + metric: + field: v + sort: + date: desc + - length: { aggregations.ip.buckets: 2 } + - match: { aggregations.ip.buckets.0.key: 192.168.0.2 } + - match: { aggregations.ip.buckets.0.tm.top.0.metrics.v: 3 } + - match: { aggregations.ip.buckets.0.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] } + - match: { aggregations.ip.buckets.1.key: 192.168.0.1 } + - match: { aggregations.ip.buckets.1.tm.top.0.metrics.v: 2 } + - match: { aggregations.ip.buckets.1.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml index c750abb7895..c7ad6e7c23a 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml @@ -1,8 +1,8 @@ --- "Test exists query on flattened object field": - skip: - version: " - 7.99.99" - reason: "Flat object fields are currently only implemented in 8.0." + version: " - 7.2.99" + reason: "Flat object fields were implemented in 7.3." - do: indices.create: @@ -54,8 +54,8 @@ --- "Test query string query on flattened object field": - skip: - version: " - 7.99.99" - reason: "Flat object fields are currently only implemented in 8.0." + version: " - 7.2.99" + reason: "Flat object fields were implemented in 7.3." 
- do: indices.create: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/indices.freeze/30_usage.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/indices.freeze/30_usage.yml index 9135c19f679..e254b2b49b9 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/indices.freeze/30_usage.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/indices.freeze/30_usage.yml @@ -10,8 +10,8 @@ setup: --- "Usage stats on frozen indices": - skip: - version: " - 7.9.99" - reason: "frozen indices have usage stats starting in version 8.0.0" + version: " - 7.3.99" + reason: "frozen indices have usage stats starting in version 7.4" - do: index: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_cat_apis.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_cat_apis.yml new file mode 100644 index 00000000000..4f45d2ec9ac --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_cat_apis.yml @@ -0,0 +1,86 @@ +setup: + - skip: + features: headers + - do: + indices.create: + index: index-source + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ml.put_data_frame_analytics: + id: dfa-outlier-detection-job + body: > + { + "source": { "index": "index-source" }, + "dest": { "index": "index-dest-od" }, + "analysis": {"outlier_detection": {}} + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ml.put_data_frame_analytics: + id: dfa-regression-job + body: > + { + "source": { "index": "index-source" }, + "dest": { "index": "index-dest-r" }, + "analysis": { "regression": { "dependent_variable": "dep_var" } } + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + ml.put_data_frame_analytics: + id: dfa-classification-job + body: > + { + "source": { "index": "index-source" }, + "dest": { "index": "index-dest-c" }, + "analysis": { "classification": { "dependent_variable": "dep_var" } } + } + +--- +"Test cat data frame analytics single job": + + - do: + cat.ml_data_frame_analytics: + id: dfa-outlier-detection-job + - match: + $body: | + / #id type create_time state + ^ (dfa\-outlier\-detection\-job \s+ outlier_detection \s+ [^\s]+ \s+ stopped \n)+ $/ + +--- +"Test cat data frame analytics single job with header": + - do: + cat.ml_data_frame_analytics: + id: dfa-outlier-detection-job + v: true + - match: + $body: | + /^ id \s+ type \s+ create_time \s+ state \n + (dfa\-outlier\-detection\-job \s+ outlier_detection \s+ [^\s]+ \s+ stopped \n)+ $/ + +--- +"Test cat data frame analytics all jobs with header": + - do: + cat.ml_data_frame_analytics: + v: true + - match: + $body: | + /^ id \s+ type \s+ create_time \s+ state \n + (dfa\-classification\-job \s+ classification \s+ [^\s]+ \s+ stopped \n)+ + (dfa\-outlier\-detection\-job \s+ outlier_detection \s+ [^\s]+ \s+ stopped \n)+ + (dfa\-regression\-job \s+ regression \s+ [^\s]+ \s+ stopped \n)+ $/ + +--- +"Test cat data frame analytics all jobs with header and column selection": + - do: + cat.ml_data_frame_analytics: + v: true + h: id,t,s,p,source_index,dest_index + - match: + $body: | + /^ id \s+ t \s+ s \s+ p \s+ source_index \s+ dest_index \n + (dfa\-classification\-job \s+ classification \s+ stopped \s+ reindexing:0,loading_data:0,analyzing:0,writing_results:0 \s+ index-source \s+ index-dest-c \n)+ + (dfa\-outlier\-detection\-job \s+ outlier_detection \s+ stopped \s+ reindexing:0,loading_data:0,analyzing:0,writing_results:0 \s+ index-source \s+ index-dest-od \n)+ + (dfa\-regression\-job \s+ regression \s+ stopped \s+ reindexing:0,loading_data:0,analyzing:0,writing_results:0 \s+ index-source \s+ index-dest-r \n)+ $/ diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml index fe385763091..cdf6d463123 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml @@ -247,7 +247,6 @@ teardown: - gte: { transforms.0.stats.search_time_in_ms: 0 } - lte: { transforms.0.stats.search_total: 1 } - match: { transforms.0.stats.search_failures: 0 } - - match: { transforms.0.stats.exponential_avg_checkpoint_duration_ms: 0.0 } - match: { transforms.0.stats.exponential_avg_documents_indexed: 0.0 } - match: { transforms.0.stats.exponential_avg_documents_processed: 0.0 } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/voting_only_node/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/voting_only_node/10_basic.yml index bba86b2ac0c..f7c4e97c036 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/voting_only_node/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/voting_only_node/10_basic.yml @@ -2,8 +2,8 @@ "cluster stats with voting only node stats": - skip: - version: " - 7.99.99" - reason: "voting only nodes are added in v8.0.0" + version: " - 7.2.99" + reason: "voting only nodes are added in v7.3" - do: cluster.stats: {} diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java index 8c80616fcd7..dfeb203d3ca 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java +++ 
b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java @@ -29,11 +29,9 @@ import static org.hamcrest.Matchers.oneOf; public class TransformGetAndGetStatsIT extends TransformRestTestCase { private static final String TEST_USER_NAME = "transform_user"; - private static final String BASIC_AUTH_VALUE_TRANSFORM_USER = - basicAuthHeaderValue(TEST_USER_NAME, TEST_PASSWORD_SECURE_STRING); + private static final String BASIC_AUTH_VALUE_TRANSFORM_USER = basicAuthHeaderValue(TEST_USER_NAME, TEST_PASSWORD_SECURE_STRING); private static final String TEST_ADMIN_USER_NAME = "transform_admin"; - private static final String BASIC_AUTH_VALUE_TRANSFORM_ADMIN = - basicAuthHeaderValue(TEST_ADMIN_USER_NAME, TEST_PASSWORD_SECURE_STRING); + private static final String BASIC_AUTH_VALUE_TRANSFORM_ADMIN = basicAuthHeaderValue(TEST_ADMIN_USER_NAME, TEST_PASSWORD_SECURE_STRING); private static boolean indicesCreated = false; @@ -101,13 +99,13 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { stats = entityAsMap(client().performRequest(getRequest)); assertEquals(3, XContentMapValues.extractValue("count", stats)); - List> transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + List> transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); // Verify that both transforms have valid stats for (Map transformStats : transformsStats) { - Map stat = (Map)transformStats.get("stats"); - assertThat("documents_processed is not > 0.", ((Integer)stat.get("documents_processed")), greaterThan(0)); - assertThat("search_total is not > 0.", ((Integer)stat.get("search_total")), greaterThan(0)); - assertThat("pages_processed is not > 0.", ((Integer)stat.get("pages_processed")), greaterThan(0)); + Map stat = (Map) transformStats.get("stats"); + assertThat("documents_processed is not > 0.", ((Integer) stat.get("documents_processed")), greaterThan(0)); + 
assertThat("search_total is not > 0.", ((Integer) stat.get("search_total")), greaterThan(0)); + assertThat("pages_processed is not > 0.", ((Integer) stat.get("pages_processed")), greaterThan(0)); /* TODO progress is now checkpoint progress and it may be that no checkpoint is in progress here Map progress = (Map)XContentMapValues.extractValue("checkpointing.next.checkpoint_progress", transformStats); @@ -122,7 +120,7 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { stats = entityAsMap(client().performRequest(getRequest)); assertEquals(1, XContentMapValues.extractValue("count", stats)); - transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); assertEquals(1, transformsStats.size()); assertEquals("stopped", XContentMapValues.extractValue("state", transformsStats.get(0))); assertNull(XContentMapValues.extractValue("checkpointing.next.position", transformsStats.get(0))); @@ -133,12 +131,11 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { stats = entityAsMap(client().performRequest(getRequest)); assertEquals(1, XContentMapValues.extractValue("count", stats)); - transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); assertEquals(1, transformsStats.size()); assertThat(XContentMapValues.extractValue("state", transformsStats.get(0)), oneOf("started", "indexing")); assertEquals(1, XContentMapValues.extractValue("checkpointing.last.checkpoint", transformsStats.get(0))); - // check all the different ways to retrieve all transforms getRequest = createRequestWithAuth("GET", getTransformEndpoint(), authHeader); Map transforms = entityAsMap(client().performRequest(getRequest)); @@ -165,12 +162,13 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { stopTransform("pivot_stats_1", false); // Get rid of the 
first transform task, but keep the configuration - client().performRequest(new Request("POST", "_tasks/_cancel?actions="+TransformField.TASK_NAME+"*")); + client().performRequest(new Request("POST", "_tasks/_cancel?actions=" + TransformField.TASK_NAME + "*")); // Verify that the task is gone - Map tasks = - entityAsMap(client().performRequest(new Request("GET", "_tasks?actions="+TransformField.TASK_NAME+"*"))); - assertTrue(((Map)XContentMapValues.extractValue("nodes", tasks)).isEmpty()); + Map tasks = entityAsMap( + client().performRequest(new Request("GET", "_tasks?actions=" + TransformField.TASK_NAME + "*")) + ); + assertTrue(((Map) XContentMapValues.extractValue("nodes", tasks)).isEmpty()); createPivotReviewsTransform("pivot_stats_2", "pivot_reviews_stats_2", null); startAndWaitForTransform("pivot_stats_2", "pivot_reviews_stats_2"); @@ -178,13 +176,13 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { Request getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "_stats", BASIC_AUTH_VALUE_TRANSFORM_ADMIN); Map stats = entityAsMap(client().performRequest(getRequest)); assertEquals(2, XContentMapValues.extractValue("count", stats)); - List> transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + List> transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); // Verify that both transforms, the one with the task and the one without have statistics for (Map transformStats : transformsStats) { - Map stat = (Map)transformStats.get("stats"); - assertThat(((Integer)stat.get("documents_processed")), greaterThan(0)); - assertThat(((Integer)stat.get("search_total")), greaterThan(0)); - assertThat(((Integer)stat.get("pages_processed")), greaterThan(0)); + Map stat = (Map) transformStats.get("stats"); + assertThat(((Integer) stat.get("documents_processed")), greaterThan(0)); + assertThat(((Integer) stat.get("search_total")), greaterThan(0)); + assertThat(((Integer) stat.get("pages_processed")), 
greaterThan(0)); } } @@ -202,13 +200,13 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { Request getRequest = createRequestWithAuth("GET", getTransformEndpoint() + transformId + "/_stats", authHeader); Map stats = entityAsMap(client().performRequest(getRequest)); assertEquals(1, XContentMapValues.extractValue("count", stats)); - List> transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + List> transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); // Verify that the transform has stats and the total docs process matches the expected for (Map transformStats : transformsStats) { - Map stat = (Map)transformStats.get("stats"); - assertThat("documents_processed is not > 0.", ((Integer)stat.get("documents_processed")), greaterThan(0)); - assertThat("search_total is not > 0.", ((Integer)stat.get("search_total")), greaterThan(0)); - assertThat("pages_processed is not > 0.", ((Integer)stat.get("pages_processed")), greaterThan(0)); + Map stat = (Map) transformStats.get("stats"); + assertThat("documents_processed is not > 0.", ((Integer) stat.get("documents_processed")), greaterThan(0)); + assertThat("search_total is not > 0.", ((Integer) stat.get("search_total")), greaterThan(0)); + assertThat("pages_processed is not > 0.", ((Integer) stat.get("pages_processed")), greaterThan(0)); /* TODO progress is now checkpoint progress and it may be that no checkpoint is in progress here Map progress = (Map)XContentMapValues.extractValue("checkpointing.next.checkpoint_progress", transformStats); @@ -226,8 +224,12 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { String transformSrc = "reviews_cont_pivot_test"; createReviewsIndex(transformSrc); final Request createTransformRequest = createRequestWithAuth("PUT", getTransformEndpoint() + transformId, null); - String config = "{ \"dest\": {\"index\":\"" + transformDest + "\"}," - + " \"source\": {\"index\":\"" + transformSrc + "\"}," + String 
config = "{ \"dest\": {\"index\":\"" + + transformDest + + "\"}," + + " \"source\": {\"index\":\"" + + transformSrc + + "\"}," + " \"frequency\": \"1s\"," + " \"sync\": {\"time\":{\"field\": \"timestamp\", \"delay\": \"1s\"}}," + " \"pivot\": {" @@ -251,20 +253,28 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { Request getRequest = createRequestWithAuth("GET", getTransformEndpoint() + transformId + "/_stats", null); Map stats = entityAsMap(client().performRequest(getRequest)); - List> transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + List> transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); assertEquals(1, transformsStats.size()); - // No continuous checkpoints have been seen and thus all exponential averages should be 0.0 + // No continuous checkpoints have been seen and thus all exponential averages should be equal to the batch stats for (Map transformStats : transformsStats) { - transformStats = (Map)transformStats.get("stats"); - assertThat("exponential_avg_checkpoint_duration_ms is not 0.0", - transformStats.get("exponential_avg_checkpoint_duration_ms"), - equalTo(0.0)); - assertThat("exponential_avg_documents_indexed is not 0.0", - transformStats.get("exponential_avg_documents_indexed"), - equalTo(0.0)); - assertThat("exponential_avg_documents_processed is not 0.0", + transformStats = (Map) transformStats.get("stats"); + assertThat(transformStats.get("documents_processed"), equalTo(1000)); + assertThat(transformStats.get("documents_indexed"), equalTo(27)); + assertThat( + "exponential_avg_checkpoint_duration_ms is not 0.0", + (Double) transformStats.get("exponential_avg_checkpoint_duration_ms"), + greaterThan(0.0) + ); + assertThat( + "exponential_avg_documents_indexed does not match documents_indexed", + (Double) transformStats.get("exponential_avg_documents_indexed"), + equalTo(((Integer) transformStats.get("documents_indexed")).doubleValue()) + ); + assertThat( + 
"exponential_avg_documents_processed does not match documents_processed", transformStats.get("exponential_avg_documents_processed"), - equalTo(0.0)); + equalTo(((Integer) transformStats.get("documents_processed")).doubleValue()) + ); } int numDocs = 10; @@ -296,23 +306,27 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { // We should now have exp avgs since we have processed a continuous checkpoint assertBusy(() -> { Map statsResponse = entityAsMap(client().performRequest(getRequest)); - List> contStats = (List>)XContentMapValues.extractValue("transforms", statsResponse); + List> contStats = (List>) XContentMapValues.extractValue("transforms", statsResponse); assertEquals(1, contStats.size()); for (Map transformStats : contStats) { - Map statsObj = (Map)transformStats.get("stats"); - assertThat("exponential_avg_checkpoint_duration_ms is 0", - (Double)statsObj.get("exponential_avg_checkpoint_duration_ms"), - greaterThan(0.0)); - assertThat("exponential_avg_documents_indexed is 0", - (Double)statsObj.get("exponential_avg_documents_indexed"), - greaterThan(0.0)); - assertThat("exponential_avg_documents_processed is 0", - (Double)statsObj.get("exponential_avg_documents_processed"), - greaterThan(0.0)); - Map checkpointing = (Map)transformStats.get("checkpointing"); - assertThat("changes_last_detected_at is null", - checkpointing.get("changes_last_detected_at"), - is(notNullValue())); + Map statsObj = (Map) transformStats.get("stats"); + assertThat( + "exponential_avg_checkpoint_duration_ms is 0", + (Double) statsObj.get("exponential_avg_checkpoint_duration_ms"), + greaterThan(0.0) + ); + assertThat( + "exponential_avg_documents_indexed is 0", + (Double) statsObj.get("exponential_avg_documents_indexed"), + greaterThan(0.0) + ); + assertThat( + "exponential_avg_documents_processed is 0", + (Double) statsObj.get("exponential_avg_documents_processed"), + greaterThan(0.0) + ); + Map checkpointing = (Map) transformStats.get("checkpointing"); + 
assertThat("changes_last_detected_at is null", checkpointing.get("changes_last_detected_at"), is(notNullValue())); } }, 120, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java index 1bc51981448..ede378b3b3e 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java @@ -70,13 +70,21 @@ public class TransformUsageIT extends TransformRestTestCase { Request getRequest = new Request("GET", getTransformEndpoint() + "test_usage/_stats"); Map stats = entityAsMap(client().performRequest(getRequest)); - Map expectedStats = new HashMap<>(); + Map expectedStats = new HashMap<>(); for (String statName : PROVIDED_STATS) { @SuppressWarnings("unchecked") - List specificStatistic = ((List) XContentMapValues.extractValue("transforms.stats." + statName, stats)); + List specificStatistic = (List) (XContentMapValues.extractValue("transforms.stats." + statName, stats)); assertNotNull(specificStatistic); - Integer statistic = (specificStatistic).get(0); - expectedStats.put(statName, statistic); + expectedStats.put(statName, extractStatsAsDouble(specificStatistic.get(0))); + } + + getRequest = new Request("GET", getTransformEndpoint() + "test_usage_continuous/_stats"); + stats = entityAsMap(client().performRequest(getRequest)); + for (String statName : PROVIDED_STATS) { + @SuppressWarnings("unchecked") + List specificStatistic = (List) (XContentMapValues.extractValue("transforms.stats." 
+ statName, stats)); + assertNotNull(specificStatistic); + expectedStats.compute(statName, (key, value) -> value + extractStatsAsDouble(specificStatistic.get(0))); } // Simply because we wait for continuous to reach checkpoint 1, does not mean that the statistics are written yet. @@ -96,8 +104,9 @@ public class TransformUsageIT extends TransformRestTestCase { } assertEquals( "Incorrect stat " + statName, - expectedStats.get(statName) * 2, - XContentMapValues.extractValue("transform.stats." + statName, statsMap) + expectedStats.get(statName).doubleValue(), + extractStatsAsDouble(XContentMapValues.extractValue("transform.stats." + statName, statsMap)), + 0.0001 ); } // Refresh the index so that statistics are searchable @@ -112,4 +121,14 @@ public class TransformUsageIT extends TransformRestTestCase { assertEquals(3, XContentMapValues.extractValue("transform.transforms._all", usageAsMap)); assertEquals(3, XContentMapValues.extractValue("transform.transforms.stopped", usageAsMap)); } + + private double extractStatsAsDouble(Object statsObject) { + if (statsObject instanceof Integer) { + return ((Integer) statsObject).doubleValue(); + } else if (statsObject instanceof Double) { + return (Double) statsObject; + } + fail("unexpected value type for stats"); + return 0; + } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java index 7900fffb41f..fba806a3970 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java @@ -36,8 +36,8 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; import 
org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; -import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.util.ArrayList; import java.util.Arrays; @@ -67,7 +67,10 @@ public class TransformFeatureSet implements XPackFeatureSet { TransformIndexerStats.INDEX_TOTAL.getPreferredName(), TransformIndexerStats.SEARCH_TOTAL.getPreferredName(), TransformIndexerStats.INDEX_FAILURES.getPreferredName(), - TransformIndexerStats.SEARCH_FAILURES.getPreferredName(), }; + TransformIndexerStats.SEARCH_FAILURES.getPreferredName(), + TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(), + TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(), + TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(), }; @Inject public TransformFeatureSet(Settings settings, ClusterService clusterService, Client client, @Nullable XPackLicenseState licenseState) { @@ -166,29 +169,32 @@ public class TransformFeatureSet implements XPackFeatureSet { } static TransformIndexerStats parseSearchAggs(SearchResponse searchResponse) { - List statisticsList = new ArrayList<>(PROVIDED_STATS.length); + List statisticsList = new ArrayList<>(PROVIDED_STATS.length); for (String statName : PROVIDED_STATS) { Aggregation agg = searchResponse.getAggregations().get(statName); if (agg instanceof NumericMetricsAggregation.SingleValue) { - statisticsList.add((long) ((NumericMetricsAggregation.SingleValue) agg).value()); + statisticsList.add(((NumericMetricsAggregation.SingleValue) agg).value()); } else { - statisticsList.add(0L); + statisticsList.add(0.0); } } return new TransformIndexerStats( - statisticsList.get(0), // numPages - statisticsList.get(1), // numInputDocuments - statisticsList.get(2), // 
numOutputDocuments - statisticsList.get(3), // numInvocations - statisticsList.get(4), // indexTime - statisticsList.get(5), // searchTime - statisticsList.get(6), // indexTotal - statisticsList.get(7), // searchTotal - statisticsList.get(8), // indexFailures - statisticsList.get(9) - ); // searchFailures + statisticsList.get(0).longValue(), // numPages + statisticsList.get(1).longValue(), // numInputDocuments + statisticsList.get(2).longValue(), // numOutputDocuments + statisticsList.get(3).longValue(), // numInvocations + statisticsList.get(4).longValue(), // indexTime + statisticsList.get(5).longValue(), // searchTime + statisticsList.get(6).longValue(), // indexTotal + statisticsList.get(7).longValue(), // searchTotal + statisticsList.get(8).longValue(), // indexFailures + statisticsList.get(9).longValue(), // searchFailures + statisticsList.get(10), // exponential_avg_checkpoint_duration_ms + statisticsList.get(11), // exponential_avg_documents_indexed + statisticsList.get(12) // exponential_avg_documents_processed + ); } static void getStatisticSummations(Client client, ActionListener statsListener) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestDeleteTransformActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestDeleteTransformActionDeprecated.java index 7340c658116..45700b5a203 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestDeleteTransformActionDeprecated.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestDeleteTransformActionDeprecated.java @@ -36,7 +36,7 @@ public class RestDeleteTransformActionDeprecated extends BaseRestHandler { @Override public List deprecatedRoutes() { return singletonList(new DeprecatedRoute(DELETE, TransformField.REST_BASE_PATH_TRANSFORMS_BY_ID_DEPRECATED, - 
TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger)); + TransformMessages.REST_DEPRECATED_ENDPOINT)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformActionDeprecated.java index 0f47320688d..a7ac8a4a0f0 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformActionDeprecated.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformActionDeprecated.java @@ -40,9 +40,9 @@ public class RestGetTransformActionDeprecated extends BaseRestHandler { public List deprecatedRoutes() { return unmodifiableList(asList( new DeprecatedRoute(GET, TransformField.REST_BASE_PATH_TRANSFORMS_DEPRECATED, - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger), + TransformMessages.REST_DEPRECATED_ENDPOINT), new DeprecatedRoute(GET, TransformField.REST_BASE_PATH_TRANSFORMS_BY_ID_DEPRECATED, - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger))); + TransformMessages.REST_DEPRECATED_ENDPOINT))); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformStatsActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformStatsActionDeprecated.java index 9b34769ca0e..c66a08467b9 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformStatsActionDeprecated.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestGetTransformStatsActionDeprecated.java @@ -40,9 +40,9 @@ public class RestGetTransformStatsActionDeprecated extends BaseRestHandler { public List deprecatedRoutes() { 
return unmodifiableList(asList( new DeprecatedRoute(GET, TransformField.REST_BASE_PATH_TRANSFORMS_DEPRECATED + "_stats", - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger), + TransformMessages.REST_DEPRECATED_ENDPOINT), new DeprecatedRoute(GET, TransformField.REST_BASE_PATH_TRANSFORMS_BY_ID_DEPRECATED + "_stats", - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger))); + TransformMessages.REST_DEPRECATED_ENDPOINT))); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPreviewTransformActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPreviewTransformActionDeprecated.java index 97f66c29c27..5f61a7da460 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPreviewTransformActionDeprecated.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPreviewTransformActionDeprecated.java @@ -38,7 +38,7 @@ public class RestPreviewTransformActionDeprecated extends BaseRestHandler { @Override public List deprecatedRoutes() { return singletonList(new DeprecatedRoute(POST, TransformField.REST_BASE_PATH_TRANSFORMS_DEPRECATED + "_preview", - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger)); + TransformMessages.REST_DEPRECATED_ENDPOINT)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPutTransformActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPutTransformActionDeprecated.java index 9ceb7da78a7..05353636ba8 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPutTransformActionDeprecated.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestPutTransformActionDeprecated.java @@ -38,7 +38,7 @@ public class RestPutTransformActionDeprecated extends BaseRestHandler { @Override public List deprecatedRoutes() { return singletonList(new DeprecatedRoute(PUT, TransformField.REST_BASE_PATH_TRANSFORMS_BY_ID_DEPRECATED, - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger)); + TransformMessages.REST_DEPRECATED_ENDPOINT)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStartTransformActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStartTransformActionDeprecated.java index 322b5f3eb45..f5436f0c1c0 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStartTransformActionDeprecated.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStartTransformActionDeprecated.java @@ -37,7 +37,7 @@ public class RestStartTransformActionDeprecated extends BaseRestHandler { @Override public List deprecatedRoutes() { return singletonList(new DeprecatedRoute(POST, TransformField.REST_BASE_PATH_TRANSFORMS_BY_ID_DEPRECATED + "_start", - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger)); + TransformMessages.REST_DEPRECATED_ENDPOINT)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStopTransformActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStopTransformActionDeprecated.java index b308fc98015..f15ffcad483 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStopTransformActionDeprecated.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestStopTransformActionDeprecated.java @@ -36,7 +36,7 @@ public class RestStopTransformActionDeprecated extends BaseRestHandler { @Override public List deprecatedRoutes() { return singletonList(new DeprecatedRoute(POST, TransformField.REST_BASE_PATH_TRANSFORMS_BY_ID_DEPRECATED + "_stop", - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger)); + TransformMessages.REST_DEPRECATED_ENDPOINT)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestUpdateTransformActionDeprecated.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestUpdateTransformActionDeprecated.java index ff9752f7767..7a8cc86fe93 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestUpdateTransformActionDeprecated.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/compat/RestUpdateTransformActionDeprecated.java @@ -38,7 +38,7 @@ public class RestUpdateTransformActionDeprecated extends BaseRestHandler { @Override public List deprecatedRoutes() { return singletonList(new DeprecatedRoute(POST, TransformField.REST_BASE_PATH_TRANSFORMS_BY_ID_DEPRECATED + "_update", - TransformMessages.REST_DEPRECATED_ENDPOINT, deprecationLogger)); + TransformMessages.REST_DEPRECATED_ENDPOINT)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 6b282131e9d..60afee087fb 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -19,7 +19,6 @@ import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.ScriptException; @@ -42,7 +41,6 @@ import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; -import org.elasticsearch.xpack.transform.transforms.pivot.AggregationResultUtils; import org.elasticsearch.xpack.transform.transforms.pivot.Pivot; import org.elasticsearch.xpack.transform.utils.ExceptionRootCauseFinder; @@ -287,7 +285,7 @@ public abstract class TransformIndexer extends AsyncTwoPhaseIndexer 1) { + + if (lastCheckpoint != null) { long docsIndexed = 0; long docsProcessed = 0; // This should not happen as we simply create a new one when we reach continuous checkpoints @@ -477,37 +474,54 @@ public abstract class TransformIndexer extends AsyncTwoPhaseIndexer context.getNumFailureRetries()) { - failIndexer( - "task encountered more than " - + context.getNumFailureRetries() - + " failures; latest failure: " - + ExceptionRootCauseFinder.getDetailedMessage(unwrappedException) - ); - } else { - // Since our schedule fires again very quickly after failures it is possible to run into the same failure numerous - // times in a row, very quickly. 
We do not want to spam the audit log with repeated failures, so only record the first one - if (e.getMessage().equals(lastAuditedExceptionMessage) == false) { - String message = ExceptionRootCauseFinder.getDetailedMessage(unwrappedException); + return; + } - auditor.warning( - getJobId(), - "Transform encountered an exception: " + message + " Will attempt again at next scheduled trigger." - ); - lastAuditedExceptionMessage = message; - } + if (unwrappedException instanceof ScriptException) { + handleScriptException((ScriptException) unwrappedException); + return; + } + + if (unwrappedException instanceof BulkIndexingException && ((BulkIndexingException) unwrappedException).isIrrecoverable()) { + handleIrrecoverableBulkIndexingException((BulkIndexingException) unwrappedException); + return; + } + + // irrecoverable error without special handling + if (unwrappedException instanceof ElasticsearchException) { + ElasticsearchException elasticsearchException = (ElasticsearchException) unwrappedException; + if (ExceptionRootCauseFinder.IRRECOVERABLE_REST_STATUSES.contains(elasticsearchException.status())) { + failIndexer("task encountered irrecoverable failure: " + elasticsearchException.getDetailedMessage()); + return; } + } + + if (unwrappedException instanceof IllegalArgumentException) { + failIndexer("task encountered irrecoverable failure: " + e.getMessage()); + return; + } + + if (context.getAndIncrementFailureCount() > context.getNumFailureRetries()) { + failIndexer( + "task encountered more than " + + context.getNumFailureRetries() + + " failures; latest failure: " + + ExceptionRootCauseFinder.getDetailedMessage(unwrappedException) + ); + return; + } + + // Since our schedule fires again very quickly after failures it is possible to run into the same failure numerous + // times in a row, very quickly. 
We do not want to spam the audit log with repeated failures, so only record the first one + if (e.getMessage().equals(lastAuditedExceptionMessage) == false) { + String message = ExceptionRootCauseFinder.getDetailedMessage(unwrappedException); + + auditor.warning( + getJobId(), + "Transform encountered an exception: " + message + " Will attempt again at next scheduled trigger." + ); + lastAuditedExceptionMessage = message; + } } /** @@ -901,8 +915,12 @@ public abstract class TransformIndexer extends AsyncTwoPhaseIndexer IRRECOVERABLE_REST_STATUSES = new HashSet<>( + Arrays.asList( + RestStatus.GONE, + RestStatus.NOT_IMPLEMENTED, + RestStatus.NOT_FOUND, + RestStatus.BAD_REQUEST, + RestStatus.UNAUTHORIZED, + RestStatus.FORBIDDEN, + RestStatus.METHOD_NOT_ALLOWED, + RestStatus.NOT_ACCEPTABLE + ) + ); + /** * Unwrap the exception stack and return the most likely cause. * @@ -61,17 +79,22 @@ public final class ExceptionRootCauseFinder { /** * Return the first irrecoverableException from a collection of bulk responses if there are any. 
* - * @param failures a collection of bulk item responses + * @param failures a collection of bulk item responses with failures * @return The first exception considered irrecoverable if there are any, null if no irrecoverable exception found */ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collection failures) { for (BulkItemResponse failure : failures) { Throwable unwrappedThrowable = org.elasticsearch.ExceptionsHelper.unwrapCause(failure.getFailure().getCause()); - if (unwrappedThrowable instanceof MapperParsingException - || unwrappedThrowable instanceof IllegalArgumentException - || unwrappedThrowable instanceof ResourceNotFoundException) { + if (unwrappedThrowable instanceof IllegalArgumentException) { return unwrappedThrowable; } + + if (unwrappedThrowable instanceof ElasticsearchException) { + ElasticsearchException elasticsearchException = (ElasticsearchException) unwrappedThrowable; + if (IRRECOVERABLE_REST_STATUSES.contains(elasticsearchException.status())) { + return elasticsearchException; + } + } } return null; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformFeatureSetTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformFeatureSetTests.java index 68e8e43916e..b34b9d803a4 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformFeatureSetTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformFeatureSetTests.java @@ -135,8 +135,11 @@ public class TransformFeatureSetTests extends ESTestCase { 7, // indexTotal 8, // searchTotal 9, // indexFailures - 10 - ); // searchFailures + 10, // searchFailures + 11.0, // exponential_avg_checkpoint_duration_ms + 12.0, // exponential_avg_documents_indexed + 13.0 // exponential_avg_documents_processed + ); int currentStat = 1; List aggs = new ArrayList<>(PROVIDED_STATS.length); diff --git 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java new file mode 100644 index 00000000000..7e5cf02ce62 --- /dev/null +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.transform.utils; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.DocWriteRequest.OpType; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.translog.TranslogException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +public class ExceptionRootCauseFinderTests extends ESTestCase { + public void testFetFirstIrrecoverableExceptionFromBulkResponses() { + Map bulkItemResponses = new HashMap<>(); + + int id = 1; + // 1 + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure("the_index", "type", "id", new MapperParsingException("mapper parsing error")) + ) + ); + // 2 + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure("the_index", "type", "id", new ResourceNotFoundException("resource not found error")) + ) + ); + // 3 + 
bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure("the_index", "type", "id", new IllegalArgumentException("illegal argument error")) + ) + ); + // 4 not irrecoverable + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure("the_index", "type", "id", new EsRejectedExecutionException("es rejected execution")) + ) + ); + // 5 not irrecoverable + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure( + "the_index", + "type", + "id", + new TranslogException(new ShardId("the_index", "uid", 0), "translog error") + ) + ) + ); + // 6 + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure( + "the_index", + "type", + "id", + new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED) + ) + ) + ); + // 7 + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure( + "the_index", + "type", + "id", + new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN) + ) + ) + ); + // 8 not irrecoverable + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure( + "the_index", + "type", + "id", + new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS) + ) + ) + ); + // 9 not irrecoverable + bulkItemResponses.put( + id, + new BulkItemResponse( + id++, + OpType.INDEX, + new BulkItemResponse.Failure( + "the_index", + "type", + "id", + new ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR) + ) + ) + ); + + assertFirstException(bulkItemResponses.values(), MapperParsingException.class, "mapper parsing error"); + bulkItemResponses.remove(1); + assertFirstException(bulkItemResponses.values(), ResourceNotFoundException.class, 
"resource not found error"); + bulkItemResponses.remove(2); + assertFirstException(bulkItemResponses.values(), IllegalArgumentException.class, "illegal argument error"); + bulkItemResponses.remove(3); + assertFirstException(bulkItemResponses.values(), ElasticsearchSecurityException.class, "Authentication required"); + bulkItemResponses.remove(6); + assertFirstException( + bulkItemResponses.values(), + ElasticsearchSecurityException.class, + "current license is non-compliant for [transform]" + ); + bulkItemResponses.remove(7); + + assertNull(ExceptionRootCauseFinder.getFirstIrrecoverableExceptionFromBulkResponses(bulkItemResponses.values())); + } + + private static void assertFirstException(Collection bulkItemResponses, Class expectedClass, String message) { + Throwable t = ExceptionRootCauseFinder.getFirstIrrecoverableExceptionFromBulkResponses(bulkItemResponses); + assertNotNull(t); + assertEquals(t.getClass(), expectedClass); + assertEquals(t.getMessage(), message); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVIndexFieldData.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVIndexFieldData.java index 04922c41314..eebdc0b8b48 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVIndexFieldData.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVIndexFieldData.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.vectors.query; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.SortField; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -19,7 +20,10 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; 
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; public class VectorDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData { @@ -35,6 +39,12 @@ public class VectorDVIndexFieldData extends DocValuesIndexFieldData implements I throw new IllegalArgumentException("can't sort on the vector field"); } + @Override + public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested, + SortOrder sortOrder, DocValueFormat format) { + throw new IllegalArgumentException("only supported on numeric fields"); + } + @Override public VectorDVAtomicFieldData load(LeafReaderContext context) { return new VectorDVAtomicFieldData(context.reader(), fieldName, isDense); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java index 1582fd01962..8e9195fca18 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.watcher.rest.action; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; @@ -34,8 +32,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; */ public class RestAckWatchAction extends WatcherRestHandler { - private static final DeprecationLogger deprecationLogger 
= new DeprecationLogger(LogManager.getLogger(RestAckWatchAction.class)); - @Override public List routes() { return emptyList(); @@ -44,14 +40,14 @@ public class RestAckWatchAction extends WatcherRestHandler { @Override public List replacedRoutes() { return unmodifiableList(asList( - new ReplacedRoute(POST, "/_watcher/watch/{id}/_ack", POST, URI_BASE + "/watcher/watch/{id}/_ack", deprecationLogger), - new ReplacedRoute(PUT, "/_watcher/watch/{id}/_ack", PUT, URI_BASE + "/watcher/watch/{id}/_ack", deprecationLogger), + new ReplacedRoute(POST, "/_watcher/watch/{id}/_ack", POST, URI_BASE + "/watcher/watch/{id}/_ack"), + new ReplacedRoute(PUT, "/_watcher/watch/{id}/_ack", PUT, URI_BASE + "/watcher/watch/{id}/_ack"), new ReplacedRoute( POST, "/_watcher/watch/{id}/_ack/{actions}", - POST, URI_BASE + "/watcher/watch/{id}/_ack/{actions}", deprecationLogger), + POST, URI_BASE + "/watcher/watch/{id}/_ack/{actions}"), new ReplacedRoute( PUT, "/_watcher/watch/{id}/_ack/{actions}", - PUT, URI_BASE + "/watcher/watch/{id}/_ack/{actions}", deprecationLogger))); + PUT, URI_BASE + "/watcher/watch/{id}/_ack/{actions}"))); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java index ba9ac0b64a5..d39949cf055 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.watcher.rest.action; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; @@ -34,8 +32,6 @@ import static 
org.elasticsearch.rest.RestRequest.Method.PUT; */ public class RestActivateWatchAction extends WatcherRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestActivateWatchAction.class)); - @Override public List routes() { return emptyList(); @@ -44,8 +40,8 @@ public class RestActivateWatchAction extends WatcherRestHandler { @Override public List replacedRoutes() { return unmodifiableList(asList( - new ReplacedRoute(POST, "/_watcher/watch/{id}/_activate", POST, URI_BASE + "/watcher/watch/{id}/_activate", deprecationLogger), - new ReplacedRoute(PUT, "/_watcher/watch/{id}/_activate", PUT, URI_BASE + "/watcher/watch/{id}/_activate", deprecationLogger))); + new ReplacedRoute(POST, "/_watcher/watch/{id}/_activate", POST, URI_BASE + "/watcher/watch/{id}/_activate"), + new ReplacedRoute(PUT, "/_watcher/watch/{id}/_activate", PUT, URI_BASE + "/watcher/watch/{id}/_activate"))); } @Override @@ -79,10 +75,10 @@ public class RestActivateWatchAction extends WatcherRestHandler { return unmodifiableList(asList( new ReplacedRoute( POST, "/_watcher/watch/{id}/_deactivate", - POST, URI_BASE + "/watcher/watch/{id}/_deactivate", deprecationLogger), + POST, URI_BASE + "/watcher/watch/{id}/_deactivate"), new ReplacedRoute( PUT, "/_watcher/watch/{id}/_deactivate", - PUT, URI_BASE + "/watcher/watch/{id}/_deactivate", deprecationLogger))); + PUT, URI_BASE + "/watcher/watch/{id}/_deactivate"))); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java index 18dac6ac8b9..99ce8523c94 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java @@ -6,8 +6,6 @@ package 
org.elasticsearch.xpack.watcher.rest.action; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; @@ -29,8 +27,6 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestDeleteWatchAction extends WatcherRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteWatchAction.class)); - @Override public List routes() { return emptyList(); @@ -39,7 +35,7 @@ public class RestDeleteWatchAction extends WatcherRestHandler { @Override public List replacedRoutes() { return singletonList( - new ReplacedRoute(DELETE, "/_watcher/watch/{id}", DELETE, URI_BASE + "/watcher/watch/{id}", deprecationLogger)); + new ReplacedRoute(DELETE, "/_watcher/watch/{id}", DELETE, URI_BASE + "/watcher/watch/{id}")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java index d33c7c6cb0c..e2e102cf559 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java @@ -6,11 +6,9 @@ package org.elasticsearch.xpack.watcher.rest.action; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -44,8 +42,6 @@ 
import static org.elasticsearch.xpack.watcher.rest.action.RestExecuteWatchAction public class RestExecuteWatchAction extends WatcherRestHandler implements RestRequestFilter { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestExecuteWatchAction.class)); - private static final List RESERVED_FIELD_NAMES = asList(WatchField.TRIGGER.getPreferredName(), WatchField.INPUT.getPreferredName(), WatchField.CONDITION.getPreferredName(), WatchField.ACTIONS.getPreferredName(), WatchField.TRANSFORM.getPreferredName(), @@ -60,10 +56,10 @@ public class RestExecuteWatchAction extends WatcherRestHandler implements RestRe @Override public List replacedRoutes() { return unmodifiableList(asList( - new ReplacedRoute(POST, "/_watcher/watch/{id}/_execute", POST, URI_BASE + "/watcher/watch/{id}/_execute", deprecationLogger), - new ReplacedRoute(PUT, "/_watcher/watch/{id}/_execute", PUT, URI_BASE + "/watcher/watch/{id}/_execute", deprecationLogger), - new ReplacedRoute(POST, "/_watcher/watch/_execute", POST, URI_BASE + "/watcher/watch/_execute", deprecationLogger), - new ReplacedRoute(PUT, "/_watcher/watch/_execute", PUT, URI_BASE + "/watcher/watch/_execute", deprecationLogger))); + new ReplacedRoute(POST, "/_watcher/watch/{id}/_execute", POST, URI_BASE + "/watcher/watch/{id}/_execute"), + new ReplacedRoute(PUT, "/_watcher/watch/{id}/_execute", PUT, URI_BASE + "/watcher/watch/{id}/_execute"), + new ReplacedRoute(POST, "/_watcher/watch/_execute", POST, URI_BASE + "/watcher/watch/_execute"), + new ReplacedRoute(PUT, "/_watcher/watch/_execute", PUT, URI_BASE + "/watcher/watch/_execute"))); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java index 56abbbe2d59..eba76a98034 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.watcher.rest.action; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; @@ -29,8 +27,6 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestGetWatchAction extends WatcherRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetWatchAction.class)); - @Override public List routes() { return emptyList(); @@ -38,7 +34,7 @@ public class RestGetWatchAction extends WatcherRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(GET, "/_watcher/watch/{id}", GET, URI_BASE + "/watcher/watch/{id}", deprecationLogger)); + return singletonList(new ReplacedRoute(GET, "/_watcher/watch/{id}", GET, URI_BASE + "/watcher/watch/{id}")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java index 1214b43f0f3..7b0813c87ef 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.watcher.rest.action; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.util.set.Sets; import 
org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,8 +34,6 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestPutWatchAction extends WatcherRestHandler implements RestRequestFilter { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutWatchAction.class)); - @Override public List routes() { return emptyList(); @@ -46,8 +42,8 @@ public class RestPutWatchAction extends WatcherRestHandler implements RestReques @Override public List replacedRoutes() { return unmodifiableList(asList( - new ReplacedRoute(POST, "/_watcher/watch/{id}", POST, URI_BASE + "/watcher/watch/{id}", deprecationLogger), - new ReplacedRoute(PUT, "/_watcher/watch/{id}", PUT, URI_BASE + "/watcher/watch/{id}", deprecationLogger))); + new ReplacedRoute(POST, "/_watcher/watch/{id}", POST, URI_BASE + "/watcher/watch/{id}"), + new ReplacedRoute(PUT, "/_watcher/watch/{id}", PUT, URI_BASE + "/watcher/watch/{id}"))); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java index 0ef08c545a6..57409420c53 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.watcher.rest.action; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; @@ -22,8 +20,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestWatchServiceAction extends WatcherRestHandler { - private static final 
DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestWatchServiceAction.class)); - @Override public List routes() { return emptyList(); @@ -31,7 +27,7 @@ public class RestWatchServiceAction extends WatcherRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(POST, "/_watcher/_start", POST, URI_BASE + "/watcher/_start", deprecationLogger)); + return singletonList(new ReplacedRoute(POST, "/_watcher/_start", POST, URI_BASE + "/watcher/_start")); } @Override @@ -53,7 +49,7 @@ public class RestWatchServiceAction extends WatcherRestHandler { @Override public List replacedRoutes() { - return singletonList(new ReplacedRoute(POST, "/_watcher/_stop", POST, URI_BASE + "/watcher/_stop", deprecationLogger)); + return singletonList(new ReplacedRoute(POST, "/_watcher/_stop", POST, URI_BASE + "/watcher/_stop")); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java index e712e1cf545..a5574a5dba3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java @@ -37,8 +37,8 @@ public class RestWatcherStatsAction extends WatcherRestHandler { @Override public List replacedRoutes() { return unmodifiableList(asList( - new ReplacedRoute(GET, "/_watcher/stats", GET, URI_BASE + "/watcher/stats", deprecationLogger), - new ReplacedRoute(GET, "/_watcher/stats/{metric}", GET, URI_BASE + "/watcher/stats/{metric}", deprecationLogger))); + new ReplacedRoute(GET, "/_watcher/stats", GET, URI_BASE + "/watcher/stats"), + new ReplacedRoute(GET, "/_watcher/stats/{metric}", GET, URI_BASE + "/watcher/stats/{metric}"))); } @Override diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java index 49dc0a8b82e..5141854f915 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java @@ -75,7 +75,8 @@ public class WatcherPluginTests extends ESTestCase { IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(Watch.INDEX, settings); AnalysisRegistry registry = new AnalysisRegistry(TestEnvironment.newEnvironment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap()); - IndexModule indexModule = new IndexModule(indexSettings, registry, new InternalEngineFactory(), Collections.emptyMap()); + IndexModule indexModule = new IndexModule(indexSettings, registry, new InternalEngineFactory(), Collections.emptyMap(), + () -> true); // this will trip an assertion if the watcher indexing operation listener is null (which it is) but we try to add it watcher.onIndexModule(indexModule); diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java index 452f8d48fc2..461378e27da 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java @@ -11,22 +11,21 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.WarningFailureException; import org.elasticsearch.common.Strings; -import 
org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase; -import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; import org.junit.Before; @@ -39,8 +38,8 @@ import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClusterRestartTestCase { @@ -48,24 +47,6 @@ public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClust private static final String OLD_CLUSTER_JOB_ID = "ml-config-mappings-old-cluster-job"; private static final String NEW_CLUSTER_JOB_ID = "ml-config-mappings-new-cluster-job"; - private static 
final Map EXPECTED_DATA_FRAME_ANALYSIS_MAPPINGS = getDataFrameAnalysisMappings(); - - @SuppressWarnings("unchecked") - private static Map getDataFrameAnalysisMappings() { - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.startObject(); - ElasticsearchMappings.addDataFrameAnalyticsFields(builder); - builder.endObject(); - - Map asMap = builder.generator().contentType().xContent().createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()).map(); - return (Map) asMap.get(DataFrameAnalyticsConfig.ANALYSIS.getPreferredName()); - } catch (IOException e) { - fail("Failed to initialize expected data frame analysis mappings"); - } - return null; - } - @Override protected Settings restClientSettings() { String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); @@ -90,16 +71,16 @@ public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClust createAnomalyDetectorJob(OLD_CLUSTER_JOB_ID); if (getOldClusterVersion().onOrAfter(Version.V_7_3_0)) { // .ml-config has mappings for analytics as the feature was introduced in 7.3.0 - assertThat(mappingsForDataFrameAnalysis(), is(notNullValue())); + assertThat(getDataFrameAnalysisMappings().keySet(), hasItem("outlier_detection")); } else { // .ml-config does not yet have correct mappings, it will need an update after cluster is upgraded - assertThat(mappingsForDataFrameAnalysis(), is(nullValue())); + assertThat(getDataFrameAnalysisMappings(), is(nullValue())); } } else { // trigger .ml-config index mappings update createAnomalyDetectorJob(NEW_CLUSTER_JOB_ID); // assert that the mappings are updated - assertThat(mappingsForDataFrameAnalysis(), is(equalTo(EXPECTED_DATA_FRAME_ANALYSIS_MAPPINGS))); + assertThat(getDataFrameAnalysisMappings(), equalTo(loadDataFrameAnalysisMappings())); } } @@ -110,8 +91,7 @@ public class MlConfigIndexMappingsFullClusterRestartIT 
extends AbstractFullClust } private void createAnomalyDetectorJob(String jobId) throws IOException { - Detector.Builder detector = new Detector.Builder("metric", "responsetime") - .setByFieldName("airline"); + Detector.Builder detector = new Detector.Builder("metric", "responsetime"); AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())) .setBucketSpan(TimeValue.timeValueMinutes(10)); Job.Builder job = new Job.Builder(jobId) @@ -125,7 +105,7 @@ public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClust } @SuppressWarnings("unchecked") - private Map mappingsForDataFrameAnalysis() throws Exception { + private Map getConfigIndexMappings() throws Exception { Request getIndexMappingsRequest = new Request("GET", ".ml-config/_mappings"); Response getIndexMappingsResponse; try { @@ -140,7 +120,25 @@ public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClust if (mappings.containsKey("doc")) { mappings = (Map) XContentMapValues.extractValue(mappings, "doc"); } - mappings = (Map) XContentMapValues.extractValue(mappings, "properties", "analysis"); + mappings = (Map) XContentMapValues.extractValue(mappings, "properties"); return mappings; } + + @SuppressWarnings("unchecked") + private Map getDataFrameAnalysisMappings() throws Exception { + Map mappings = getConfigIndexMappings(); + mappings = (Map) XContentMapValues.extractValue(mappings, "analysis", "properties"); + return mappings; + } + + @SuppressWarnings("unchecked") + private Map loadDataFrameAnalysisMappings() throws IOException { + String mapping = MlConfigIndex.mapping(); + try (XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, new BytesArray(mapping).streamInput())) { + Map mappings = parser.map(); + mappings = (Map) XContentMapValues.extractValue(mappings, "_doc", "properties", "analysis", "properties"); + return mappings; + } + } } diff 
--git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java index 13ed2dafc5f..6d984317288 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java @@ -99,6 +99,7 @@ public class MlMappingsUpgradeIT extends AbstractUpgradeTestCase { assertNotNull(indexLevel); Map mappingsLevel = (Map) indexLevel.get("mappings"); assertNotNull(mappingsLevel); + Map metaLevel = (Map) mappingsLevel.get("_meta"); assertEquals(Collections.singletonMap("version", Version.CURRENT.toString()), metaLevel); Map propertiesLevel = (Map) mappingsLevel.get("properties"); diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml index b084cd8b0fc..c24318e0b19 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml @@ -123,8 +123,8 @@ setup: --- "Put job and datafeed with aggs in old cluster - deprecated interval with warning": - skip: - version: " - 7.99.99" #TODO change this after backport - reason: calendar_interval introduced in 7.1.0 + version: " - 7.1.99" + reason: calendar_interval introduced in 7.2.0 features: warnings - do: diff --git a/x-pack/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/PreventFailingBuildIT.java b/x-pack/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/PreventFailingBuildIT.java deleted file mode 100644 index 2c2cdd044aa..00000000000 --- 
a/x-pack/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/PreventFailingBuildIT.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.smoketest; - -import org.elasticsearch.test.ESIntegTestCase; - -public class PreventFailingBuildIT extends ESIntegTestCase { - - public void testSoThatTestsDoNotFail() { - // Noop - - // This is required because if tests are not enable no - // tests will be run in the entire project and all tests will fail. - } -}