From 4da04616c9db51469625469ef7f9b51d4545a6e1 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Sun, 10 Mar 2019 14:54:02 -0500 Subject: [PATCH] [ML] refactoring lazy query and agg parsing (#39776) (#39881) * [ML] refactoring lazy query and agg parsing * Clean up and addressing PR comments * removing unnecessary try/catch block * removing bad call to logger * removing unused import * fixing bwc test failure due to serialization and config migrator test * fixing style issues * Adjusting DafafeedUpdate class serialization * Adding todo for refactor in v8 * Making query non-optional so it does not write a boolean byte --- .../elasticsearch/search/SearchModule.java | 10 + .../deprecation/DeprecationInfoAction.java | 8 +- .../xpack/core/ml/MlMetadata.java | 5 +- .../xpack/core/ml/datafeed/AggProvider.java | 157 +++++++++ .../core/ml/datafeed/DatafeedConfig.java | 297 ++++++++---------- .../ml/datafeed/DatafeedJobValidator.java | 17 +- .../core/ml/datafeed/DatafeedUpdate.java | 146 ++++----- .../xpack/core/ml/datafeed/QueryProvider.java | 162 ++++++++++ .../xpack/core/ml/job/messages/Messages.java | 4 +- .../ml/utils/XContentObjectTransformer.java | 18 +- .../DeprecationInfoActionResponseTests.java | 13 +- .../core/ml/datafeed/AggProviderTests.java | 179 +++++++++++ .../core/ml/datafeed/DatafeedConfigTests.java | 253 ++++++--------- .../core/ml/datafeed/DatafeedUpdateTests.java | 145 +++++---- .../core/ml/datafeed/QueryProviderTests.java | 185 +++++++++++ .../utils/XContentObjectTransformerTests.java | 26 +- .../xpack/deprecation/DeprecationChecks.java | 3 +- .../deprecation/MlDeprecationChecks.java | 9 +- .../TransportDeprecationInfoAction.java | 7 +- .../deprecation/MlDeprecationChecksTests.java | 15 +- .../ml/integration/DelayedDataDetectorIT.java | 8 +- .../ml/integration/MlNativeIntegTestCase.java | 7 + .../xpack/ml/MachineLearning.java | 12 +- .../TransportPreviewDatafeedAction.java | 7 +- .../ml/action/TransportPutDatafeedAction.java | 6 +- 
.../action/TransportStartDatafeedAction.java | 26 +- .../action/TransportUpdateDatafeedAction.java | 2 +- .../xpack/ml/datafeed/DatafeedJobBuilder.java | 14 +- .../DelayedDataDetectorFactory.java | 9 +- .../extractor/DataExtractorFactory.java | 15 +- .../AggregationDataExtractorFactory.java | 11 +- .../RollupDataExtractorFactory.java | 19 +- .../chunked/ChunkedDataExtractorFactory.java | 15 +- .../scroll/ScrollDataExtractorFactory.java | 16 +- .../xpack/ml/job/JobManager.java | 5 +- .../ml/job/persistence/JobConfigProvider.java | 6 +- ...lConfigMigrationEligibilityCheckTests.java | 16 +- .../xpack/ml/MlConfigMigratorTests.java | 23 +- .../xpack/ml/MlMetadataTests.java | 6 +- .../xpack/ml/MlSingleNodeTestCase.java | 9 + .../TransportStartDatafeedActionTests.java | 27 +- .../datafeed/DatafeedJobValidatorTests.java | 47 +-- .../DelayedDataDetectorFactoryTests.java | 22 +- .../extractor/DataExtractorFactoryTests.java | 32 +- .../AggregationDataExtractorFactoryTests.java | 12 +- .../ChunkedDataExtractorFactoryTests.java | 13 +- .../ml/integration/JobConfigProviderIT.java | 2 +- .../ml/integration/MlConfigMigratorIT.java | 19 +- .../xpack/ml/job/JobManagerTests.java | 10 +- 49 files changed, 1441 insertions(+), 634 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 81c6273ec1a..8db06fd16c5 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ 
b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -299,6 +299,16 @@ public class SearchModule { private final List namedWriteables = new ArrayList<>(); private final List namedXContents = new ArrayList<>(); + /** + * Constructs a new SearchModule object + * + * NOTE: This constructor should not be called in production unless an accurate {@link Settings} object is provided. + * When constructed, a static flag is set in Lucene {@link BooleanQuery#setMaxClauseCount} according to the settings. + * + * @param settings Current settings + * @param transportClient Is this being constructed in the TransportClient or not + * @param plugins List of included {@link SearchPlugin} objects. + */ public SearchModule(Settings settings, boolean transportClient, List plugins) { this.settings = settings; this.transportClient = transportClient; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java index b917dbf260c..28aa09f6c1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -34,6 +35,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.BiFunction; import java.util.function.Function; 
import java.util.stream.Collectors; @@ -187,19 +189,21 @@ public class DeprecationInfoAction extends Action datafeeds, NodesDeprecationCheckResponse nodeDeprecationResponse, List> indexSettingsChecks, List> clusterSettingsChecks, - List> mlSettingsCheck) { + List> + mlSettingsCheck) { List clusterSettingsIssues = filterChecks(clusterSettingsChecks, (c) -> c.apply(state)); List nodeSettingsIssues = mergeNodeIssues(nodeDeprecationResponse); List mlSettingsIssues = new ArrayList<>(); for (DatafeedConfig config : datafeeds) { - mlSettingsIssues.addAll(filterChecks(mlSettingsCheck, (c) -> c.apply(config))); + mlSettingsIssues.addAll(filterChecks(mlSettingsCheck, (c) -> c.apply(config, xContentRegistry))); } String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, indicesOptions, indices); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index 54c83e9a88a..2ad999d82ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -302,7 +303,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { return this; } - public Builder putDatafeed(DatafeedConfig datafeedConfig, Map headers) { + public Builder putDatafeed(DatafeedConfig datafeedConfig, Map headers, NamedXContentRegistry xContentRegistry) { if 
(datafeeds.containsKey(datafeedConfig.getId())) { throw ExceptionsHelper.datafeedAlreadyExists(datafeedConfig.getId()); } @@ -310,7 +311,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { String jobId = datafeedConfig.getJobId(); checkJobIsAvailableForDatafeed(jobId); Job job = jobs.get(jobId); - DatafeedJobValidator.validate(datafeedConfig, job); + DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry); if (headers.isEmpty() == false) { // Adjust the request, adding security headers from the current thread context diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java new file mode 100644 index 00000000000..7982cffb01d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.datafeed; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; + +import java.io.IOException; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Objects; + +class AggProvider implements Writeable, ToXContentObject { + + private static final Logger logger = LogManager.getLogger(AggProvider.class); + + private Exception parsingException; + private AggregatorFactories.Builder parsedAggs; + private Map aggs; + + static AggProvider fromXContent(XContentParser parser, boolean lenient) throws IOException { + Map aggs = parser.mapOrdered(); + AggregatorFactories.Builder parsedAggs = null; + Exception exception = null; + try { + if (aggs.isEmpty()) { + throw new Exception("aggs cannot be empty"); + } + parsedAggs = XContentObjectTransformer.aggregatorTransformer(parser.getXContentRegistry()).fromMap(aggs); + } catch(Exception ex) { + if (ex.getCause() instanceof IllegalArgumentException) { + ex = (Exception)ex.getCause(); + } + exception = ex; + if (lenient) { + logger.warn(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, ex); + } else { + throw 
ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, ex); + } + } + return new AggProvider(aggs, parsedAggs, exception); + } + + static AggProvider fromParsedAggs(AggregatorFactories.Builder parsedAggs) throws IOException { + return parsedAggs == null ? + null : + new AggProvider( + XContentObjectTransformer.aggregatorTransformer(NamedXContentRegistry.EMPTY).toMap(parsedAggs), + parsedAggs, + null); + } + + static AggProvider fromStream(StreamInput in) throws IOException { + if (in.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers + return new AggProvider(in.readMap(), in.readOptionalWriteable(AggregatorFactories.Builder::new), in.readException()); + } else if (in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects + return new AggProvider(in.readMap(), null, null); + } else { // only supports eagerly parsed objects + return AggProvider.fromParsedAggs(in.readOptionalWriteable(AggregatorFactories.Builder::new)); + } + } + + AggProvider(Map aggs, AggregatorFactories.Builder parsedAggs, Exception parsingException) { + this.aggs = Collections.unmodifiableMap(new LinkedHashMap<>(Objects.requireNonNull(aggs, "[aggs] must not be null"))); + this.parsedAggs = parsedAggs; + this.parsingException = parsingException; + } + + AggProvider(AggProvider other) { + this.aggs = new LinkedHashMap<>(other.aggs); + this.parsedAggs = other.parsedAggs; + this.parsingException = other.parsingException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (out.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers + out.writeMap(aggs); + out.writeOptionalWriteable(parsedAggs); + out.writeException(parsingException); + } else if (out.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects + // We allow the lazy parsing nodes that have the bug throw any parsing errors themselves as + // they already have the 
ability to fully parse the passed Maps + out.writeMap(aggs); + } else { // only supports eagerly parsed objects + if (parsingException != null) { + if (parsingException instanceof IOException) { + throw (IOException) parsingException; + } else { + throw new ElasticsearchException(parsingException); + } + } else if (parsedAggs == null) { + // This is an admittedly rare case but we should fail early instead of writing null when there + // actually are aggregations defined + throw new ElasticsearchException("Unsupported operation: parsed aggregations are null"); + } + out.writeOptionalWriteable(parsedAggs); + } + } + + public Exception getParsingException() { + return parsingException; + } + + AggregatorFactories.Builder getParsedAggs() { + return parsedAggs; + } + + public Map getAggs() { + return aggs; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + AggProvider that = (AggProvider) other; + + return Objects.equals(this.aggs, that.aggs) + && Objects.equals(this.parsedAggs, that.parsedAggs) + && Objects.equals(this.parsingException, that.parsingException); + } + + @Override + public int hashCode() { + return Objects.hash(aggs, parsedAggs, parsingException); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.map(aggs); + return builder; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 597edd36752..3cd071f61aa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -12,16 +12,14 @@ import org.elasticsearch.Version; import 
org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.CachedSupplier; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -43,7 +41,6 @@ import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -66,33 +63,6 @@ public class DatafeedConfig extends AbstractDiffable implements private static final int TWO_MINS_SECONDS = 2 * SECONDS_IN_MINUTE; private static final int TWENTY_MINS_SECONDS = 20 * SECONDS_IN_MINUTE; private static final int HALF_DAY_SECONDS = 12 * 60 * SECONDS_IN_MINUTE; - static final XContentObjectTransformer QUERY_TRANSFORMER = XContentObjectTransformer.queryBuilderTransformer(); - static final TriFunction, String, List, QueryBuilder> lazyQueryParser = - (objectMap, id, warnings) -> { - try { - return QUERY_TRANSFORMER.fromMap(objectMap, warnings); - } catch (Exception exception) { - // Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user - if (exception.getCause() instanceof IllegalArgumentException) { - exception = (Exception)exception.getCause(); 
- } - throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, id), exception); - } - }; - - static final XContentObjectTransformer AGG_TRANSFORMER = XContentObjectTransformer.aggregatorTransformer(); - static final TriFunction, String, List, AggregatorFactories.Builder> lazyAggParser = - (objectMap, id, warnings) -> { - try { - return AGG_TRANSFORMER.fromMap(objectMap, warnings); - } catch (Exception exception) { - // Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user - if (exception.getCause() instanceof IllegalArgumentException) { - exception = (Exception)exception.getCause(); - } - throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id), exception); - } - }; private static final Logger logger = LogManager.getLogger(DatafeedConfig.class); @@ -152,10 +122,14 @@ public class DatafeedConfig extends AbstractDiffable implements builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); parser.declareString((builder, val) -> builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY); - parser.declareObject((builder, val) -> builder.setQuery(val, ignoreUnknownFields), (p, c) -> p.mapOrdered(), QUERY); - parser.declareObject((builder, val) -> builder.setAggregationsSafe(val, ignoreUnknownFields), (p, c) -> p.mapOrdered(), + parser.declareObject(Builder::setQueryProvider, + (p, c) -> QueryProvider.fromXContent(p, ignoreUnknownFields), + QUERY); + parser.declareObject(Builder::setAggregationsSafe, + (p, c) -> AggProvider.fromXContent(p, ignoreUnknownFields), AGGREGATIONS); - parser.declareObject((builder, val) -> builder.setAggregationsSafe(val, ignoreUnknownFields), (p, c) -> p.mapOrdered(), + parser.declareObject(Builder::setAggregationsSafe, + (p, c) -> AggProvider.fromXContent(p, ignoreUnknownFields), AGGS); 
parser.declareObject(Builder::setScriptFields, (p, c) -> { List parsedScriptFields = new ArrayList<>(); @@ -194,18 +168,16 @@ public class DatafeedConfig extends AbstractDiffable implements private final TimeValue frequency; private final List indices; - private final Map query; - private final Map aggregations; + private final QueryProvider queryProvider; + private final AggProvider aggProvider; private final List scriptFields; private final Integer scrollSize; private final ChunkingConfig chunkingConfig; private final Map headers; private final DelayedDataCheckConfig delayedDataCheckConfig; - private final CachedSupplier querySupplier; - private final CachedSupplier aggSupplier; private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, - Map query, Map aggregations, List scriptFields, + QueryProvider queryProvider, AggProvider aggProvider, List scriptFields, Integer scrollSize, ChunkingConfig chunkingConfig, Map headers, DelayedDataCheckConfig delayedDataCheckConfig) { this.id = id; @@ -213,15 +185,13 @@ public class DatafeedConfig extends AbstractDiffable implements this.queryDelay = queryDelay; this.frequency = frequency; this.indices = indices == null ? null : Collections.unmodifiableList(indices); - this.query = query == null ? null : Collections.unmodifiableMap(query); - this.aggregations = aggregations == null ? null : Collections.unmodifiableMap(aggregations); + this.queryProvider = queryProvider == null ? null : new QueryProvider(queryProvider); + this.aggProvider = aggProvider == null ? null : new AggProvider(aggProvider); this.scriptFields = scriptFields == null ? 
null : Collections.unmodifiableList(scriptFields); this.scrollSize = scrollSize; this.chunkingConfig = chunkingConfig; this.headers = Collections.unmodifiableMap(headers); this.delayedDataCheckConfig = delayedDataCheckConfig; - this.querySupplier = new CachedSupplier<>(() -> lazyQueryParser.apply(query, id, new ArrayList<>())); - this.aggSupplier = new CachedSupplier<>(() -> lazyAggParser.apply(aggregations, id, new ArrayList<>())); } public DatafeedConfig(StreamInput in) throws IOException { @@ -240,17 +210,10 @@ public class DatafeedConfig extends AbstractDiffable implements in.readStringList(); } } - if (in.getVersion().before(Version.V_6_6_0)) { - this.query = QUERY_TRANSFORMER.toMap(in.readNamedWriteable(QueryBuilder.class)); - this.aggregations = AGG_TRANSFORMER.toMap(in.readOptionalWriteable(AggregatorFactories.Builder::new)); - } else { - this.query = in.readMap(); - if (in.readBoolean()) { - this.aggregations = in.readMap(); - } else { - this.aggregations = null; - } - } + // each of these writables are version aware + this.queryProvider = QueryProvider.fromStream(in); + this.aggProvider = in.readOptionalWriteable(AggProvider::fromStream); + if (in.readBoolean()) { this.scriptFields = Collections.unmodifiableList(in.readList(SearchSourceBuilder.ScriptField::new)); } else { @@ -268,8 +231,6 @@ public class DatafeedConfig extends AbstractDiffable implements } else { delayedDataCheckConfig = DelayedDataCheckConfig.defaultDelayedDataCheckConfig(); } - this.querySupplier = new CachedSupplier<>(() -> lazyQueryParser.apply(query, id, new ArrayList<>())); - this.aggSupplier = new CachedSupplier<>(() -> lazyAggParser.apply(aggregations, id, new ArrayList<>())); } /** @@ -310,62 +271,116 @@ public class DatafeedConfig extends AbstractDiffable implements return scrollSize; } - public QueryBuilder getParsedQuery() { - return querySupplier.get(); + /** + * Get the fully parsed query from the semi-parsed stored {@code Map} + * + * @param namedXContentRegistry XContent 
registry to transform the lazily parsed query + * @return Fully parsed query + */ + public QueryBuilder getParsedQuery(NamedXContentRegistry namedXContentRegistry) { + return queryProvider == null ? null : parseQuery(namedXContentRegistry, new ArrayList<>()); + } + + // TODO Remove in v8.0.0 + // We only need this NamedXContentRegistry object if getParsedQuery() == null and getParsingException() == null + // This situation only occurs in past versions that contained the lazy parsing support but not the providers (6.6.x) + // We will still need `NamedXContentRegistry` for getting deprecations, but that is a special situation + private QueryBuilder parseQuery(NamedXContentRegistry namedXContentRegistry, List deprecations) { + try { + return queryProvider == null || queryProvider.getQuery() == null ? + null : + XContentObjectTransformer.queryBuilderTransformer(namedXContentRegistry).fromMap(queryProvider.getQuery(), deprecations); + } catch (Exception exception) { + // Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user + if (exception.getCause() instanceof IllegalArgumentException) { + exception = (Exception)exception.getCause(); + } + throw ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, exception); + } + } + + Exception getQueryParsingException() { + return queryProvider == null ? 
null : queryProvider.getParsingException(); } /** - * Calls the lazy parser and returns any gathered deprecations + * Calls the parser and returns any gathered deprecations + * + * @param namedXContentRegistry XContent registry to transform the lazily parsed query * @return The deprecations from parsing the query */ - public List getQueryDeprecations() { - return getQueryDeprecations(lazyQueryParser); - } - - List getQueryDeprecations(TriFunction, String, List, QueryBuilder> parser) { + public List getQueryDeprecations(NamedXContentRegistry namedXContentRegistry) { List deprecations = new ArrayList<>(); - parser.apply(query, id, deprecations); + parseQuery(namedXContentRegistry, deprecations); return deprecations; } public Map getQuery() { - return query; - } - - public AggregatorFactories.Builder getParsedAggregations() { - return aggSupplier.get(); + return queryProvider == null ? null : queryProvider.getQuery(); } /** - * Calls the lazy parser and returns any gathered deprecations - * @return The deprecations from parsing the aggregations + * Fully parses the semi-parsed {@code Map} aggregations + * + * @param namedXContentRegistry XContent registry to transform the lazily parsed aggregations + * @return The fully parsed aggregations */ - public List getAggDeprecations() { - return getAggDeprecations(lazyAggParser); + public AggregatorFactories.Builder getParsedAggregations(NamedXContentRegistry namedXContentRegistry) { + return aggProvider == null ? 
null : parseAggregations(namedXContentRegistry, new ArrayList<>()); } - List getAggDeprecations(TriFunction, String, List, AggregatorFactories.Builder> parser) { + // TODO refactor in v8.0.0 + // We only need this NamedXContentRegistry object if getParsedQuery() == null and getParsingException() == null + // This situation only occurs in past versions that contained the lazy parsing support but not the providers (6.6.x) + // We will still need `NamedXContentRegistry` for getting deprecations, but that is a special situation + private AggregatorFactories.Builder parseAggregations(NamedXContentRegistry namedXContentRegistry, List deprecations) { + try { + return aggProvider == null || aggProvider.getAggs() == null ? + null : + XContentObjectTransformer.aggregatorTransformer(namedXContentRegistry).fromMap(aggProvider.getAggs(), deprecations); + } catch (Exception exception) { + // Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user + if (exception.getCause() instanceof IllegalArgumentException) { + exception = (Exception)exception.getCause(); + } + throw ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, exception); + } + } + + Exception getAggParsingException() { + return aggProvider == null ? null : aggProvider.getParsingException(); + } + + /** + * Calls the parser and returns any gathered deprecations + * + * @param namedXContentRegistry XContent registry to transform the lazily parsed aggregations + * @return The deprecations from parsing the aggregations + */ + public List getAggDeprecations(NamedXContentRegistry namedXContentRegistry) { List deprecations = new ArrayList<>(); - parser.apply(aggregations, id, deprecations); + parseAggregations(namedXContentRegistry, deprecations); return deprecations; } public Map getAggregations() { - return aggregations; + return aggProvider == null ? null : aggProvider.getAggs(); } /** * Returns the histogram's interval as epoch millis. 
+ * + * @param namedXContentRegistry XContent registry to transform the lazily parsed aggregations */ - public long getHistogramIntervalMillis() { - return ExtractorUtils.getHistogramIntervalMillis(getParsedAggregations()); + public long getHistogramIntervalMillis(NamedXContentRegistry namedXContentRegistry) { + return ExtractorUtils.getHistogramIntervalMillis(getParsedAggregations(namedXContentRegistry)); } /** * @return {@code true} when there are non-empty aggregations, {@code false} otherwise */ public boolean hasAggregations() { - return aggregations != null && aggregations.size() > 0; + return aggProvider != null && aggProvider.getAggs() != null && aggProvider.getAggs().size() > 0; } public List getScriptFields() { @@ -402,16 +417,11 @@ public class DatafeedConfig extends AbstractDiffable implements out.writeBoolean(true); out.writeStringCollection(Collections.emptyList()); } - if (out.getVersion().before(Version.V_6_6_0)) { - out.writeNamedWriteable(getParsedQuery()); - out.writeOptionalWriteable(getParsedAggregations()); - } else { - out.writeMap(query); - out.writeBoolean(aggregations != null); - if (aggregations != null) { - out.writeMap(aggregations); - } - } + + // Each of these writables are version aware + queryProvider.writeTo(out); // never null + out.writeOptionalWriteable(aggProvider); + if (scriptFields != null) { out.writeBoolean(true); out.writeList(scriptFields); @@ -441,9 +451,9 @@ public class DatafeedConfig extends AbstractDiffable implements builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep()); } builder.field(INDICES.getPreferredName(), indices); - builder.field(QUERY.getPreferredName(), query); - if (aggregations != null) { - builder.field(AGGREGATIONS.getPreferredName(), aggregations); + builder.field(QUERY.getPreferredName(), queryProvider.getQuery()); + if (aggProvider != null) { + builder.field(AGGREGATIONS.getPreferredName(), aggProvider.getAggs()); } if (scriptFields != null) { 
builder.startObject(SCRIPT_FIELDS.getPreferredName()); @@ -488,9 +498,9 @@ public class DatafeedConfig extends AbstractDiffable implements && Objects.equals(this.frequency, that.frequency) && Objects.equals(this.queryDelay, that.queryDelay) && Objects.equals(this.indices, that.indices) - && Objects.equals(this.query, that.query) + && Objects.equals(this.queryProvider, that.queryProvider) && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(this.aggregations, that.aggregations) + && Objects.equals(this.aggProvider, that.aggProvider) && Objects.equals(this.scriptFields, that.scriptFields) && Objects.equals(this.chunkingConfig, that.chunkingConfig) && Objects.equals(this.headers, that.headers) @@ -499,7 +509,7 @@ public class DatafeedConfig extends AbstractDiffable implements @Override public int hashCode() { - return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig, + return Objects.hash(id, jobId, frequency, queryDelay, indices, queryProvider, scrollSize, aggProvider, scriptFields, chunkingConfig, headers, delayedDataCheckConfig); } @@ -525,10 +535,10 @@ public class DatafeedConfig extends AbstractDiffable implements * @param bucketSpan the bucket span * @return the default frequency */ - public TimeValue defaultFrequency(TimeValue bucketSpan) { + public TimeValue defaultFrequency(TimeValue bucketSpan, NamedXContentRegistry xContentRegistry) { TimeValue defaultFrequency = defaultFrequencyTarget(bucketSpan); if (hasAggregations()) { - long histogramIntervalMillis = getHistogramIntervalMillis(); + long histogramIntervalMillis = getHistogramIntervalMillis(xContentRegistry); long targetFrequencyMillis = defaultFrequency.millis(); long defaultFrequencyMillis = histogramIntervalMillis > targetFrequencyMillis ? 
histogramIntervalMillis : (targetFrequencyMillis / histogramIntervalMillis) * histogramIntervalMillis; @@ -566,8 +576,8 @@ public class DatafeedConfig extends AbstractDiffable implements private TimeValue queryDelay; private TimeValue frequency; private List indices = Collections.emptyList(); - private Map query = Collections.singletonMap(MatchAllQueryBuilder.NAME, Collections.emptyMap()); - private Map aggregations; + private QueryProvider queryProvider = QueryProvider.defaultQuery(); + private AggProvider aggProvider; private List scriptFields; private Integer scrollSize = DEFAULT_SCROLL_SIZE; private ChunkingConfig chunkingConfig; @@ -588,8 +598,8 @@ public class DatafeedConfig extends AbstractDiffable implements this.queryDelay = config.queryDelay; this.frequency = config.frequency; this.indices = new ArrayList<>(config.indices); - this.query = config.query == null ? null : new LinkedHashMap<>(config.query); - this.aggregations = config.aggregations == null ? null : new LinkedHashMap<>(config.aggregations); + this.queryProvider = config.queryProvider == null ? null : new QueryProvider(config.queryProvider); + this.aggProvider = config.aggProvider == null ? null : new AggProvider(config.aggProvider); this.scriptFields = config.scriptFields == null ? 
null : new ArrayList<>(config.scriptFields); this.scrollSize = config.scrollSize; this.chunkingConfig = config.chunkingConfig; @@ -627,74 +637,39 @@ public class DatafeedConfig extends AbstractDiffable implements this.frequency = frequency; } - public void setQuery(Map query) { - setQuery(query, true); + public void setQueryProvider(QueryProvider queryProvider) { + this.queryProvider = ExceptionsHelper.requireNonNull(queryProvider, QUERY.getPreferredName()); } - public void setQuery(Map query, boolean lenient) { - this.query = ExceptionsHelper.requireNonNull(query, QUERY.getPreferredName()); + // For testing only + public void setParsedQuery(QueryBuilder queryBuilder) { try { - QUERY_TRANSFORMER.fromMap(query); - } catch(Exception ex) { - String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, id); - - if (ex.getCause() instanceof IllegalArgumentException) { - ex = (Exception)ex.getCause(); - } - - if (lenient) { - logger.warn(msg, ex); - } else { - throw ExceptionsHelper.badRequestException(msg, ex); - } + this.queryProvider = ExceptionsHelper.requireNonNull(QueryProvider.fromParsedQuery(queryBuilder), QUERY.getPreferredName()); + } catch (IOException exception) { + // eat exception as it should never happen + logger.error("Exception trying to setParsedQuery", exception); } } - // Kept for easier testing + // For testing only public void setParsedAggregations(AggregatorFactories.Builder aggregations) { try { - setAggregations(AGG_TRANSFORMER.toMap(aggregations)); - } catch (Exception exception) { - // Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user - if (exception.getCause() instanceof IllegalArgumentException) { - exception = (Exception)exception.getCause(); - } - throw ExceptionsHelper.badRequestException( - Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id), exception); + this.aggProvider = AggProvider.fromParsedAggs(aggregations); + } catch (IOException exception) { + 
// eat exception as it should never happen + logger.error("Exception trying to setParsedAggregations", exception); } } - private void setAggregationsSafe(Map aggregations, boolean lenient) { - if (this.aggregations != null) { + private void setAggregationsSafe(AggProvider aggProvider) { + if (this.aggProvider != null) { throw ExceptionsHelper.badRequestException("Found two aggregation definitions: [aggs] and [aggregations]"); } - setAggregations(aggregations, lenient); + this.aggProvider = aggProvider; } - void setAggregations(Map aggregations) { - setAggregations(aggregations, true); - } - - void setAggregations(Map aggregations, boolean lenient) { - this.aggregations = aggregations; - try { - if (aggregations != null && aggregations.isEmpty()) { - throw new Exception("[aggregations] are empty"); - } - AGG_TRANSFORMER.fromMap(aggregations); - } catch (Exception ex) { - String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id); - - if (ex.getCause() instanceof IllegalArgumentException) { - ex = (Exception)ex.getCause(); - } - - if (lenient) { - logger.warn(msg, ex); - } else { - throw ExceptionsHelper.badRequestException(msg, ex); - } - } + public void setAggProvider(AggProvider aggProvider) { + this.aggProvider = aggProvider; } public void setScriptFields(List scriptFields) { @@ -737,12 +712,12 @@ public class DatafeedConfig extends AbstractDiffable implements setDefaultChunkingConfig(); setDefaultQueryDelay(); - return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize, + return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, queryProvider, aggProvider, scriptFields, scrollSize, chunkingConfig, headers, delayedDataCheckConfig); } void validateScriptFields() { - if (aggregations == null) { + if (aggProvider == null) { return; } if (scriptFields != null && !scriptFields.isEmpty()) { @@ -788,11 +763,13 @@ public class DatafeedConfig extends AbstractDiffable implements private 
void setDefaultChunkingConfig() { if (chunkingConfig == null) { - if (aggregations == null) { + if (aggProvider == null || aggProvider.getParsedAggs() == null) { chunkingConfig = ChunkingConfig.newAuto(); } else { - long histogramIntervalMillis = - ExtractorUtils.getHistogramIntervalMillis(lazyAggParser.apply(aggregations, id, new ArrayList<>())); + long histogramIntervalMillis = ExtractorUtils.getHistogramIntervalMillis(aggProvider.getParsedAggs()); + if (histogramIntervalMillis <= 0) { + throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO); + } chunkingConfig = ChunkingConfig.newManual(TimeValue.timeValueMillis( DEFAULT_AGGREGATION_CHUNKING_BUCKETS * histogramIntervalMillis)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedJobValidator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedJobValidator.java index 8a49b955445..4c2e338db09 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedJobValidator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedJobValidator.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.datafeed; import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -21,15 +22,15 @@ public final class DatafeedJobValidator { * @param datafeedConfig the datafeed config * @param job the job */ - public static void validate(DatafeedConfig datafeedConfig, Job job) { + public static void validate(DatafeedConfig datafeedConfig, Job job, NamedXContentRegistry xContentRegistry) { AnalysisConfig analysisConfig = job.getAnalysisConfig(); if 
(analysisConfig.getLatency() != null && analysisConfig.getLatency().seconds() > 0) { throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY)); } if (datafeedConfig.hasAggregations()) { checkSummaryCountFieldNameIsSet(analysisConfig); - checkValidHistogramInterval(datafeedConfig, analysisConfig); - checkFrequencyIsMultipleOfHistogramInterval(datafeedConfig); + checkValidHistogramInterval(datafeedConfig, analysisConfig, xContentRegistry); + checkFrequencyIsMultipleOfHistogramInterval(datafeedConfig, xContentRegistry); } DelayedDataCheckConfig delayedDataCheckConfig = datafeedConfig.getDelayedDataCheckConfig(); @@ -64,8 +65,10 @@ public final class DatafeedJobValidator { } } - private static void checkValidHistogramInterval(DatafeedConfig datafeedConfig, AnalysisConfig analysisConfig) { - long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis(); + private static void checkValidHistogramInterval(DatafeedConfig datafeedConfig, + AnalysisConfig analysisConfig, + NamedXContentRegistry xContentRegistry) { + long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis(xContentRegistry); long bucketSpanMillis = analysisConfig.getBucketSpan().millis(); if (histogramIntervalMillis > bucketSpanMillis) { throw ExceptionsHelper.badRequestException(Messages.getMessage( @@ -82,10 +85,10 @@ public final class DatafeedJobValidator { } } - private static void checkFrequencyIsMultipleOfHistogramInterval(DatafeedConfig datafeedConfig) { + private static void checkFrequencyIsMultipleOfHistogramInterval(DatafeedConfig datafeedConfig, NamedXContentRegistry xContentRegistry) { TimeValue frequency = datafeedConfig.getFrequency(); if (frequency != null) { - long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis(); + long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis(xContentRegistry); long frequencyMillis = frequency.millis(); if (frequencyMillis % 
histogramIntervalMillis != 0) { throw ExceptionsHelper.badRequestException(Messages.getMessage( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 5468ea1ee26..78b4e4ec7c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -21,8 +22,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; import java.io.IOException; import java.util.ArrayList; @@ -33,10 +34,6 @@ import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.AGG_TRANSFORMER; -import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.QUERY_TRANSFORMER; -import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.lazyAggParser; -import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.lazyQueryParser; /** * A datafeed update contains 
partial properties to update a {@link DatafeedConfig}. @@ -56,9 +53,13 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY); PARSER.declareString((builder, val) -> builder.setFrequency( TimeValue.parseTimeValue(val, DatafeedConfig.FREQUENCY.getPreferredName())), DatafeedConfig.FREQUENCY); - PARSER.declareObject(Builder::setQuery, (p, c) -> p.mapOrdered(), DatafeedConfig.QUERY); - PARSER.declareObject(Builder::setAggregationsSafe, (p, c) -> p.mapOrdered(), DatafeedConfig.AGGREGATIONS); - PARSER.declareObject(Builder::setAggregationsSafe,(p, c) -> p.mapOrdered(), DatafeedConfig.AGGS); + PARSER.declareObject(Builder::setQuery, (p, c) -> QueryProvider.fromXContent(p, false), DatafeedConfig.QUERY); + PARSER.declareObject(Builder::setAggregationsSafe, + (p, c) -> AggProvider.fromXContent(p, false), + DatafeedConfig.AGGREGATIONS); + PARSER.declareObject(Builder::setAggregationsSafe, + (p, c) -> AggProvider.fromXContent(p, false), + DatafeedConfig.AGGS); PARSER.declareObject(Builder::setScriptFields, (p, c) -> { List parsedScriptFields = new ArrayList<>(); while (p.nextToken() != XContentParser.Token.END_OBJECT) { @@ -79,23 +80,24 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { private final TimeValue queryDelay; private final TimeValue frequency; private final List indices; - private final Map query; - private final Map aggregations; + private final QueryProvider queryProvider; + private final AggProvider aggProvider; private final List scriptFields; private final Integer scrollSize; private final ChunkingConfig chunkingConfig; private final DelayedDataCheckConfig delayedDataCheckConfig; private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, - Map query, Map aggregations, List scriptFields, + QueryProvider queryProvider, AggProvider aggProvider, + List scriptFields, Integer 
scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) { this.id = id; this.jobId = jobId; this.queryDelay = queryDelay; this.frequency = frequency; this.indices = indices; - this.query = query; - this.aggregations = aggregations; + this.queryProvider = queryProvider; + this.aggProvider = aggProvider; this.scriptFields = scriptFields; this.scrollSize = scrollSize; this.chunkingConfig = chunkingConfig; @@ -118,16 +120,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { in.readStringList(); } } - if (in.getVersion().before(Version.V_7_1_0)) { - this.query = QUERY_TRANSFORMER.toMap(in.readOptionalNamedWriteable(QueryBuilder.class)); - this.aggregations = AGG_TRANSFORMER.toMap(in.readOptionalWriteable(AggregatorFactories.Builder::new)); + if (in.getVersion().before(Version.V_7_0_0)) { + this.queryProvider = QueryProvider.fromParsedQuery(in.readOptionalNamedWriteable(QueryBuilder.class)); + this.aggProvider = AggProvider.fromParsedAggs(in.readOptionalWriteable(AggregatorFactories.Builder::new)); } else { - this.query = in.readMap(); - if (in.readBoolean()) { - this.aggregations = in.readMap(); - } else { - this.aggregations = null; - } + this.queryProvider = in.readOptionalWriteable(QueryProvider::fromStream); + this.aggProvider = in.readOptionalWriteable(AggProvider::fromStream); } if (in.readBoolean()) { this.scriptFields = in.readList(SearchSourceBuilder.ScriptField::new); @@ -168,15 +166,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { out.writeBoolean(true); out.writeStringCollection(Collections.emptyList()); } - if (out.getVersion().before(Version.V_7_1_0)) { - out.writeOptionalNamedWriteable(lazyQueryParser.apply(query, id, new ArrayList<>())); - out.writeOptionalWriteable(lazyAggParser.apply(aggregations, id, new ArrayList<>())); + if (out.getVersion().before(Version.V_7_0_0)) { + out.writeOptionalNamedWriteable(queryProvider == null ? 
null : queryProvider.getParsedQuery()); + out.writeOptionalWriteable(aggProvider == null ? null : aggProvider.getParsedAggs()); } else { - out.writeMap(query); - out.writeBoolean(aggregations != null); - if (aggregations != null) { - out.writeMap(aggregations); - } + out.writeOptionalWriteable(queryProvider); + out.writeOptionalWriteable(aggProvider); } if (scriptFields != null) { out.writeBoolean(true); @@ -203,8 +198,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); } addOptionalField(builder, DatafeedConfig.INDICES, indices); - addOptionalField(builder, DatafeedConfig.QUERY, query); - addOptionalField(builder, DatafeedConfig.AGGREGATIONS, aggregations); + if (queryProvider != null) { + builder.field(DatafeedConfig.QUERY.getPreferredName(), queryProvider.getQuery()); + } + if (aggProvider != null) { + builder.field(DatafeedConfig.AGGREGATIONS.getPreferredName(), aggProvider.getAggs()); + } if (scriptFields != null) { builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName()); for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { @@ -246,11 +245,21 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { } Map getQuery() { - return query; + return queryProvider == null ? null : queryProvider.getQuery(); + } + + QueryBuilder getParsedQuery(NamedXContentRegistry namedXContentRegistry) throws IOException { + return XContentObjectTransformer.queryBuilderTransformer(namedXContentRegistry).fromMap(queryProvider.getQuery(), + new ArrayList<>()); } Map getAggregations() { - return aggregations; + return aggProvider == null ? 
null : aggProvider.getAggs(); + } + + AggregatorFactories.Builder getParsedAgg(NamedXContentRegistry namedXContentRegistry) throws IOException { + return XContentObjectTransformer.aggregatorTransformer(namedXContentRegistry).fromMap(aggProvider.getAggs(), + new ArrayList<>()); } /** @@ -258,7 +267,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { * otherwise */ boolean hasAggregations() { - return aggregations != null && aggregations.size() > 0; + return getAggregations() != null && getAggregations().size() > 0; } List getScriptFields() { @@ -295,12 +304,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { if (indices != null) { builder.setIndices(indices); } - if (query != null) { - builder.setQuery(query); + if (queryProvider != null) { + builder.setQueryProvider(queryProvider); } - if (aggregations != null) { - DatafeedConfig.validateAggregations(lazyAggParser.apply(aggregations, id, new ArrayList<>())); - builder.setAggregations(aggregations); + if (aggProvider != null) { + DatafeedConfig.validateAggregations(aggProvider.getParsedAggs()); + builder.setAggProvider(aggProvider); } if (scriptFields != null) { builder.setScriptFields(scriptFields); @@ -348,9 +357,9 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { && Objects.equals(this.frequency, that.frequency) && Objects.equals(this.queryDelay, that.queryDelay) && Objects.equals(this.indices, that.indices) - && Objects.equals(this.query, that.query) + && Objects.equals(this.queryProvider, that.queryProvider) && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(this.aggregations, that.aggregations) + && Objects.equals(this.aggProvider, that.aggProvider) && Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig) && Objects.equals(this.scriptFields, that.scriptFields) && Objects.equals(this.chunkingConfig, that.chunkingConfig); @@ -358,7 +367,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject 
{ @Override public int hashCode() { - return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig, + return Objects.hash(id, jobId, frequency, queryDelay, indices, queryProvider, scrollSize, aggProvider, scriptFields, chunkingConfig, delayedDataCheckConfig); } @@ -371,9 +380,9 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { return (frequency == null || Objects.equals(frequency, datafeed.getFrequency())) && (queryDelay == null || Objects.equals(queryDelay, datafeed.getQueryDelay())) && (indices == null || Objects.equals(indices, datafeed.getIndices())) - && (query == null || Objects.equals(query, datafeed.getQuery())) + && (queryProvider == null || Objects.equals(queryProvider.getQuery(), datafeed.getQuery())) && (scrollSize == null || Objects.equals(scrollSize, datafeed.getQueryDelay())) - && (aggregations == null || Objects.equals(aggregations, datafeed.getAggregations())) + && (aggProvider == null || Objects.equals(aggProvider.getAggs(), datafeed.getAggregations())) && (scriptFields == null || Objects.equals(scriptFields, datafeed.getScriptFields())) && (delayedDataCheckConfig == null || Objects.equals(delayedDataCheckConfig, datafeed.getDelayedDataCheckConfig())) && (chunkingConfig == null || Objects.equals(chunkingConfig, datafeed.getChunkingConfig())); @@ -386,8 +395,8 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { private TimeValue queryDelay; private TimeValue frequency; private List indices; - private Map query; - private Map aggregations; + private QueryProvider queryProvider; + private AggProvider aggProvider; private List scriptFields; private Integer scrollSize; private ChunkingConfig chunkingConfig; @@ -406,8 +415,8 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { this.queryDelay = config.queryDelay; this.frequency = config.frequency; this.indices = config.indices; - this.query = config.query; - this.aggregations = 
config.aggregations; + this.queryProvider = config.queryProvider; + this.aggProvider = config.aggProvider; this.scriptFields = config.scriptFields; this.scrollSize = config.scrollSize; this.chunkingConfig = config.chunkingConfig; @@ -434,42 +443,19 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { this.frequency = frequency; } - public void setQuery(Map query) { - this.query = query; - try { - QUERY_TRANSFORMER.fromMap(query); - } catch(Exception ex) { - String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, id); - - if (ex.getCause() instanceof IllegalArgumentException) { - ex = (Exception)ex.getCause(); - } - throw ExceptionsHelper.badRequestException(msg, ex); - } + public void setQuery(QueryProvider queryProvider) { + this.queryProvider = queryProvider; } - private void setAggregationsSafe(Map aggregations) { - if (this.aggregations != null) { + private void setAggregationsSafe(AggProvider aggProvider) { + if (this.aggProvider != null) { throw ExceptionsHelper.badRequestException("Found two aggregation definitions: [aggs] and [aggregations]"); } - setAggregations(aggregations); + setAggregations(aggProvider); } - public void setAggregations(Map aggregations) { - this.aggregations = aggregations; - try { - if (aggregations != null && aggregations.isEmpty()) { - throw new Exception("[aggregations] are empty"); - } - AGG_TRANSFORMER.fromMap(aggregations); - } catch(Exception ex) { - String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id); - - if (ex.getCause() instanceof IllegalArgumentException) { - ex = (Exception)ex.getCause(); - } - throw ExceptionsHelper.badRequestException(msg, ex); - } + public void setAggregations(AggProvider aggProvider) { + this.aggProvider = aggProvider; } public void setScriptFields(List scriptFields) { @@ -491,7 +477,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { } public DatafeedUpdate build() { - return new DatafeedUpdate(id, jobId, 
queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize, + return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, queryProvider, aggProvider, scriptFields, scrollSize, chunkingConfig, delayedDataCheckConfig); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java new file mode 100644 index 00000000000..ff6d2f595af --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.datafeed; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; + +import java.io.IOException; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; 
+import java.util.Objects; + +class QueryProvider implements Writeable, ToXContentObject { + + private static final Logger logger = LogManager.getLogger(QueryProvider.class); + + private Exception parsingException; + private QueryBuilder parsedQuery; + private Map query; + + static QueryProvider defaultQuery() { + return new QueryProvider( + Collections.singletonMap(MatchAllQueryBuilder.NAME, Collections.emptyMap()), + QueryBuilders.matchAllQuery(), + null); + } + + static QueryProvider fromXContent(XContentParser parser, boolean lenient) throws IOException { + Map query = parser.mapOrdered(); + QueryBuilder parsedQuery = null; + Exception exception = null; + try { + parsedQuery = XContentObjectTransformer.queryBuilderTransformer(parser.getXContentRegistry()).fromMap(query); + } catch(Exception ex) { + if (ex.getCause() instanceof IllegalArgumentException) { + ex = (Exception)ex.getCause(); + } + exception = ex; + if (lenient) { + logger.warn(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, ex); + } else { + throw ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, ex); + } + } + return new QueryProvider(query, parsedQuery, exception); + } + + static QueryProvider fromParsedQuery(QueryBuilder parsedQuery) throws IOException { + return parsedQuery == null ? 
+ null : + new QueryProvider( + XContentObjectTransformer.queryBuilderTransformer(NamedXContentRegistry.EMPTY).toMap(parsedQuery), + parsedQuery, + null); + } + + static QueryProvider fromStream(StreamInput in) throws IOException { + if (in.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers + return new QueryProvider(in.readMap(), in.readOptionalNamedWriteable(QueryBuilder.class), in.readException()); + } else if (in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects + return new QueryProvider(in.readMap(), null, null); + } else { // only supports eagerly parsed objects + return QueryProvider.fromParsedQuery(in.readNamedWriteable(QueryBuilder.class)); + } + } + + QueryProvider(Map query, QueryBuilder parsedQuery, Exception parsingException) { + this.query = Collections.unmodifiableMap(new LinkedHashMap<>(Objects.requireNonNull(query, "[query] must not be null"))); + this.parsedQuery = parsedQuery; + this.parsingException = parsingException; + } + + QueryProvider(QueryProvider other) { + this(other.query, other.parsedQuery, other.parsingException); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (out.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers + out.writeMap(query); + out.writeOptionalNamedWriteable(parsedQuery); + out.writeException(parsingException); + } else if (out.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects + // We allow the lazy parsing nodes that have the bug throw any parsing errors themselves as + // they already have the ability to fully parse the passed Maps + out.writeMap(query); + } else { // only supports eagerly parsed objects + if (parsingException != null) { // Do we have a parsing error? 
Throw it + if (parsingException instanceof IOException) { + throw (IOException) parsingException; + } else { + throw new ElasticsearchException(parsingException); + } + } else if (parsedQuery == null) { // Do we have a query defined but not parsed? + // This is an admittedly rare case but we should fail early instead of writing null when there + // actually is a query defined + throw new ElasticsearchException("Unsupported operation: parsed query is null"); + } + out.writeNamedWriteable(parsedQuery); + } + } + + public Exception getParsingException() { + return parsingException; + } + + public QueryBuilder getParsedQuery() { + return parsedQuery; + } + + public Map getQuery() { + return query; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + QueryProvider that = (QueryProvider) other; + + return Objects.equals(this.query, that.query) + && Objects.equals(this.parsedQuery, that.parsedQuery) + && Objects.equals(this.parsingException, that.parsingException); + } + + @Override + public int hashCode() { + return Objects.hash(query, parsedQuery, parsingException); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.map(query); + return builder; + } +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 09874ec611b..80542909efd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -26,8 +26,8 @@ public final class Messages { "delayed_data_check_config: check_window [{0}] must be greater than the bucket_span [{1}]"; public static final String 
DATAFEED_CONFIG_DELAYED_DATA_CHECK_SPANS_TOO_MANY_BUCKETS = "delayed_data_check_config: check_window [{0}] must be less than 10,000x the bucket_span [{1}]"; - public static final String DATAFEED_CONFIG_QUERY_BAD_FORMAT = "Datafeed [{0}] query is not parsable"; - public static final String DATAFEED_CONFIG_AGG_BAD_FORMAT = "Datafeed [{0}] aggregations are not parsable"; + public static final String DATAFEED_CONFIG_QUERY_BAD_FORMAT = "Datafeed query is not parsable"; + public static final String DATAFEED_CONFIG_AGG_BAD_FORMAT = "Datafeed aggregations are not parsable"; public static final String DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY = "A job configured with datafeed cannot support latency"; public static final String DATAFEED_NOT_FOUND = "No datafeed with id [{0}] exists"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformer.java index bbea1014183..74350c24064 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformer.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.utils; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -18,12 +17,10 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.SearchModule; import 
org.elasticsearch.search.aggregations.AggregatorFactories; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -38,15 +35,8 @@ public class XContentObjectTransformer { private final NamedXContentRegistry registry; private final CheckedFunction parserFunction; - // We need this registry for parsing out Aggregations and Searches - private static NamedXContentRegistry searchRegistry; - static { - SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); - searchRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); - } - - public static XContentObjectTransformer aggregatorTransformer() { - return new XContentObjectTransformer<>(searchRegistry, (p) -> { + public static XContentObjectTransformer aggregatorTransformer(NamedXContentRegistry registry) { + return new XContentObjectTransformer<>(registry, (p) -> { // Serializing a map creates an object, need to skip the start object for the aggregation parser XContentParser.Token token = p.nextToken(); assert(XContentParser.Token.START_OBJECT.equals(token)); @@ -54,8 +44,8 @@ public class XContentObjectTransformer { }); } - public static XContentObjectTransformer queryBuilderTransformer() { - return new XContentObjectTransformer<>(searchRegistry, AbstractQueryBuilder::parseInnerQueryBuilder); + public static XContentObjectTransformer queryBuilderTransformer(NamedXContentRegistry registry) { + return new XContentObjectTransformer<>(registry, AbstractQueryBuilder::parseInnerQueryBuilder); } XContentObjectTransformer(NamedXContentRegistry registry, CheckedFunction parserFunction) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java index 59ed1dcd17b..2eff0384b6f 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.AbstractStreamableTestCase; @@ -28,10 +29,12 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.empty; import static org.hamcrest.core.IsEqual.equalTo; @@ -91,9 +94,9 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa Collections.unmodifiableList(Arrays.asList( (idx) -> indexIssueFound ? foundIssue : null )); - List> mlSettingsChecks = + List> mlSettingsChecks = Collections.unmodifiableList(Arrays.asList( - (idx) -> mlIssueFound ? foundIssue : null + (idx, unused) -> mlIssueFound ? foundIssue : null )); NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( @@ -101,10 +104,10 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa nodeIssueFound ? 
Collections.singletonList( new NodesDeprecationCheckAction.NodeResponse(discoveryNode, Collections.singletonList(foundIssue))) - : Collections.emptyList(), - Collections.emptyList()); + : emptyList(), + emptyList()); - DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(state, + DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(state, NamedXContentRegistry.EMPTY, resolver, Strings.EMPTY_ARRAY, indicesOptions, datafeeds, nodeDeprecationIssues, indexSettingsChecks, clusterSettingsChecks, mlSettingsChecks); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java new file mode 100644 index 00000000000..dc87cf744cb --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java @@ -0,0 +1,179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.datafeed; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + + +public class AggProviderTests extends AbstractSerializingTestCase { + + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + @Override + protected NamedWriteableRegistry writableRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedWriteableRegistry(searchModule.getNamedWriteables()); + } + + @Override + protected NamedWriteableRegistry 
getNamedWriteableRegistry() { + return writableRegistry(); + } + + @Override + protected AggProvider createTestInstance() { + return createRandomValidAggProvider(); + } + + @Override + protected Writeable.Reader instanceReader() { + return AggProvider::fromStream; + } + + @Override + protected AggProvider doParseInstance(XContentParser parser) throws IOException { + return AggProvider.fromXContent(parser, false); + } + + public static AggProvider createRandomValidAggProvider() { + return createRandomValidAggProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); + } + + public static AggProvider createRandomValidAggProvider(String name, String field) { + Map agg = Collections.singletonMap(name, + Collections.singletonMap("avg", Collections.singletonMap("field", field))); + try { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + AggregatorFactories.Builder aggs = + XContentObjectTransformer.aggregatorTransformer(new NamedXContentRegistry(searchModule.getNamedXContents())) + .fromMap(agg); + return new AggProvider(agg, aggs, null); + } catch (IOException ex) { + fail(ex.getMessage()); + } + return null; + } + + public void testEmptyAggMap() throws IOException { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{}"); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> AggProvider.fromXContent(parser, false)); + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + assertThat(e.getMessage(), equalTo("Datafeed aggregations are not parsable")); + } + + public void testSerializationBetweenBugVersion() throws IOException { + AggProvider tempAggProvider = createRandomValidAggProvider(); + AggProvider aggProviderWithEx = new AggProvider(tempAggProvider.getAggs(), tempAggProvider.getParsedAggs(), new IOException("ex")); + try (BytesStreamOutput output = new 
BytesStreamOutput()) { + output.setVersion(Version.V_6_6_2); + aggProviderWithEx.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { + in.setVersion(Version.V_6_6_2); + AggProvider streamedAggProvider = AggProvider.fromStream(in); + assertThat(streamedAggProvider.getAggs(), equalTo(aggProviderWithEx.getAggs())); + assertThat(streamedAggProvider.getParsingException(), is(nullValue())); + + AggregatorFactories.Builder streamedParsedAggs = XContentObjectTransformer.aggregatorTransformer(xContentRegistry()) + .fromMap(streamedAggProvider.getAggs()); + assertThat(streamedParsedAggs, equalTo(aggProviderWithEx.getParsedAggs())); + assertThat(streamedAggProvider.getParsedAggs(), is(nullValue())); + } + } + } + + public void testSerializationBetweenEagerVersion() throws IOException { + AggProvider validAggProvider = createRandomValidAggProvider(); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(Version.V_6_0_0); + validAggProvider.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { + in.setVersion(Version.V_6_0_0); + AggProvider streamedAggProvider = AggProvider.fromStream(in); + assertThat(streamedAggProvider.getAggs(), equalTo(validAggProvider.getAggs())); + assertThat(streamedAggProvider.getParsingException(), is(nullValue())); + assertThat(streamedAggProvider.getParsedAggs(), equalTo(validAggProvider.getParsedAggs())); + } + } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + AggProvider aggProviderWithEx = new AggProvider(validAggProvider.getAggs(), + validAggProvider.getParsedAggs(), + new IOException("bad parsing")); + output.setVersion(Version.V_6_0_0); + IOException ex = expectThrows(IOException.class, () -> aggProviderWithEx.writeTo(output)); + assertThat(ex.getMessage(), equalTo("bad parsing")); + } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + 
AggProvider aggProviderWithEx = new AggProvider(validAggProvider.getAggs(), + validAggProvider.getParsedAggs(), + new ElasticsearchException("bad parsing")); + output.setVersion(Version.V_6_0_0); + ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> aggProviderWithEx.writeTo(output)); + assertNotNull(ex.getCause()); + assertThat(ex.getCause().getMessage(), equalTo("bad parsing")); + } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + AggProvider aggProviderWithOutParsed = new AggProvider(validAggProvider.getAggs(), null, null); + output.setVersion(Version.V_6_0_0); + ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> aggProviderWithOutParsed.writeTo(output)); + assertThat(ex.getMessage(), equalTo("Unsupported operation: parsed aggregations are null")); + } + } + + @Override + protected AggProvider mutateInstance(AggProvider instance) throws IOException { + Exception parsingException = instance.getParsingException(); + AggregatorFactories.Builder parsedAggs = instance.getParsedAggs(); + switch (between(0, 1)) { + case 0: + parsingException = parsingException == null ? new IOException("failed parsing") : null; + break; + case 1: + parsedAggs = parsedAggs == null ? 
+ XContentObjectTransformer.aggregatorTransformer(xContentRegistry()).fromMap(instance.getAggs()) : + null; + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + return new AggProvider(instance.getAggs(), parsedAggs, parsingException); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index 40b7ce88df0..71491c92277 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.datafeed; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -27,8 +26,8 @@ import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -58,19 +57,16 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.QUERY_TRANSFORMER; -import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.lazyQueryParser; +import static 
org.elasticsearch.xpack.core.ml.datafeed.QueryProviderTests.createRandomValidQueryProvider; +import static org.elasticsearch.xpack.core.ml.job.messages.Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; public class DatafeedConfigTests extends AbstractSerializingTestCase { @@ -91,8 +87,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase DatafeedConfig.STRICT_PARSER.apply(parser, null).build()); assertEquals("[6:5] [datafeed_config] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); @@ -262,16 +257,15 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase config.getParsedQuery()); - assertNotNull(e.getCause()); - assertEquals("[match] query doesn't support multiple fields, found [query] and [type]", e.getCause().getMessage()); + assertThat(config.getQueryParsingException().getMessage(), + equalTo("[match] query doesn't support multiple fields, found [query] and [type]")); } try(XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_QUERY_DATAFEED)) { + .createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_QUERY_DATAFEED)) { XContentParseException e = expectThrows(XContentParseException.class, () -> DatafeedConfig.STRICT_PARSER.apply(parser, null).build()); @@ -281,18 +275,15 @@ public class DatafeedConfigTests extends 
AbstractSerializingTestCase configBuilder.build()); - assertNotNull(e.getCause()); - assertEquals( - "[size] must be greater than 0. Found [0] in [airline]", - e.getCause().getMessage()); + DatafeedConfig datafeedConfig = DatafeedConfig.LENIENT_PARSER.apply(parser, null).build(); + assertThat(datafeedConfig.getAggParsingException().getMessage(), + equalTo("[size] must be greater than 0. Found [0] in [airline]")); } try(XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_AGG_DATAFEED)) { + .createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_AGG_DATAFEED)) { XContentParseException e = expectThrows(XContentParseException.class, () -> DatafeedConfig.STRICT_PARSER.apply(parser, null).build()); @@ -302,14 +293,14 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase DatafeedConfig.LENIENT_PARSER.apply(parser, null)); assertThat(ex.getMessage(), equalTo("[32:3] [datafeed_config] failed to parse field [aggs]")); @@ -317,7 +308,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase DatafeedConfig.STRICT_PARSER.apply(parser, null)); assertThat(ex.getMessage(), equalTo("[32:3] [datafeed_config] failed to parse field [aggs]")); @@ -502,8 +493,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase0 for histogram aggregation [time]")); + assertThat(e.getMessage(), containsString(DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO)); } public void testBuild_GivenDateHistogramWithInvalidTimeZone() { @@ -526,16 +516,16 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase datafeed.defaultFrequency(TimeValue.timeValueSeconds(-1))); + ESTestCase.expectThrows(IllegalArgumentException.class, + () -> datafeed.defaultFrequency(TimeValue.timeValueSeconds(-1), xContentRegistry())); } public void testDefaultFrequency_GivenNoAggregations() { @@ 
-597,106 +588,79 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase deprecations = datafeed.getAggDeprecations((map, id, deprecationlist) -> { - deprecationlist.add(deprecationWarning); - return new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder("field").field("field")); - }); - assertThat(deprecations, hasItem(deprecationWarning)); - - DatafeedConfig spiedConfig = spy(datafeed); - spiedConfig.getAggDeprecations(); - verify(spiedConfig).getAggDeprecations(DatafeedConfig.lazyAggParser); - } - - public void testGetQueryDeprecations() { - DatafeedConfig datafeed = createDatafeedWithDateHistogram("1h"); - String deprecationWarning = "Warning"; - List deprecations = datafeed.getQueryDeprecations((map, id, deprecationlist) -> { - deprecationlist.add(deprecationWarning); - return new BoolQueryBuilder(); - }); - assertThat(deprecations, hasItem(deprecationWarning)); - - DatafeedConfig spiedConfig = spy(datafeed); - spiedConfig.getQueryDeprecations(); - verify(spiedConfig).getQueryDeprecations(lazyQueryParser); + assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(1), xContentRegistry())); + assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(3601), xContentRegistry())); + assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(2), xContentRegistry())); + assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(12), xContentRegistry())); } public void testSerializationOfComplexAggs() throws IOException { @@ -716,11 +680,8 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase terms = Collections.singletonMap(BoolQueryBuilder.NAME, - Collections.singletonMap("filter", - Collections.singletonMap(TermQueryBuilder.NAME, - Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))))); - datafeedConfigBuilder.setQuery(terms); + 
datafeedConfigBuilder.setQueryProvider( + createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); DatafeedConfig datafeedConfig = datafeedConfigBuilder.build(); AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder().addAggregator(dateHistogram); @@ -736,18 +697,20 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase terms = Collections.singletonMap(BoolQueryBuilder.NAME, - Collections.singletonMap("filter", - Collections.singletonList( - Collections.singletonMap(TermQueryBuilder.NAME, - Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)))))); // So equality check between the streamed and current passes // Streamed DatafeedConfigs when they are before 6.6.0 require a parsed object for aggs and queries, consequently all the default // values are added between them - datafeedConfigBuilder.setQuery(QUERY_TRANSFORMER.toMap(QUERY_TRANSFORMER.fromMap(terms))); + datafeedConfigBuilder.setQueryProvider( + QueryProvider + .fromParsedQuery(QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))))); DatafeedConfig datafeedConfig = datafeedConfigBuilder.build(); SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); @@ -793,8 +754,8 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase builder.setQuery(Collections.emptyMap(), false)); - assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(ex.getMessage(), equalTo("Datafeed [empty_query_map] query is not parsable")); - } - - public void testEmptyAggMap() { - DatafeedConfig.Builder builder = new DatafeedConfig.Builder("empty_agg_map", "job1"); - ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, - () -> builder.setAggregations(Collections.emptyMap(), false)); - assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST)); - 
assertThat(ex.getMessage(), equalTo("Datafeed [empty_agg_map] aggregations are not parsable")); - } - public static String randomValidDatafeedId() { CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray()); return generator.ofCodePointsLength(random(), 10, 10); @@ -884,18 +829,16 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase query = new HashMap<>(); - if (instance.getQuery() != null) { - query.put("must", instance.getQuery()); + BoolQueryBuilder query = new BoolQueryBuilder(); + if (instance.getParsedQuery(xContentRegistry()) != null) { + query.must(instance.getParsedQuery(xContentRegistry())); } - query.put("filter", Collections.singletonList( - Collections.singletonMap(TermQueryBuilder.NAME, - Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))))); - builder.setQuery(query); + query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); + builder.setParsedQuery(query); break; case 6: if (instance.hasAggregations()) { - builder.setAggregations(null); + builder.setAggProvider(null); } else { AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder(); String timeField = randomAlphaOfLength(10); @@ -912,7 +855,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase scriptFields = new ArrayList<>(instance.getScriptFields()); scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true)); builder.setScriptFields(scriptFields); - builder.setAggregations(null); + builder.setAggProvider(null); break; case 8: builder.setScrollSize(instance.getScrollSize() + between(1, 100)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index 96798b251d3..62436172d92 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -5,9 +5,12 @@ */ package org.elasticsearch.xpack.core.ml.datafeed; -import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -17,23 +20,34 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; +import 
org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig.Mode; import org.elasticsearch.xpack.core.ml.job.config.JobTests; +import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; import java.io.IOException; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; +import static org.elasticsearch.xpack.core.ml.datafeed.AggProviderTests.createRandomValidAggProvider; +import static org.elasticsearch.xpack.core.ml.datafeed.QueryProviderTests.createRandomValidQueryProvider; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -64,8 +78,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase DatafeedUpdate.PARSER.apply(parser, null)); assertThat(ex.getMessage(), equalTo("[32:3] [datafeed_update] failed to parse field [aggs]")); @@ -192,13 +204,13 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase maxTime = Collections.singletonMap("time", - Collections.singletonMap("max", Collections.singletonMap("field", "time"))); - Map histoDefinition = new HashMap<>(); - histoDefinition.put("interval", 300000); - histoDefinition.put("field", "time"); - Map aggBody = new HashMap<>(); - aggBody.put("histogram", histoDefinition); - aggBody.put("aggs", maxTime); - Map aggMap = Collections.singletonMap("a", aggBody); - update.setAggregations(aggMap); + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + AggProvider aggProvider = AggProvider.fromParsedAggs(new AggregatorFactories.Builder().addAggregator( + 
AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime))); + update.setAggregations(aggProvider); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1"))); - assertThat(updatedDatafeed.getAggregations(), equalTo(aggMap)); + assertThat(updatedDatafeed.getParsedAggregations(xContentRegistry()), equalTo(aggProvider.getParsedAggs())); + assertThat(updatedDatafeed.getAggregations(), equalTo(aggProvider.getAggs())); } public void testApply_GivenRandomUpdates_AssertImmutability() { @@ -249,7 +256,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase builder.setQuery(Collections.emptyMap())); - assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(ex.getMessage(), equalTo("Datafeed [empty_query_map] query is not parsable")); - } + public void testSerializationOfComplexAggsBetweenVersions() throws IOException { + MaxAggregationBuilder maxTime = AggregationBuilders.max("timestamp").field("timestamp"); + AvgAggregationBuilder avgAggregationBuilder = AggregationBuilders.avg("bytes_in_avg").field("system.network.in.bytes"); + DerivativePipelineAggregationBuilder derivativePipelineAggregationBuilder = + PipelineAggregatorBuilders.derivative("bytes_in_derivative", "bytes_in_avg"); + BucketScriptPipelineAggregationBuilder bucketScriptPipelineAggregationBuilder = + PipelineAggregatorBuilders.bucketScript("non_negative_bytes", + Collections.singletonMap("bytes", "bytes_in_derivative"), + new Script("params.bytes > 0 ? 
params.bytes : null")); + DateHistogramAggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram("histogram_buckets") + .field("timestamp").interval(300000).timeZone(ZoneOffset.UTC) + .subAggregation(maxTime) + .subAggregation(avgAggregationBuilder) + .subAggregation(derivativePipelineAggregationBuilder) + .subAggregation(bucketScriptPipelineAggregationBuilder); + AggregatorFactories.Builder aggs = new AggregatorFactories.Builder().addAggregator(dateHistogram); + DatafeedUpdate.Builder datafeedUpdateBuilder = new DatafeedUpdate.Builder("df-update-past-serialization-test"); + datafeedUpdateBuilder.setAggregations(new AggProvider( + XContentObjectTransformer.aggregatorTransformer(xContentRegistry()).toMap(aggs), + aggs, + null)); + // So equality check between the streamed and current passes + // Streamed DatafeedConfigs when they are before 6.6.0 require a parsed object for aggs and queries, consequently all the default + // values are added between them + datafeedUpdateBuilder.setQuery( + QueryProvider + .fromParsedQuery(QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))))); + DatafeedUpdate datafeedUpdate = datafeedUpdateBuilder.build(); - public void testEmptyAggMap() { - DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder("empty_agg_map"); - ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, - () -> builder.setAggregations(Collections.emptyMap())); - assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(ex.getMessage(), equalTo("Datafeed [empty_agg_map] aggregations are not parsable")); + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(Version.V_6_0_0); + 
datafeedUpdate.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + in.setVersion(Version.V_6_0_0); + DatafeedUpdate streamedDatafeedUpdate = new DatafeedUpdate(in); + assertEquals(datafeedUpdate, streamedDatafeedUpdate); + + // Assert that the parsed versions of our aggs and queries work as well + assertEquals(aggs, streamedDatafeedUpdate.getParsedAgg(xContentRegistry())); + assertEquals(datafeedUpdate.getParsedQuery(xContentRegistry()), streamedDatafeedUpdate.getParsedQuery(xContentRegistry())); + } + } } @Override - protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) { + protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) throws IOException { DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(instance); switch (between(0, 9)) { case 0: @@ -314,31 +354,22 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase boolQuery = new HashMap<>(); + BoolQueryBuilder query = new BoolQueryBuilder(); if (instance.getQuery() != null) { - boolQuery.put("must", instance.getQuery()); + query.must(instance.getParsedQuery(xContentRegistry())); } - boolQuery.put("filter", - Collections.singletonList( - Collections.singletonMap(TermQueryBuilder.NAME, - Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))))); - builder.setQuery(Collections.singletonMap("bool", boolQuery)); + query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); + builder.setQuery(QueryProvider.fromParsedQuery(query)); break; case 6: if (instance.hasAggregations()) { builder.setAggregations(null); } else { + AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder(); String timeField = randomAlphaOfLength(10); - Map maxTime = Collections.singletonMap(timeField, - Collections.singletonMap("max", Collections.singletonMap("field", timeField))); - Map histoDefinition = new 
HashMap<>(); - histoDefinition.put("interval", between(10000, 3600000)); - histoDefinition.put("field", timeField); - Map aggBody = new HashMap<>(); - aggBody.put("aggs", maxTime); - aggBody.put("date_histogram", histoDefinition); - Map aggMap = Collections.singletonMap(timeField, aggBody); - builder.setAggregations(aggMap); + aggBuilder.addAggregator(new DateHistogramAggregationBuilder(timeField).field(timeField).interval(between(10000, 3600000)) + .subAggregation(new MaxAggregationBuilder(timeField).field(timeField))); + builder.setAggregations(AggProvider.fromParsedAggs(aggBuilder)); if (instance.getScriptFields().isEmpty() == false) { builder.setScriptFields(Collections.emptyList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java new file mode 100644 index 00000000000..fb6c2e280d9 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java @@ -0,0 +1,185 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.datafeed; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + + +public class QueryProviderTests extends AbstractSerializingTestCase { + + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + @Override + protected NamedWriteableRegistry writableRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, 
Collections.emptyList()); + return new NamedWriteableRegistry(searchModule.getNamedWriteables()); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return writableRegistry(); + } + + @Override + protected QueryProvider createTestInstance() { + return createRandomValidQueryProvider(); + } + + @Override + protected Writeable.Reader instanceReader() { + return QueryProvider::fromStream; + } + + @Override + protected QueryProvider doParseInstance(XContentParser parser) throws IOException { + return QueryProvider.fromXContent(parser, false); + } + + public static QueryProvider createRandomValidQueryProvider() { + return createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); + } + + public static QueryProvider createRandomValidQueryProvider(String field, String value) { + Map terms = Collections.singletonMap(BoolQueryBuilder.NAME, + Collections.singletonMap("filter", + Collections.singletonList( + Collections.singletonMap(TermQueryBuilder.NAME, + Collections.singletonMap(field, value))))); + return new QueryProvider( + terms, + QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(field, value)), + null); + } + + public void testEmptyQueryMap() throws IOException { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{}"); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> QueryProvider.fromXContent(parser, false)); + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + assertThat(e.getMessage(), equalTo("Datafeed query is not parsable")); + } + + public void testSerializationBetweenBugVersion() throws IOException { + QueryProvider tempQueryProvider = createRandomValidQueryProvider(); + QueryProvider queryProviderWithEx = new QueryProvider(tempQueryProvider.getQuery(), + tempQueryProvider.getParsedQuery(), + new IOException("ex")); + try 
(BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(Version.V_6_6_2); + queryProviderWithEx.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { + in.setVersion(Version.V_6_6_2); + QueryProvider streamedQueryProvider = QueryProvider.fromStream(in); + assertThat(streamedQueryProvider.getQuery(), equalTo(queryProviderWithEx.getQuery())); + assertThat(streamedQueryProvider.getParsingException(), is(nullValue())); + + QueryBuilder streamedParsedQuery = XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()) + .fromMap(streamedQueryProvider.getQuery()); + assertThat(streamedParsedQuery, equalTo(queryProviderWithEx.getParsedQuery())); + assertThat(streamedQueryProvider.getParsedQuery(), is(nullValue())); + } + } + } + + public void testSerializationBetweenEagerVersion() throws IOException { + QueryProvider validQueryProvider = createRandomValidQueryProvider(); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(Version.V_6_0_0); + validQueryProvider.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { + in.setVersion(Version.V_6_0_0); + + QueryProvider streamedQueryProvider = QueryProvider.fromStream(in); + XContentObjectTransformer transformer = XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()); + Map sourceQueryMapWithDefaults = transformer.toMap(transformer.fromMap(validQueryProvider.getQuery())); + + assertThat(streamedQueryProvider.getQuery(), equalTo(sourceQueryMapWithDefaults)); + assertThat(streamedQueryProvider.getParsingException(), is(nullValue())); + assertThat(streamedQueryProvider.getParsedQuery(), equalTo(validQueryProvider.getParsedQuery())); + } + } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + QueryProvider queryProviderWithEx = new QueryProvider(validQueryProvider.getQuery(), + 
validQueryProvider.getParsedQuery(), + new IOException("bad parsing")); + output.setVersion(Version.V_6_0_0); + IOException ex = expectThrows(IOException.class, () -> queryProviderWithEx.writeTo(output)); + assertThat(ex.getMessage(), equalTo("bad parsing")); + } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + QueryProvider queryProviderWithEx = new QueryProvider(validQueryProvider.getQuery(), + validQueryProvider.getParsedQuery(), + new ElasticsearchException("bad parsing")); + output.setVersion(Version.V_6_0_0); + ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> queryProviderWithEx.writeTo(output)); + assertNotNull(ex.getCause()); + assertThat(ex.getCause().getMessage(), equalTo("bad parsing")); + } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + QueryProvider queryProviderWithOutParsed = new QueryProvider(validQueryProvider.getQuery(), null, null); + output.setVersion(Version.V_6_0_0); + ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> queryProviderWithOutParsed.writeTo(output)); + assertThat(ex.getMessage(), equalTo("Unsupported operation: parsed query is null")); + } + } + + @Override + protected QueryProvider mutateInstance(QueryProvider instance) throws IOException { + Exception parsingException = instance.getParsingException(); + QueryBuilder parsedQuery = instance.getParsedQuery(); + switch (between(0, 1)) { + case 0: + parsingException = parsingException == null ? new IOException("failed parsing") : null; + break; + case 1: + parsedQuery = parsedQuery == null ? 
+ XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()).fromMap(instance.getQuery()) : + null; + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + return new QueryProvider(instance.getQuery(), parsedQuery, parsingException); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java index 1f61168c420..2e3a6e056ae 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.utils; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentFactory; @@ -16,6 +17,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; @@ -37,12 +39,19 @@ import static org.hamcrest.Matchers.hasSize; public class XContentObjectTransformerTests extends ESTestCase { + @Override + public NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new 
NamedXContentRegistry(searchModule.getNamedXContents()); + } + public void testFromMap() throws IOException { Map aggMap = Collections.singletonMap("fieldName", Collections.singletonMap("max", Collections.singletonMap("field", "fieldName"))); - XContentObjectTransformer aggTransformer = XContentObjectTransformer.aggregatorTransformer(); + XContentObjectTransformer aggTransformer = + XContentObjectTransformer.aggregatorTransformer(xContentRegistry()); assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggMap); assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggTransformer.toMap(aggTransformer.fromMap(aggMap))); @@ -60,7 +69,8 @@ public class XContentObjectTransformerTests extends ESTestCase { put("boost",1.0); }})); - XContentObjectTransformer queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer(); + XContentObjectTransformer queryBuilderTransformer = + XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()); assertXContentAreEqual(queryBuilderTransformer.fromMap(queryMap), queryMap); assertXContentAreEqual(queryBuilderTransformer.fromMap(queryMap), queryBuilderTransformer.toMap(queryBuilderTransformer.fromMap(queryMap))); @@ -73,7 +83,8 @@ public class XContentObjectTransformerTests extends ESTestCase { put("type", "phrase"); //phrase stopped being supported for match in 6.x }})); - XContentObjectTransformer queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer(); + XContentObjectTransformer queryBuilderTransformer = + XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()); ParsingException exception = expectThrows(ParsingException.class, () -> queryBuilderTransformer.fromMap(queryMap)); @@ -85,14 +96,17 @@ public class XContentObjectTransformerTests extends ESTestCase { put("field", "myField"); }})); - XContentObjectTransformer aggTransformer = XContentObjectTransformer.aggregatorTransformer(); + XContentObjectTransformer aggTransformer = + 
XContentObjectTransformer.aggregatorTransformer(xContentRegistry()); XContentParseException xContentParseException = expectThrows(XContentParseException.class, () -> aggTransformer.fromMap(aggMap)); assertThat(xContentParseException.getMessage(), containsString("[terms] failed to parse field [size]")); } public void testToMap() throws IOException { - XContentObjectTransformer aggTransformer = XContentObjectTransformer.aggregatorTransformer(); - XContentObjectTransformer queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer(); + XContentObjectTransformer aggTransformer = + XContentObjectTransformer.aggregatorTransformer(xContentRegistry()); + XContentObjectTransformer queryBuilderTransformer = + XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()); AggregatorFactories.Builder aggs = new AggregatorFactories.Builder(); long aggHistogramInterval = randomNonNegativeLong(); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index 5b980d851fc..e1711a84529 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -45,7 +46,7 @@ public class DeprecationChecks { Collections.unmodifiableList(Arrays.asList( 
IndexDeprecationChecks::oldIndicesCheck)); - static List> ML_SETTINGS_CHECKS = + static List> ML_SETTINGS_CHECKS = Collections.unmodifiableList(Arrays.asList( MlDeprecationChecks::checkDataFeedAggregations, MlDeprecationChecks::checkDataFeedQuery diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecks.java index 187a8669574..ebcf160baee 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecks.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.deprecation; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -18,8 +19,8 @@ final class MlDeprecationChecks { private MlDeprecationChecks() { } - static DeprecationIssue checkDataFeedQuery(DatafeedConfig datafeedConfig) { - List deprecations = datafeedConfig.getQueryDeprecations(); + static DeprecationIssue checkDataFeedQuery(DatafeedConfig datafeedConfig, NamedXContentRegistry xContentRegistry) { + List deprecations = datafeedConfig.getQueryDeprecations(xContentRegistry); if (deprecations.isEmpty()) { return null; } else { @@ -30,8 +31,8 @@ final class MlDeprecationChecks { } } - static DeprecationIssue checkDataFeedAggregations(DatafeedConfig datafeedConfig) { - List deprecations = datafeedConfig.getAggDeprecations(); + static DeprecationIssue checkDataFeedAggregations(DatafeedConfig datafeedConfig, NamedXContentRegistry xContentRegistry) { + List deprecations = datafeedConfig.getAggDeprecations(xContentRegistry); if (deprecations.isEmpty()) { return null; } else { diff --git 
a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index bac290d41a5..6556a8ad0ef 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; @@ -48,18 +49,20 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; private final Settings settings; + private final NamedXContentRegistry xContentRegistry; @Inject public TransportDeprecationInfoAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - XPackLicenseState licenseState, NodeClient client) { + XPackLicenseState licenseState, NodeClient client, NamedXContentRegistry xContentRegistry) { super(DeprecationInfoAction.NAME, transportService, clusterService, threadPool, actionFilters, DeprecationInfoAction.Request::new, indexNameExpressionResolver); this.licenseState = licenseState; this.client = client; this.indexNameExpressionResolver = indexNameExpressionResolver; this.settings = settings; + this.xContentRegistry = xContentRegistry; } 
@Override @@ -99,7 +102,7 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio getDatafeedConfigs(ActionListener.wrap( datafeeds -> { listener.onResponse( - DeprecationInfoAction.Response.from(state, indexNameExpressionResolver, + DeprecationInfoAction.Response.from(state, xContentRegistry, indexNameExpressionResolver, request.indices(), request.indicesOptions(), datafeeds, response, INDEX_SETTINGS_CHECKS, CLUSTER_SETTINGS_CHECKS, ML_SETTINGS_CHECKS)); diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecksTests.java index bf868c86bae..8d98cc2adbe 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecksTests.java @@ -6,7 +6,10 @@ package org.elasticsearch.xpack.deprecation; -import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -14,6 +17,12 @@ import java.util.Collections; public class MlDeprecationChecksTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + @Override protected boolean enableWarningsCheck() { return false; @@ -22,8 +31,8 @@ public class MlDeprecationChecksTests extends ESTestCase { public void testCheckDataFeedQuery() { DatafeedConfig.Builder goodDatafeed = new 
DatafeedConfig.Builder("good-df", "job-id"); goodDatafeed.setIndices(Collections.singletonList("some-index")); - goodDatafeed.setQuery(Collections.singletonMap(TermQueryBuilder.NAME, Collections.singletonMap("foo", "bar"))); - assertNull(MlDeprecationChecks.checkDataFeedQuery(goodDatafeed.build())); + goodDatafeed.setParsedQuery(QueryBuilders.termQuery("foo", "bar")); + assertNull(MlDeprecationChecks.checkDataFeedQuery(goodDatafeed.build(), xContentRegistry())); DatafeedConfig.Builder deprecatedDatafeed = new DatafeedConfig.Builder("df-with-deprecated-query", "job-id"); deprecatedDatafeed.setIndices(Collections.singletonList("some-index")); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DelayedDataDetectorIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DelayedDataDetectorIT.java index aa25cb06193..8243c2e61e6 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DelayedDataDetectorIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DelayedDataDetectorIT.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; @@ -159,9 +159,7 @@ public class DelayedDataDetectorIT extends MlNativeAutodetectIntegTestCase { .subAggregation(avgAggregationBuilder) .field("time") .interval(TimeValue.timeValueMinutes(5).millis()))); - 
datafeedConfigBuilder.setQuery(Collections.singletonMap(RangeQueryBuilder.NAME, - Collections.singletonMap("value", - Collections.singletonMap(RangeQueryBuilder.GTE_FIELD.getPreferredName(), numDocs/2)))); + datafeedConfigBuilder.setParsedQuery(QueryBuilders.rangeQuery("value").gte(numDocs/2)); datafeedConfigBuilder.setFrequency(TimeValue.timeValueMinutes(5)); datafeedConfigBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(12))); @@ -253,6 +251,6 @@ public class DelayedDataDetectorIT extends MlNativeAutodetectIntegTestCase { } private DelayedDataDetector newDetector(Job job, DatafeedConfig datafeedConfig) { - return DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, client()); + return DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, client(), xContentRegistry()); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index f844d813cb5..89b2ec81f87 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTaskState; @@ -53,6 +54,12 @@ import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgno 
*/ abstract class MlNativeIntegTestCase extends ESIntegTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + @Override protected Collection> nodePlugins() { return Arrays.asList(LocalStateCompositeXPackPlugin.class, Netty4Plugin.class); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 4cc5857bad6..3d59a5fb45a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -399,10 +399,18 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu Auditor auditor = new Auditor(client, clusterService.getNodeName()); JobResultsProvider jobResultsProvider = new JobResultsProvider(client, settings); - JobConfigProvider jobConfigProvider = new JobConfigProvider(client); + JobConfigProvider jobConfigProvider = new JobConfigProvider(client, xContentRegistry); DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry); UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(client, clusterService, threadPool); - JobManager jobManager = new JobManager(env, settings, jobResultsProvider, clusterService, auditor, threadPool, client, notifier); + JobManager jobManager = new JobManager(env, + settings, + jobResultsProvider, + clusterService, + auditor, + threadPool, + client, + notifier, + xContentRegistry); // special holder for @link(MachineLearningFeatureSetUsage) which needs access to job manager if ML is enabled JobManagerHolder jobManagerHolder = new JobManagerHolder(jobManager); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java index c87fabe0b77..89ad54e9c18 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -38,17 +39,19 @@ public class TransportPreviewDatafeedAction extends HandledTransportAction) PreviewDatafeedAction.Request::new); this.threadPool = threadPool; this.client = client; this.jobConfigProvider = jobConfigProvider; this.datafeedConfigProvider = datafeedConfigProvider; + this.xContentRegistry = xContentRegistry; } @Override @@ -67,7 +70,7 @@ public class TransportPreviewDatafeedAction extends HandledTransportAction() { @Override public void onResponse(DataExtractorFactory dataExtractorFactory) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java index c5a8fee50a5..993fe548cc9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java @@ -62,6 +62,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction validationOk = ok -> { 
datafeedConfigProvider.putDatafeedConfig(request.getDatafeed(), headers, ActionListener.wrap( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java index f81fcfbfb1d..1a0d1b84c87 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.RemoteClusterLicenseChecker; import org.elasticsearch.license.XPackLicenseState; @@ -78,6 +79,7 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction deprecationWarnings = new ArrayList<>(); - deprecationWarnings.addAll(datafeed.getAggDeprecations()); - deprecationWarnings.addAll(datafeed.getQueryDeprecations()); + deprecationWarnings.addAll(datafeed.getAggDeprecations(xContentRegistry)); + deprecationWarnings.addAll(datafeed.getQueryDeprecations(xContentRegistry)); if (deprecationWarnings.isEmpty() == false) { String msg = "datafeed [" + datafeed.getId() +"] configuration has deprecations. 
[" + Strings.collectionToDelimitedString(deprecationWarnings, ", ") + "]"; @@ -200,8 +206,8 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction { try { Job job = jobBuilder.build(); - validate(job, datafeedConfigHolder.get(), tasks); - auditDeprecations(datafeedConfigHolder.get(), job, auditor); + validate(job, datafeedConfigHolder.get(), tasks, xContentRegistry); + auditDeprecations(datafeedConfigHolder.get(), job, auditor, xContentRegistry); createDataExtrator.accept(job); } catch (Exception e) { listener.onFailure(e); @@ -231,7 +237,7 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction> listener) { - DataExtractorFactory.create(client, datafeed, job, ActionListener.wrap( + DataExtractorFactory.create(client, datafeed, job, xContentRegistry, ActionListener.wrap( dataExtractorFactory -> persistentTasksService.sendStartRequest(MlTasks.datafeedTaskId(params.getDatafeedId()), MlTasks.DATAFEED_TASK_NAME, params, listener) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java index 09a8f219afc..b7029eff79c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java @@ -49,7 +49,7 @@ public class TransportUpdateDatafeedAction extends TransportMasterNodeAction listener) { JobResultsProvider jobResultsProvider = new JobResultsProvider(client, settings); - JobConfigProvider jobConfigProvider = new JobConfigProvider(client); + JobConfigProvider jobConfigProvider = new JobConfigProvider(client, xContentRegistry); DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry); build(datafeedId, jobResultsProvider, jobConfigProvider, datafeedConfigProvider, listener); @@ 
-72,10 +72,10 @@ public class DatafeedJobBuilder { // Step 5. Build datafeed job object Consumer contextHanlder = context -> { - TimeValue frequency = getFrequencyOrDefault(datafeedConfigHolder.get(), jobHolder.get()); + TimeValue frequency = getFrequencyOrDefault(datafeedConfigHolder.get(), jobHolder.get(), xContentRegistry); TimeValue queryDelay = datafeedConfigHolder.get().getQueryDelay(); DelayedDataDetector delayedDataDetector = - DelayedDataDetectorFactory.buildDetector(jobHolder.get(), datafeedConfigHolder.get(), client); + DelayedDataDetectorFactory.buildDetector(jobHolder.get(), datafeedConfigHolder.get(), client, xContentRegistry); DatafeedJob datafeedJob = new DatafeedJob(jobHolder.get().getId(), buildDataDescription(jobHolder.get()), frequency.millis(), queryDelay.millis(), context.dataExtractorFactory, client, auditor, currentTimeSupplier, delayedDataDetector, @@ -102,7 +102,7 @@ public class DatafeedJobBuilder { if (dataCounts.getLatestRecordTimeStamp() != null) { context.latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime(); } - DataExtractorFactory.create(client, datafeedConfigHolder.get(), jobHolder.get(), dataExtractorFactoryHandler); + DataExtractorFactory.create(client, datafeedConfigHolder.get(), jobHolder.get(), xContentRegistry, dataExtractorFactoryHandler); }; // Collect data counts @@ -137,7 +137,7 @@ public class DatafeedJobBuilder { jobBuilder -> { try { jobHolder.set(jobBuilder.build()); - DatafeedJobValidator.validate(datafeedConfigHolder.get(), jobHolder.get()); + DatafeedJobValidator.validate(datafeedConfigHolder.get(), jobHolder.get(), xContentRegistry); jobIdConsumer.accept(jobHolder.get().getId()); } catch (Exception e) { listener.onFailure(e); @@ -162,11 +162,11 @@ public class DatafeedJobBuilder { datafeedConfigProvider.getDatafeedConfig(datafeedId, datafeedConfigListener); } - private static TimeValue getFrequencyOrDefault(DatafeedConfig datafeed, Job job) { + private static TimeValue 
getFrequencyOrDefault(DatafeedConfig datafeed, Job job, NamedXContentRegistry xContentRegistry) { TimeValue frequency = datafeed.getFrequency(); if (frequency == null) { TimeValue bucketSpan = job.getAnalysisConfig().getBucketSpan(); - return datafeed.defaultFrequency(bucketSpan); + return datafeed.defaultFrequency(bucketSpan, xContentRegistry); } return frequency; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactory.java index 37f439df7c2..88f8e6caadf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactory.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.datafeed.delayeddatacheck; import org.elasticsearch.client.Client; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DelayedDataCheckConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -33,9 +34,13 @@ public class DelayedDataDetectorFactory { * @param job The {@link Job} object for the given `datafeedConfig` * @param datafeedConfig The {@link DatafeedConfig} for which to create the {@link DelayedDataDetector} * @param client The {@link Client} capable of taking action against the ES Cluster. 
+ * @param xContentRegistry The current NamedXContentRegistry with which to parse the query * @return A new {@link DelayedDataDetector} */ - public static DelayedDataDetector buildDetector(Job job, DatafeedConfig datafeedConfig, Client client) { + public static DelayedDataDetector buildDetector(Job job, + DatafeedConfig datafeedConfig, + Client client, + NamedXContentRegistry xContentRegistry) { if (datafeedConfig.getDelayedDataCheckConfig().isEnabled()) { long window = validateAndCalculateWindowLength(job.getAnalysisConfig().getBucketSpan(), datafeedConfig.getDelayedDataCheckConfig().getCheckWindow()); @@ -44,7 +49,7 @@ public class DelayedDataDetectorFactory { window, job.getId(), job.getDataDescription().getTimeField(), - datafeedConfig.getParsedQuery(), + datafeedConfig.getParsedQuery(xContentRegistry), datafeedConfig.getIndices().toArray(new String[0]), client); } else { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java index 77e2c695db7..bca57f7155a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.datafeed.extractor; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -25,10 +26,14 @@ public interface DataExtractorFactory { /** * Creates a {@code DataExtractorFactory} for the given datafeed-job combination. 
*/ - static void create(Client client, DatafeedConfig datafeed, Job job, ActionListener listener) { + static void create(Client client, + DatafeedConfig datafeed, + Job job, + NamedXContentRegistry xContentRegistry, + ActionListener listener) { ActionListener factoryHandler = ActionListener.wrap( factory -> listener.onResponse(datafeed.getChunkingConfig().isEnabled() - ? new ChunkedDataExtractorFactory(client, datafeed, job, factory) : factory) + ? new ChunkedDataExtractorFactory(client, datafeed, job, xContentRegistry, factory) : factory) , listener::onFailure ); @@ -36,13 +41,13 @@ public interface DataExtractorFactory { response -> { if (response.getJobs().isEmpty()) { // This means no rollup indexes are in the config if (datafeed.hasAggregations()) { - factoryHandler.onResponse(new AggregationDataExtractorFactory(client, datafeed, job)); + factoryHandler.onResponse(new AggregationDataExtractorFactory(client, datafeed, job, xContentRegistry)); } else { - ScrollDataExtractorFactory.create(client, datafeed, job, factoryHandler); + ScrollDataExtractorFactory.create(client, datafeed, job, xContentRegistry, factoryHandler); } } else { if (datafeed.hasAggregations()) { // Rollup indexes require aggregations - RollupDataExtractorFactory.create(client, datafeed, job, response.getJobs(), factoryHandler); + RollupDataExtractorFactory.create(client, datafeed, job, response.getJobs(), xContentRegistry, factoryHandler); } else { listener.onFailure(new IllegalArgumentException("Aggregations are required when using Rollup indices")); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java index 12c4a47228f..de205b276a0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; @@ -19,23 +20,25 @@ public class AggregationDataExtractorFactory implements DataExtractorFactory { private final Client client; private final DatafeedConfig datafeedConfig; private final Job job; + private final NamedXContentRegistry xContentRegistry; - public AggregationDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) { + public AggregationDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, NamedXContentRegistry xContentRegistry) { this.client = Objects.requireNonNull(client); this.datafeedConfig = Objects.requireNonNull(datafeedConfig); this.job = Objects.requireNonNull(job); + this.xContentRegistry = xContentRegistry; } @Override public DataExtractor newExtractor(long start, long end) { - long histogramInterval = datafeedConfig.getHistogramIntervalMillis(); + long histogramInterval = datafeedConfig.getHistogramIntervalMillis(xContentRegistry); AggregationDataExtractorContext dataExtractorContext = new AggregationDataExtractorContext( job.getId(), job.getDataDescription().getTimeField(), job.getAnalysisConfig().analysisFields(), datafeedConfig.getIndices(), - datafeedConfig.getParsedQuery(), - datafeedConfig.getParsedAggregations(), + datafeedConfig.getParsedQuery(xContentRegistry), + datafeedConfig.getParsedAggregations(xContentRegistry), Intervals.alignToCeil(start, histogramInterval), Intervals.alignToFloor(end, histogramInterval), 
job.getAnalysisConfig().getSummaryCountFieldName().equals(DatafeedConfig.DOC_COUNT), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java index d5290611ab0..4971ad83879 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -41,23 +42,25 @@ public class RollupDataExtractorFactory implements DataExtractorFactory { private final Client client; private final DatafeedConfig datafeedConfig; private final Job job; + private final NamedXContentRegistry xContentRegistry; - private RollupDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) { + private RollupDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, NamedXContentRegistry xContentRegistry) { this.client = Objects.requireNonNull(client); this.datafeedConfig = Objects.requireNonNull(datafeedConfig); this.job = Objects.requireNonNull(job); + this.xContentRegistry = xContentRegistry; } @Override public DataExtractor newExtractor(long start, long end) { - long histogramInterval = datafeedConfig.getHistogramIntervalMillis(); + long 
histogramInterval = datafeedConfig.getHistogramIntervalMillis(xContentRegistry); AggregationDataExtractorContext dataExtractorContext = new AggregationDataExtractorContext( job.getId(), job.getDataDescription().getTimeField(), job.getAnalysisConfig().analysisFields(), datafeedConfig.getIndices(), - datafeedConfig.getParsedQuery(), - datafeedConfig.getParsedAggregations(), + datafeedConfig.getParsedQuery(xContentRegistry), + datafeedConfig.getParsedAggregations(xContentRegistry), Intervals.alignToCeil(start, histogramInterval), Intervals.alignToFloor(end, histogramInterval), job.getAnalysisConfig().getSummaryCountFieldName().equals(DatafeedConfig.DOC_COUNT), @@ -69,10 +72,11 @@ public class RollupDataExtractorFactory implements DataExtractorFactory { DatafeedConfig datafeed, Job job, Map rollupJobsWithCaps, + NamedXContentRegistry xContentRegistry, ActionListener listener) { final AggregationBuilder datafeedHistogramAggregation = getHistogramAggregation( - datafeed.getParsedAggregations().getAggregatorFactories()); + datafeed.getParsedAggregations(xContentRegistry).getAggregatorFactories()); if ((datafeedHistogramAggregation instanceof DateHistogramAggregationBuilder) == false) { listener.onFailure( new IllegalArgumentException("Rollup requires that the datafeed configuration use a [date_histogram] aggregation," + @@ -103,7 +107,8 @@ public class RollupDataExtractorFactory implements DataExtractorFactory { return; } final List flattenedAggs = new ArrayList<>(); - flattenAggregations(datafeed.getParsedAggregations().getAggregatorFactories(), datafeedHistogramAggregation, flattenedAggs); + flattenAggregations(datafeed.getParsedAggregations(xContentRegistry) + .getAggregatorFactories(), datafeedHistogramAggregation, flattenedAggs); if (validIntervalCaps.stream().noneMatch(rollupJobConfig -> hasAggregations(rollupJobConfig, flattenedAggs))) { listener.onFailure( @@ -112,7 +117,7 @@ public class RollupDataExtractorFactory implements DataExtractorFactory { return; } - 
listener.onResponse(new RollupDataExtractorFactory(client, datafeed, job)); + listener.onResponse(new RollupDataExtractorFactory(client, datafeed, job, xContentRegistry)); } private static boolean validInterval(long datafeedInterval, ParsedRollupCaps rollupJobGroupConfig) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java index 76a05e6b4d1..fb8da71faa3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; @@ -20,12 +21,18 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory { private final DatafeedConfig datafeedConfig; private final Job job; private final DataExtractorFactory dataExtractorFactory; + private final NamedXContentRegistry xContentRegistry; - public ChunkedDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, DataExtractorFactory dataExtractorFactory) { + public ChunkedDataExtractorFactory(Client client, + DatafeedConfig datafeedConfig, + Job job, + NamedXContentRegistry xContentRegistry, + DataExtractorFactory dataExtractorFactory) { this.client = Objects.requireNonNull(client); this.datafeedConfig = Objects.requireNonNull(datafeedConfig); this.job = Objects.requireNonNull(job); this.dataExtractorFactory = 
Objects.requireNonNull(dataExtractorFactory); + this.xContentRegistry = xContentRegistry; } @Override @@ -35,7 +42,7 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory { job.getId(), job.getDataDescription().getTimeField(), datafeedConfig.getIndices(), - datafeedConfig.getParsedQuery(), + datafeedConfig.getParsedQuery(xContentRegistry), datafeedConfig.getScrollSize(), timeAligner.alignToCeil(start), timeAligner.alignToFloor(end), @@ -43,7 +50,7 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory { timeAligner, datafeedConfig.getHeaders(), datafeedConfig.hasAggregations(), - datafeedConfig.hasAggregations() ? datafeedConfig.getHistogramIntervalMillis() : null + datafeedConfig.hasAggregations() ? datafeedConfig.getHistogramIntervalMillis(xContentRegistry) : null ); return new ChunkedDataExtractor(client, dataExtractorFactory, dataExtractorContext); } @@ -55,7 +62,7 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory { // the same bucket twice, we need to search buckets aligned to the histogram interval. // This allows us to steer away from partial buckets, and thus avoid the problem of // dropping or duplicating data. 
- return newIntervalTimeAligner(datafeedConfig.getHistogramIntervalMillis()); + return newIntervalTimeAligner(datafeedConfig.getHistogramIntervalMillis(xContentRegistry)); } return newIdentityTimeAligner(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java index 763d718bcc7..ab912f54fe2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -29,12 +30,15 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory { private final DatafeedConfig datafeedConfig; private final Job job; private final TimeBasedExtractedFields extractedFields; + private final NamedXContentRegistry xContentRegistry; - private ScrollDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, TimeBasedExtractedFields extractedFields) { + private ScrollDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, TimeBasedExtractedFields extractedFields, + NamedXContentRegistry xContentRegistry) { this.client = Objects.requireNonNull(client); this.datafeedConfig = Objects.requireNonNull(datafeedConfig); this.job = Objects.requireNonNull(job); this.extractedFields = 
Objects.requireNonNull(extractedFields); + this.xContentRegistry = xContentRegistry; } @Override @@ -43,7 +47,7 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory { job.getId(), extractedFields, datafeedConfig.getIndices(), - datafeedConfig.getParsedQuery(), + datafeedConfig.getParsedQuery(xContentRegistry), datafeedConfig.getScriptFields(), datafeedConfig.getScrollSize(), start, @@ -52,13 +56,17 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory { return new ScrollDataExtractor(client, dataExtractorContext); } - public static void create(Client client, DatafeedConfig datafeed, Job job, ActionListener listener) { + public static void create(Client client, + DatafeedConfig datafeed, + Job job, + NamedXContentRegistry xContentRegistry, + ActionListener listener) { // Step 2. Contruct the factory and notify listener ActionListener fieldCapabilitiesHandler = ActionListener.wrap( fieldCapabilitiesResponse -> { TimeBasedExtractedFields extractedFields = TimeBasedExtractedFields.build(job, datafeed, fieldCapabilitiesResponse); - listener.onResponse(new ScrollDataExtractorFactory(client, datafeed, job, extractedFields)); + listener.onResponse(new ScrollDataExtractorFactory(client, datafeed, job, extractedFields, xContentRegistry)); }, e -> { if (e instanceof IndexNotFoundException) { listener.onFailure(new ResourceNotFoundException("datafeed [" + datafeed.getId() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index ccd0d594eb3..929058739cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import
org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -101,7 +102,7 @@ public class JobManager { */ public JobManager(Environment environment, Settings settings, JobResultsProvider jobResultsProvider, ClusterService clusterService, Auditor auditor, ThreadPool threadPool, - Client client, UpdateJobProcessNotifier updateJobProcessNotifier) { + Client client, UpdateJobProcessNotifier updateJobProcessNotifier, NamedXContentRegistry xContentRegistry) { this.environment = environment; this.jobResultsProvider = Objects.requireNonNull(jobResultsProvider); this.clusterService = Objects.requireNonNull(clusterService); @@ -109,7 +110,7 @@ public class JobManager { this.client = Objects.requireNonNull(client); this.threadPool = Objects.requireNonNull(threadPool); this.updateJobProcessNotifier = updateJobProcessNotifier; - this.jobConfigProvider = new JobConfigProvider(client); + this.jobConfigProvider = new JobConfigProvider(client, xContentRegistry); this.migrationEligibilityCheck = new MlConfigMigrationEligibilityCheck(settings, clusterService); maxModelMemoryLimit = MachineLearningField.MAX_MODEL_MEMORY_LIMIT.get(settings); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 1db25dd5f3b..5ce424a1581 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -102,9 +102,11 @@ public class JobConfigProvider { } private final Client client; + private final NamedXContentRegistry xContentRegistry; - public JobConfigProvider(Client client) { 
+ public JobConfigProvider(Client client, NamedXContentRegistry xContentRegistry) { this.client = client; + this.xContentRegistry = xContentRegistry; } /** @@ -737,7 +739,7 @@ public class JobConfigProvider { getJob(config.getJobId(), ActionListener.wrap( jobBuilder -> { try { - DatafeedJobValidator.validate(config, jobBuilder.build()); + DatafeedJobValidator.validate(config, jobBuilder.build(), xContentRegistry); listener.onResponse(Boolean.TRUE); } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheckTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheckTests.java index 4a70bcf02d3..4d07a93e101 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheckTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheckTests.java @@ -22,9 +22,11 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -52,6 +54,12 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase { clusterService = mock(ClusterService.class); } + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + 
public void testCanStartMigration_givenMigrationIsDisabled() { Settings settings = newSettings(false); givenClusterSettings(settings); @@ -327,7 +335,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase { public void testDatafeedIsEligibleForMigration_givenStartedDatafeed() { Job openJob = JobTests.buildJobBuilder("open-job").build(); MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(openJob, false); - mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap()); + mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap(), xContentRegistry()); String datafeedId = "df-" + openJob.getId(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -353,7 +361,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase { public void testDatafeedIsEligibleForMigration_givenStartedDatafeedAndMigrationIsDisabled() { Job openJob = JobTests.buildJobBuilder("open-job").build(); MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(openJob, false); - mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap()); + mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap(), xContentRegistry()); String datafeedId = "df-" + openJob.getId(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -379,7 +387,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase { public void testDatafeedIsEligibleForMigration_givenStoppedDatafeed() { Job job = JobTests.buildJobBuilder("closed-job").build(); MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(job, false); - mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap()); + mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap(), xContentRegistry()); String datafeedId = "df-" + job.getId(); 
MetaData.Builder metaData = MetaData.builder(); @@ -402,7 +410,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase { public void testDatafeedIsEligibleForMigration_givenUnallocatedDatafeed() { Job job = JobTests.buildJobBuilder("closed-job").build(); MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(job, false); - mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap()); + mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap(), xContentRegistry()); String datafeedId = "df-" + job.getId(); MetaData.Builder metaData = MetaData.builder(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigratorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigratorTests.java index 62c29efdff9..81d344fd1dd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigratorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlConfigMigratorTests.java @@ -14,8 +14,11 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -47,6 +50,12 @@ import static org.mockito.Mockito.when; public class MlConfigMigratorTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + 
return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + public void testNonDeletingJobs() { Job job1 = JobTests.buildJobBuilder("openjob1").build(); Job job2 = JobTests.buildJobBuilder("openjob2").build(); @@ -64,7 +73,7 @@ public class MlConfigMigratorTests extends ESTestCase { .putJob(closedJob, false) .putJob(jobWithoutAllocation, false) .putJob(openJob, false) - .putDatafeed(createCompatibleDatafeed(closedJob.getId()), Collections.emptyMap()); + .putDatafeed(createCompatibleDatafeed(closedJob.getId()), Collections.emptyMap(), xContentRegistry()); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); tasksBuilder.addTask(MlTasks.jobTaskId("jobwithoutallocation"), MlTasks.JOB_TASK_NAME, @@ -103,9 +112,9 @@ public class MlConfigMigratorTests extends ESTestCase { .putJob(job1, false) .putJob(job2, false) .putJob(job3, false) - .putDatafeed(stopppedDatafeed, Collections.emptyMap()) - .putDatafeed(datafeedWithoutAllocation, Collections.emptyMap()) - .putDatafeed(startedDatafeed, Collections.emptyMap()); + .putDatafeed(stopppedDatafeed, Collections.emptyMap(), xContentRegistry()) + .putDatafeed(datafeedWithoutAllocation, Collections.emptyMap(), xContentRegistry()) + .putDatafeed(startedDatafeed, Collections.emptyMap(), xContentRegistry()); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); tasksBuilder.addTask(MlTasks.datafeedTaskId(stopppedDatafeed.getId()), MlTasks.DATAFEED_TASK_NAME, @@ -206,8 +215,8 @@ public class MlConfigMigratorTests extends ESTestCase { MlMetadata.Builder mlMetadata = new MlMetadata.Builder() .putJob(job1, false) .putJob(job2, false) - .putDatafeed(datafeedConfig1, Collections.emptyMap()) - .putDatafeed(datafeedConfig2, Collections.emptyMap()); + .putDatafeed(datafeedConfig1, Collections.emptyMap(), xContentRegistry()) + .putDatafeed(datafeedConfig2, Collections.emptyMap(), xContentRegistry()); MlConfigMigrator.RemovalResult removalResult = 
MlConfigMigrator.removeJobsAndDatafeeds( Arrays.asList(job1, job2), Arrays.asList(datafeedConfig1, datafeedConfig2), mlMetadata.build()); @@ -225,7 +234,7 @@ public class MlConfigMigratorTests extends ESTestCase { MlMetadata.Builder mlMetadata = new MlMetadata.Builder() .putJob(job1, false) .putJob(job2, false) - .putDatafeed(datafeedConfig1, Collections.emptyMap()); + .putDatafeed(datafeedConfig1, Collections.emptyMap(), xContentRegistry()); MlConfigMigrator.RemovalResult removalResult = MlConfigMigrator.removeJobsAndDatafeeds( Arrays.asList(job1, JobTests.buildJobBuilder("job-none").build()), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index 637b1089d9e..0e6df7db57a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -50,7 +50,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { } job = new Job.Builder(job).setAnalysisConfig(analysisConfig).build(); builder.putJob(job, false); - builder.putDatafeed(datafeedConfig, Collections.emptyMap()); + builder.putDatafeed(datafeedConfig, Collections.emptyMap(), xContentRegistry()); } else { builder.putJob(job, false); } @@ -151,7 +151,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { metadataBuilder.putJob(entry.getValue(), true); } for (Map.Entry entry : datafeeds.entrySet()) { - metadataBuilder.putDatafeed(entry.getValue(), Collections.emptyMap()); + metadataBuilder.putDatafeed(entry.getValue(), Collections.emptyMap(), xContentRegistry()); } switch (between(0, 1)) { @@ -172,7 +172,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { } randomJob = new Job.Builder(randomJob).setAnalysisConfig(analysisConfig).build(); metadataBuilder.putJob(randomJob, false); - metadataBuilder.putDatafeed(datafeedConfig, 
Collections.emptyMap()); + metadataBuilder.putDatafeed(datafeedConfig, Collections.emptyMap(), xContentRegistry()); break; default: throw new AssertionError("Illegal randomisation branch"); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java index 18520eecd10..bdb1be97bd2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java @@ -9,14 +9,17 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.license.LicenseService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.MachineLearningField; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -44,6 +47,12 @@ public abstract class MlSingleNodeTestCase extends ESSingleNodeTestCase { return newSettings.build(); } + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + @Override protected Collection> getPlugins() { return pluginList(LocalStateMachineLearning.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java index 21a4fb8763e..2216aa0c391 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java @@ -7,7 +7,10 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; @@ -33,12 +36,18 @@ import static org.mockito.Mockito.verify; public class TransportStartDatafeedActionTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + public void testValidate_jobClosed() { Job job1 = DatafeedManagerTests.createDatafeedJob().build(new Date()); PersistentTasksCustomMetaData tasks = PersistentTasksCustomMetaData.builder().build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); Exception e = expectThrows(ElasticsearchStatusException.class, - () -> TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks)); + () -> TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks, xContentRegistry())); assertThat(e.getMessage(), equalTo("cannot start datafeed [foo-datafeed] because job [job_id] is closed")); } @@ -49,7 +58,7 @@ public class TransportStartDatafeedActionTests extends ESTestCase { PersistentTasksCustomMetaData 
tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); - TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks); + TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks, xContentRegistry()); } public void testValidate_jobOpened() { @@ -59,19 +68,19 @@ public class TransportStartDatafeedActionTests extends ESTestCase { PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); - TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks); + TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks, xContentRegistry()); } public void testDeprecationsLogged() { Job job1 = DatafeedManagerTests.createDatafeedJob().build(new Date()); DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("start-data-feed-test", job1.getId()); DatafeedConfig config = spy(datafeedConfig.build()); - doReturn(Collections.singletonList("Deprecated Agg")).when(config).getAggDeprecations(); - doReturn(Collections.singletonList("Deprecated Query")).when(config).getQueryDeprecations(); + doReturn(Collections.singletonList("Deprecated Agg")).when(config).getAggDeprecations(any(NamedXContentRegistry.class)); + doReturn(Collections.singletonList("Deprecated Query")).when(config).getQueryDeprecations(any(NamedXContentRegistry.class)); Auditor auditor = mock(Auditor.class); - TransportStartDatafeedAction.auditDeprecations(config, job1, auditor); + TransportStartDatafeedAction.auditDeprecations(config, job1, auditor, xContentRegistry()); verify(auditor).warning(job1.getId(), "datafeed [start-data-feed-test] configuration has deprecations. 
[Deprecated Agg, Deprecated Query]"); @@ -81,12 +90,12 @@ public class TransportStartDatafeedActionTests extends ESTestCase { Job job1 = DatafeedManagerTests.createDatafeedJob().build(new Date()); DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("start-data-feed-test", job1.getId()); DatafeedConfig config = spy(datafeedConfig.build()); - doReturn(Collections.emptyList()).when(config).getAggDeprecations(); - doReturn(Collections.emptyList()).when(config).getQueryDeprecations(); + doReturn(Collections.emptyList()).when(config).getAggDeprecations(any(NamedXContentRegistry.class)); + doReturn(Collections.emptyList()).when(config).getQueryDeprecations(any(NamedXContentRegistry.class)); Auditor auditor = mock(Auditor.class); - TransportStartDatafeedAction.auditDeprecations(config, job1, auditor); + TransportStartDatafeedAction.auditDeprecations(config, job1, auditor, xContentRegistry()); verify(auditor, never()).warning(any(), any()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java index 3c34c2e1d67..00db9462c60 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java @@ -6,7 +6,10 @@ package org.elasticsearch.xpack.ml.datafeed; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -28,6 
+31,12 @@ import java.util.Date; public class DatafeedJobValidatorTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + public void testValidate_GivenNonZeroLatency() { String errorMessage = Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY); Job.Builder builder = buildJobBuilder("foo"); @@ -39,7 +48,7 @@ public class DatafeedJobValidatorTests extends ESTestCase { DatafeedConfig datafeedConfig = createValidDatafeedConfig().build(); ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedConfig, job)); + () -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry())); assertEquals(errorMessage, e.getMessage()); } @@ -53,7 +62,7 @@ public class DatafeedJobValidatorTests extends ESTestCase { Job job = builder.build(new Date()); DatafeedConfig datafeedConfig = createValidDatafeedConfig().build(); - DatafeedJobValidator.validate(datafeedConfig, job); + DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()); } public void testVerify_GivenNoLatency() { @@ -64,7 +73,7 @@ public class DatafeedJobValidatorTests extends ESTestCase { Job job = builder.build(new Date()); DatafeedConfig datafeedConfig = createValidDatafeedConfig().build(); - DatafeedJobValidator.validate(datafeedConfig, job); + DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()); } public void testVerify_GivenAggsAndNoSummaryCountField() throws IOException { @@ -79,7 +88,7 @@ public class DatafeedJobValidatorTests extends ESTestCase { DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(1800.0).build(); ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> 
DatafeedJobValidator.validate(datafeedConfig, job)); + () -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry())); assertEquals(errorMessage, e.getMessage()); } @@ -96,7 +105,7 @@ public class DatafeedJobValidatorTests extends ESTestCase { DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(1800.0).build(); ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedConfig, job)); + () -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry())); assertEquals(errorMessage, e.getMessage()); } @@ -109,7 +118,7 @@ public class DatafeedJobValidatorTests extends ESTestCase { builder.setAnalysisConfig(ac); Job job = builder.build(new Date()); DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(900.0).build(); - DatafeedJobValidator.validate(datafeedConfig, job); + DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()); } public void testVerify_GivenHistogramIntervalGreaterThanBucketSpan() throws IOException { @@ -122,7 +131,7 @@ public class DatafeedJobValidatorTests extends ESTestCase { DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(1800001.0).build(); ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedConfig, job)); + () -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry())); assertEquals("Aggregation interval [1800001ms] must be less than or equal to the bucket_span [1800000ms]", e.getMessage()); } @@ -137,11 +146,11 @@ public class DatafeedJobValidatorTests extends ESTestCase { DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(37 * 1000).build(); ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedConfig, job)); + () -> DatafeedJobValidator.validate(datafeedConfig, job, 
xContentRegistry())); assertEquals("Aggregation interval [37000ms] must be a divisor of the bucket_span [300000ms]", e.getMessage()); DatafeedConfig goodDatafeedConfig = createValidDatafeedConfigWithAggs(60 * 1000).build(); - DatafeedJobValidator.validate(goodDatafeedConfig, job); + DatafeedJobValidator.validate(goodDatafeedConfig, job, xContentRegistry()); } public void testVerify_FrequencyIsMultipleOfHistogramInterval() throws IOException { @@ -155,25 +164,25 @@ public class DatafeedJobValidatorTests extends ESTestCase { // Check with multiples datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(60)); - DatafeedJobValidator.validate(datafeedBuilder.build(), job); + DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()); datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(120)); - DatafeedJobValidator.validate(datafeedBuilder.build(), job); + DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()); datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(180)); - DatafeedJobValidator.validate(datafeedBuilder.build(), job); + DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()); datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(240)); - DatafeedJobValidator.validate(datafeedBuilder.build(), job); + DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()); datafeedBuilder.setFrequency(TimeValue.timeValueHours(1)); - DatafeedJobValidator.validate(datafeedBuilder.build(), job); + DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()); // Now non-multiples datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(30)); ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job)); + () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry())); assertEquals("Datafeed frequency [30s] must be a multiple of the 
aggregation interval [60000ms]", e.getMessage()); datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(90)); e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job)); + () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry())); assertEquals("Datafeed frequency [1.5m] must be a multiple of the aggregation interval [60000ms]", e.getMessage()); } @@ -187,16 +196,16 @@ public class DatafeedJobValidatorTests extends ESTestCase { DatafeedConfig.Builder datafeedBuilder = createValidDatafeedConfig(); datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueMinutes(10))); - DatafeedJobValidator.validate(datafeedBuilder.build(), job); + DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()); datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueSeconds(1))); ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job)); + () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry())); assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_TOO_SMALL, "1s", "2s"), e.getMessage()); datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(24))); e = ESTestCase.expectThrows(ElasticsearchStatusException.class, - () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job)); + () -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry())); assertEquals(Messages.getMessage( Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_SPANS_TOO_MANY_BUCKETS, "1d", "2s"), e.getMessage()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java index d776b720ed2..8857472062f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java @@ -6,7 +6,10 @@ package org.elasticsearch.xpack.ml.datafeed.delayeddatacheck; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DelayedDataCheckConfig; @@ -26,29 +29,35 @@ import static org.mockito.Mockito.mock; public class DelayedDataDetectorFactoryTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + public void testBuilder() { Job job = createJob(TimeValue.timeValueSeconds(2)); DatafeedConfig datafeedConfig = createDatafeed(false, null); // Should not throw - assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class)), + assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class), xContentRegistry()), instanceOf(NullDelayedDataDetector.class)); datafeedConfig = createDatafeed(true, TimeValue.timeValueMinutes(10)); // Should not throw - assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class)), + assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class), 
xContentRegistry()), instanceOf(DatafeedDelayedDataDetector.class)); DatafeedConfig tooSmallDatafeedConfig = createDatafeed(true, TimeValue.timeValueSeconds(1)); IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, - () -> DelayedDataDetectorFactory.buildDetector(job, tooSmallDatafeedConfig, mock(Client.class))); + () -> DelayedDataDetectorFactory.buildDetector(job, tooSmallDatafeedConfig, mock(Client.class), xContentRegistry())); assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_TOO_SMALL, "1s", "2s"), e.getMessage()); DatafeedConfig tooBigDatafeedConfig = createDatafeed(true, TimeValue.timeValueHours(12)); e = ESTestCase.expectThrows(IllegalArgumentException.class, - () -> DelayedDataDetectorFactory.buildDetector(job, tooBigDatafeedConfig, mock(Client.class))); + () -> DelayedDataDetectorFactory.buildDetector(job, tooBigDatafeedConfig, mock(Client.class), xContentRegistry())); assertEquals(Messages.getMessage( Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_SPANS_TOO_MANY_BUCKETS, "12h", "2s"), e.getMessage()); @@ -57,14 +66,14 @@ public class DelayedDataDetectorFactoryTests extends ESTestCase { // Should not throw DelayedDataDetector delayedDataDetector = - DelayedDataDetectorFactory.buildDetector(withBigBucketSpan, datafeedConfig, mock(Client.class)); + DelayedDataDetectorFactory.buildDetector(withBigBucketSpan, datafeedConfig, mock(Client.class), xContentRegistry()); assertThat(delayedDataDetector.getWindow(), equalTo(TimeValue.timeValueHours(1).millis() * 8)); datafeedConfig = createDatafeed(true, null); // Should not throw delayedDataDetector = - DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class)); + DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class), xContentRegistry()); assertThat(delayedDataDetector.getWindow(), equalTo(TimeValue.timeValueHours(2).millis())); } @@ -98,5 +107,4 @@ public class DelayedDataDetectorFactoryTests extends ESTestCase 
{ return builder.build(); } - } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java index 1478a485cc4..dee28e71a7b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -61,6 +63,12 @@ public class DataExtractorFactoryTests extends ESTestCase { private Client client; + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + @Before public void setUpTests() { client = mock(Client.class); @@ -101,7 +109,7 @@ public class DataExtractorFactoryTests extends ESTestCase { e -> fail() ); - DataExtractorFactory.create(client, datafeedConfig, jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig, jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenScrollWithAutoChunk() { @@ -117,7 +125,7 @@ public class DataExtractorFactoryTests extends ESTestCase { e -> fail() ); - 
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenScrollWithOffChunk() { @@ -133,7 +141,7 @@ public class DataExtractorFactoryTests extends ESTestCase { e -> fail() ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenDefaultAggregation() { @@ -151,7 +159,7 @@ public class DataExtractorFactoryTests extends ESTestCase { e -> fail() ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenAggregationWithOffChunk() { @@ -170,7 +178,7 @@ public class DataExtractorFactoryTests extends ESTestCase { e -> fail() ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenDefaultAggregationWithAutoChunk() { @@ -189,7 +197,7 @@ public class DataExtractorFactoryTests extends ESTestCase { e -> fail() ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenRollupAndValidAggregation() { @@ -209,7 +217,7 @@ public class DataExtractorFactoryTests extends ESTestCase { dataExtractorFactory -> 
assertThat(dataExtractorFactory, instanceOf(RollupDataExtractorFactory.class)), e -> fail() ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenRollupAndValidAggregationAndAutoChunk() { @@ -229,7 +237,7 @@ public class DataExtractorFactoryTests extends ESTestCase { dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(ChunkedDataExtractorFactory.class)), e -> fail() ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenRollupButNoAggregations() { @@ -249,7 +257,7 @@ public class DataExtractorFactoryTests extends ESTestCase { } ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenRollupWithBadInterval() { @@ -274,7 +282,7 @@ public class DataExtractorFactoryTests extends ESTestCase { assertThat(e, instanceOf(IllegalArgumentException.class)); } ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenRollupMissingTerms() { @@ -298,7 +306,7 @@ public class DataExtractorFactoryTests extends ESTestCase { assertThat(e, instanceOf(IllegalArgumentException.class)); } ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + 
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } public void testCreateDataExtractorFactoryGivenRollupMissingMetric() { @@ -322,7 +330,7 @@ public class DataExtractorFactoryTests extends ESTestCase { assertThat(e, instanceOf(IllegalArgumentException.class)); } ); - DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); } private void givenAggregatableRollup(String field, String type, int minuteInterval, String... groupByTerms) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java index c9a2e8712e2..fdfd75759db 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.test.ESTestCase; @@ -17,6 +20,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.junit.Before; import java.util.Arrays; +import java.util.Collections; import java.util.Date; import static org.hamcrest.Matchers.equalTo; @@ -31,6 +35,12 @@ public class 
AggregationDataExtractorFactoryTests extends ESTestCase { client = mock(Client.class); } + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + public void testNewExtractor_GivenAlignedTimes() { AggregationDataExtractorFactory factory = createFactory(1000L); @@ -66,6 +76,6 @@ public class AggregationDataExtractorFactoryTests extends ESTestCase { DatafeedConfig.Builder datafeedConfigBuilder = new DatafeedConfig.Builder("foo-feed", jobBuilder.getId()); datafeedConfigBuilder.setParsedAggregations(aggs); datafeedConfigBuilder.setIndices(Arrays.asList("my_index")); - return new AggregationDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date())); + return new AggregationDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date()), xContentRegistry()); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java index 77a8c936beb..058e3c9e786 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import 
org.elasticsearch.test.ESTestCase; @@ -18,6 +21,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.junit.Before; import java.util.Arrays; +import java.util.Collections; import java.util.Date; import static org.hamcrest.Matchers.equalTo; @@ -28,6 +32,12 @@ public class ChunkedDataExtractorFactoryTests extends ESTestCase { private Client client; private DataExtractorFactory dataExtractorFactory; + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + @Before public void setUpMocks() { client = mock(Client.class); @@ -93,6 +103,7 @@ public class ChunkedDataExtractorFactoryTests extends ESTestCase { DatafeedConfig.Builder datafeedConfigBuilder = new DatafeedConfig.Builder("foo-feed", jobBuilder.getId()); datafeedConfigBuilder.setParsedAggregations(aggs); datafeedConfigBuilder.setIndices(Arrays.asList("my_index")); - return new ChunkedDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date()), dataExtractorFactory); + return new ChunkedDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date()), + xContentRegistry(), dataExtractorFactory); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java index f6ff80edeec..b1879eb07f1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java @@ -57,7 +57,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase { @Before public void createComponents() throws Exception { - jobConfigProvider = new JobConfigProvider(client()); + jobConfigProvider = new 
JobConfigProvider(client(), xContentRegistry()); waitForMlTemplates(); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java index 60e9c457f88..0eda4d4dcad 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -90,7 +89,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { final String indexJobId = "job-already-migrated"; // Add a job to the index - JobConfigProvider jobConfigProvider = new JobConfigProvider(client()); + JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); Job indexJob = buildJobBuilder(indexJobId).build(); // Same as index job but has extra fields in its custom settings // which will be used to check the config was overwritten @@ -139,7 +138,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-1", "job-foo"); builder.setIndices(Collections.singletonList("beats*")); - mlMetadata.putDatafeed(builder.build(), Collections.emptyMap()); + mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry()); MetaData.Builder metaData = MetaData.builder(); RoutingTable.Builder routingTable = RoutingTable.builder(); @@ -171,7 +170,7 
@@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { // check the jobs have been migrated AtomicReference> jobsHolder = new AtomicReference<>(); - JobConfigProvider jobConfigProvider = new JobConfigProvider(client()); + JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); @@ -240,7 +239,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { // check the jobs have been migrated AtomicReference> jobsHolder = new AtomicReference<>(); - JobConfigProvider jobConfigProvider = new JobConfigProvider(client()); + JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); @@ -262,7 +261,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { for (int i = 0; i < datafeedCount; i++) { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-" + i, "job-" + i); builder.setIndices(Collections.singletonList("beats*")); - mlMetadata.putDatafeed(builder.build(), Collections.emptyMap()); + mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry()); } MetaData.Builder metaData = MetaData.builder(); @@ -293,7 +292,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { // check the jobs have been migrated AtomicReference> jobsHolder = new AtomicReference<>(); - JobConfigProvider jobConfigProvider = new JobConfigProvider(client()); + JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); @@ -344,7 +343,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { mlMetadata.putJob(buildJobBuilder("job-bar").build(), false); 
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-1", "job-foo"); builder.setIndices(Collections.singletonList("beats*")); - mlMetadata.putDatafeed(builder.build(), Collections.emptyMap()); + mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry()); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metaData(MetaData.builder() @@ -364,7 +363,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { // check the jobs have not been migrated AtomicReference> jobsHolder = new AtomicReference<>(); - JobConfigProvider jobConfigProvider = new JobConfigProvider(client()); + JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); assertNull(exceptionHolder.get()); @@ -392,7 +391,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase { try (InputStream stream = searchResponse.getHits().getAt(0).getSourceRef().streamInput(); XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, stream)) { MlMetadata recoveredMeta = MlMetadata.LENIENT_PARSER.apply(parser, null).build(); assertEquals(expectedMlMetadata, recoveredMeta); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java index c52a5a592d8..60e35cc0323 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; import 
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -37,6 +38,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MachineLearningField; @@ -104,6 +106,12 @@ public class JobManagerTests extends ESTestCase { private Auditor auditor; private UpdateJobProcessNotifier updateJobProcessNotifier; + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + @Before public void setup() throws Exception { Settings settings = Settings.builder() @@ -586,7 +594,7 @@ public class JobManagerTests extends ESTestCase { private JobManager createJobManager(Client client) { return new JobManager(environment, environment.settings(), jobResultsProvider, clusterService, - auditor, threadPool, client, updateJobProcessNotifier); + auditor, threadPool, client, updateJobProcessNotifier, xContentRegistry()); } private ClusterState createClusterState() {