[ML] refactoring lazy query and agg parsing (#39776) (#39881)

* [ML] refactoring lazy query and agg parsing

* Cleaning up and addressing PR comments

* removing unnecessary try/catch block

* removing bad call to logger

* removing unused import

* fixing BWC test failure due to serialization, and fixing the config migrator test

* fixing style issues

* Adjusting DatafeedUpdate class serialization

* Adding todo for refactor in v8

* Making query non-optional so it does not write a boolean byte
Benjamin Trent 2019-03-10 14:54:02 -05:00 committed by GitHub
parent b5ed039160
commit 4da04616c9
49 changed files with 1441 additions and 634 deletions

View File

@ -299,6 +299,16 @@ public class SearchModule {
private final List<NamedWriteableRegistry.Entry> namedWriteables = new ArrayList<>();
private final List<NamedXContentRegistry.Entry> namedXContents = new ArrayList<>();
/**
* Constructs a new SearchModule object
*
* NOTE: This constructor should not be called in production unless an accurate {@link Settings} object is provided.
* When constructed, a static flag is set in Lucene {@link BooleanQuery#setMaxClauseCount} according to the settings.
*
* @param settings Current settings
* @param transportClient Is this being constructed in the TransportClient or not
* @param plugins List of included {@link SearchPlugin} objects.
*/
public SearchModule(Settings settings, boolean transportClient, List<SearchPlugin> plugins) {
this.settings = settings;
this.transportClient = transportClient;
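
As an aside on the NOTE above: this global side effect is exactly why the PR threads a NamedXContentRegistry through the ML classes instead of constructing one on demand. For reference, the static registry construction that XContentObjectTransformer performed before this change (removed later in this diff) was:

SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
// side effect: mutates Lucene's global BooleanQuery.setMaxClauseCount based on Settings.EMPTY
NamedXContentRegistry searchRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());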

View File

@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -34,6 +35,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -187,19 +189,21 @@ public class DeprecationInfoAction extends Action<DeprecationInfoAction.Response
* @return The list of deprecation issues found in the cluster
*/
public static DeprecationInfoAction.Response from(ClusterState state,
NamedXContentRegistry xContentRegistry,
IndexNameExpressionResolver indexNameExpressionResolver,
String[] indices, IndicesOptions indicesOptions,
List<DatafeedConfig> datafeeds,
NodesDeprecationCheckResponse nodeDeprecationResponse,
List<Function<IndexMetaData, DeprecationIssue>> indexSettingsChecks,
List<Function<ClusterState, DeprecationIssue>> clusterSettingsChecks,
List<Function<DatafeedConfig, DeprecationIssue>> mlSettingsCheck) {
List<BiFunction<DatafeedConfig, NamedXContentRegistry, DeprecationIssue>>
mlSettingsCheck) {
List<DeprecationIssue> clusterSettingsIssues = filterChecks(clusterSettingsChecks,
(c) -> c.apply(state));
List<DeprecationIssue> nodeSettingsIssues = mergeNodeIssues(nodeDeprecationResponse);
List<DeprecationIssue> mlSettingsIssues = new ArrayList<>();
for (DatafeedConfig config : datafeeds) {
mlSettingsIssues.addAll(filterChecks(mlSettingsCheck, (c) -> c.apply(config)));
mlSettingsIssues.addAll(filterChecks(mlSettingsCheck, (c) -> c.apply(config, xContentRegistry)));
}
String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, indicesOptions, indices);
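
The ML checks are now BiFunctions so each check can parse the datafeed's lazily stored query/aggs with the caller-supplied registry. A hedged sketch of the new check shape (illustrative only; foundIssue stands in for a real DeprecationIssue, as in the response tests at the end of this diff):

BiFunction<DatafeedConfig, NamedXContentRegistry, DeprecationIssue> check =
    (datafeed, registry) ->
        // a real check inspects the parsed query/aggs; null means "no issue found"
        datafeed.getQueryDeprecations(registry).isEmpty() ? null : foundIssue;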

View File

@ -18,6 +18,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -302,7 +303,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom {
return this;
}
public Builder putDatafeed(DatafeedConfig datafeedConfig, Map<String, String> headers) {
public Builder putDatafeed(DatafeedConfig datafeedConfig, Map<String, String> headers, NamedXContentRegistry xContentRegistry) {
if (datafeeds.containsKey(datafeedConfig.getId())) {
throw ExceptionsHelper.datafeedAlreadyExists(datafeedConfig.getId());
}
@ -310,7 +311,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom {
String jobId = datafeedConfig.getJobId();
checkJobIsAvailableForDatafeed(jobId);
Job job = jobs.get(jobId);
DatafeedJobValidator.validate(datafeedConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry);
if (headers.isEmpty() == false) {
// Adjust the request, adding security headers from the current thread context

View File

@ -0,0 +1,157 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.datafeed;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
class AggProvider implements Writeable, ToXContentObject {
private static final Logger logger = LogManager.getLogger(AggProvider.class);
private Exception parsingException;
private AggregatorFactories.Builder parsedAggs;
private Map<String, Object> aggs;
static AggProvider fromXContent(XContentParser parser, boolean lenient) throws IOException {
Map<String, Object> aggs = parser.mapOrdered();
AggregatorFactories.Builder parsedAggs = null;
Exception exception = null;
try {
if (aggs.isEmpty()) {
throw new Exception("aggs cannot be empty");
}
parsedAggs = XContentObjectTransformer.aggregatorTransformer(parser.getXContentRegistry()).fromMap(aggs);
} catch(Exception ex) {
if (ex.getCause() instanceof IllegalArgumentException) {
ex = (Exception)ex.getCause();
}
exception = ex;
if (lenient) {
logger.warn(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, ex);
} else {
throw ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, ex);
}
}
return new AggProvider(aggs, parsedAggs, exception);
}
static AggProvider fromParsedAggs(AggregatorFactories.Builder parsedAggs) throws IOException {
return parsedAggs == null ?
null :
new AggProvider(
XContentObjectTransformer.aggregatorTransformer(NamedXContentRegistry.EMPTY).toMap(parsedAggs),
parsedAggs,
null);
}
static AggProvider fromStream(StreamInput in) throws IOException {
if (in.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers
return new AggProvider(in.readMap(), in.readOptionalWriteable(AggregatorFactories.Builder::new), in.readException());
} else if (in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects
return new AggProvider(in.readMap(), null, null);
} else { // only supports eagerly parsed objects
return AggProvider.fromParsedAggs(in.readOptionalWriteable(AggregatorFactories.Builder::new));
}
}
AggProvider(Map<String, Object> aggs, AggregatorFactories.Builder parsedAggs, Exception parsingException) {
this.aggs = Collections.unmodifiableMap(new LinkedHashMap<>(Objects.requireNonNull(aggs, "[aggs] must not be null")));
this.parsedAggs = parsedAggs;
this.parsingException = parsingException;
}
AggProvider(AggProvider other) {
this.aggs = new LinkedHashMap<>(other.aggs);
this.parsedAggs = other.parsedAggs;
this.parsingException = other.parsingException;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers
out.writeMap(aggs);
out.writeOptionalWriteable(parsedAggs);
out.writeException(parsingException);
} else if (out.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects
// We allow the lazy parsing nodes that have the bug throw any parsing errors themselves as
// they already have the ability to fully parse the passed Maps
out.writeMap(aggs);
} else { // only supports eagerly parsed objects
if (parsingException != null) {
if (parsingException instanceof IOException) {
throw (IOException) parsingException;
} else {
throw new ElasticsearchException(parsingException);
}
} else if (parsedAggs == null) {
// This is an admittedly rare case but we should fail early instead of writing null when there
// actually are aggregations defined
throw new ElasticsearchException("Unsupported operation: parsed aggregations are null");
}
out.writeOptionalWriteable(parsedAggs);
}
}
public Exception getParsingException() {
return parsingException;
}
AggregatorFactories.Builder getParsedAggs() {
return parsedAggs;
}
public Map<String, Object> getAggs() {
return aggs;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
AggProvider that = (AggProvider) other;
return Objects.equals(this.aggs, that.aggs)
&& Objects.equals(this.parsedAggs, that.parsedAggs)
&& Objects.equals(this.parsingException, that.parsingException);
}
@Override
public int hashCode() {
return Objects.hash(aggs, parsedAggs, parsingException);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.map(aggs);
return builder;
}
}
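
The three-way version gate in fromStream/writeTo above is the heart of the BWC fix. A hypothetical, test-style round-trip sketch of the middle case (a 6.6.x peer that parses lazily but predates the provider wire format; aggProvider is assumed to be in scope):

BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(Version.V_6_6_0); // peer supports lazy maps but not the fixed wire format
aggProvider.writeTo(out);        // writes only the raw aggs map
StreamInput in = out.bytes().streamInput();
in.setVersion(Version.V_6_6_0);
AggProvider roundTripped = AggProvider.fromStream(in); // parsedAggs == null, aggs map intact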

View File

@ -12,16 +12,14 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CachedSupplier;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -43,7 +41,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -66,33 +63,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
private static final int TWO_MINS_SECONDS = 2 * SECONDS_IN_MINUTE;
private static final int TWENTY_MINS_SECONDS = 20 * SECONDS_IN_MINUTE;
private static final int HALF_DAY_SECONDS = 12 * 60 * SECONDS_IN_MINUTE;
static final XContentObjectTransformer<QueryBuilder> QUERY_TRANSFORMER = XContentObjectTransformer.queryBuilderTransformer();
static final TriFunction<Map<String, Object>, String, List<String>, QueryBuilder> lazyQueryParser =
(objectMap, id, warnings) -> {
try {
return QUERY_TRANSFORMER.fromMap(objectMap, warnings);
} catch (Exception exception) {
// Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user
if (exception.getCause() instanceof IllegalArgumentException) {
exception = (Exception)exception.getCause();
}
throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, id), exception);
}
};
static final XContentObjectTransformer<AggregatorFactories.Builder> AGG_TRANSFORMER = XContentObjectTransformer.aggregatorTransformer();
static final TriFunction<Map<String, Object>, String, List<String>, AggregatorFactories.Builder> lazyAggParser =
(objectMap, id, warnings) -> {
try {
return AGG_TRANSFORMER.fromMap(objectMap, warnings);
} catch (Exception exception) {
// Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user
if (exception.getCause() instanceof IllegalArgumentException) {
exception = (Exception)exception.getCause();
}
throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id), exception);
}
};
private static final Logger logger = LogManager.getLogger(DatafeedConfig.class);
@ -152,10 +122,14 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
parser.declareString((builder, val) ->
builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY);
parser.declareObject((builder, val) -> builder.setQuery(val, ignoreUnknownFields), (p, c) -> p.mapOrdered(), QUERY);
parser.declareObject((builder, val) -> builder.setAggregationsSafe(val, ignoreUnknownFields), (p, c) -> p.mapOrdered(),
parser.declareObject(Builder::setQueryProvider,
(p, c) -> QueryProvider.fromXContent(p, ignoreUnknownFields),
QUERY);
parser.declareObject(Builder::setAggregationsSafe,
(p, c) -> AggProvider.fromXContent(p, ignoreUnknownFields),
AGGREGATIONS);
parser.declareObject((builder, val) -> builder.setAggregationsSafe(val, ignoreUnknownFields), (p, c) -> p.mapOrdered(),
parser.declareObject(Builder::setAggregationsSafe,
(p, c) -> AggProvider.fromXContent(p, ignoreUnknownFields),
AGGS);
parser.declareObject(Builder::setScriptFields, (p, c) -> {
List<SearchSourceBuilder.ScriptField> parsedScriptFields = new ArrayList<>();
@ -194,18 +168,16 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
private final TimeValue frequency;
private final List<String> indices;
private final Map<String, Object> query;
private final Map<String, Object> aggregations;
private final QueryProvider queryProvider;
private final AggProvider aggProvider;
private final List<SearchSourceBuilder.ScriptField> scriptFields;
private final Integer scrollSize;
private final ChunkingConfig chunkingConfig;
private final Map<String, String> headers;
private final DelayedDataCheckConfig delayedDataCheckConfig;
private final CachedSupplier<QueryBuilder> querySupplier;
private final CachedSupplier<AggregatorFactories.Builder> aggSupplier;
private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices,
Map<String, Object> query, Map<String, Object> aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
QueryProvider queryProvider, AggProvider aggProvider, List<SearchSourceBuilder.ScriptField> scriptFields,
Integer scrollSize, ChunkingConfig chunkingConfig, Map<String, String> headers,
DelayedDataCheckConfig delayedDataCheckConfig) {
this.id = id;
@ -213,15 +185,13 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.queryDelay = queryDelay;
this.frequency = frequency;
this.indices = indices == null ? null : Collections.unmodifiableList(indices);
this.query = query == null ? null : Collections.unmodifiableMap(query);
this.aggregations = aggregations == null ? null : Collections.unmodifiableMap(aggregations);
this.queryProvider = queryProvider == null ? null : new QueryProvider(queryProvider);
this.aggProvider = aggProvider == null ? null : new AggProvider(aggProvider);
this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields);
this.scrollSize = scrollSize;
this.chunkingConfig = chunkingConfig;
this.headers = Collections.unmodifiableMap(headers);
this.delayedDataCheckConfig = delayedDataCheckConfig;
this.querySupplier = new CachedSupplier<>(() -> lazyQueryParser.apply(query, id, new ArrayList<>()));
this.aggSupplier = new CachedSupplier<>(() -> lazyAggParser.apply(aggregations, id, new ArrayList<>()));
}
public DatafeedConfig(StreamInput in) throws IOException {
@ -240,17 +210,10 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
in.readStringList();
}
}
if (in.getVersion().before(Version.V_6_6_0)) {
this.query = QUERY_TRANSFORMER.toMap(in.readNamedWriteable(QueryBuilder.class));
this.aggregations = AGG_TRANSFORMER.toMap(in.readOptionalWriteable(AggregatorFactories.Builder::new));
} else {
this.query = in.readMap();
if (in.readBoolean()) {
this.aggregations = in.readMap();
} else {
this.aggregations = null;
}
}
// each of these writables are version aware
this.queryProvider = QueryProvider.fromStream(in);
this.aggProvider = in.readOptionalWriteable(AggProvider::fromStream);
if (in.readBoolean()) {
this.scriptFields = Collections.unmodifiableList(in.readList(SearchSourceBuilder.ScriptField::new));
} else {
@ -268,8 +231,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
} else {
delayedDataCheckConfig = DelayedDataCheckConfig.defaultDelayedDataCheckConfig();
}
this.querySupplier = new CachedSupplier<>(() -> lazyQueryParser.apply(query, id, new ArrayList<>()));
this.aggSupplier = new CachedSupplier<>(() -> lazyAggParser.apply(aggregations, id, new ArrayList<>()));
}
/**
@ -310,62 +271,116 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
return scrollSize;
}
public QueryBuilder getParsedQuery() {
return querySupplier.get();
/**
* Get the fully parsed query from the semi-parsed stored {@code Map<String, Object>}
*
* @param namedXContentRegistry XContent registry to transform the lazily parsed query
* @return Fully parsed query
*/
public QueryBuilder getParsedQuery(NamedXContentRegistry namedXContentRegistry) {
return queryProvider == null ? null : parseQuery(namedXContentRegistry, new ArrayList<>());
}
// TODO Remove in v8.0.0
// We only need this NamedXContentRegistry object if getParsedQuery() == null and getParsingException() == null
// This situation only occurs in past versions that contained the lazy parsing support but not the providers (6.6.x)
// We will still need `NamedXContentRegistry` for getting deprecations, but that is a special situation
private QueryBuilder parseQuery(NamedXContentRegistry namedXContentRegistry, List<String> deprecations) {
try {
return queryProvider == null || queryProvider.getQuery() == null ?
null :
XContentObjectTransformer.queryBuilderTransformer(namedXContentRegistry).fromMap(queryProvider.getQuery(), deprecations);
} catch (Exception exception) {
// Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user
if (exception.getCause() instanceof IllegalArgumentException) {
exception = (Exception)exception.getCause();
}
throw ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, exception);
}
}
Exception getQueryParsingException() {
return queryProvider == null ? null : queryProvider.getParsingException();
}
/**
* Calls the lazy parser and returns any gathered deprecations
* Calls the parser and returns any gathered deprecations
*
* @param namedXContentRegistry XContent registry to transform the lazily parsed query
* @return The deprecations from parsing the query
*/
public List<String> getQueryDeprecations() {
return getQueryDeprecations(lazyQueryParser);
}
List<String> getQueryDeprecations(TriFunction<Map<String, Object>, String, List<String>, QueryBuilder> parser) {
public List<String> getQueryDeprecations(NamedXContentRegistry namedXContentRegistry) {
List<String> deprecations = new ArrayList<>();
parser.apply(query, id, deprecations);
parseQuery(namedXContentRegistry, deprecations);
return deprecations;
}
public Map<String, Object> getQuery() {
return query;
}
public AggregatorFactories.Builder getParsedAggregations() {
return aggSupplier.get();
return queryProvider == null ? null : queryProvider.getQuery();
}
/**
* Calls the lazy parser and returns any gathered deprecations
* @return The deprecations from parsing the aggregations
* Fully parses the semi-parsed {@code Map<String, Object>} aggregations
*
* @param namedXContentRegistry XContent registry to transform the lazily parsed aggregations
* @return The fully parsed aggregations
*/
public List<String> getAggDeprecations() {
return getAggDeprecations(lazyAggParser);
public AggregatorFactories.Builder getParsedAggregations(NamedXContentRegistry namedXContentRegistry) {
return aggProvider == null ? null : parseAggregations(namedXContentRegistry, new ArrayList<>());
}
List<String> getAggDeprecations(TriFunction<Map<String, Object>, String, List<String>, AggregatorFactories.Builder> parser) {
// TODO refactor in v8.0.0
// We only need this NamedXContentRegistry object if getParsedQuery() == null and getParsingException() == null
// This situation only occurs in past versions that contained the lazy parsing support but not the providers (6.6.x)
// We will still need `NamedXContentRegistry` for getting deprecations, but that is a special situation
private AggregatorFactories.Builder parseAggregations(NamedXContentRegistry namedXContentRegistry, List<String> deprecations) {
try {
return aggProvider == null || aggProvider.getAggs() == null ?
null :
XContentObjectTransformer.aggregatorTransformer(namedXContentRegistry).fromMap(aggProvider.getAggs(), deprecations);
} catch (Exception exception) {
// Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user
if (exception.getCause() instanceof IllegalArgumentException) {
exception = (Exception)exception.getCause();
}
throw ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, exception);
}
}
Exception getAggParsingException() {
return aggProvider == null ? null : aggProvider.getParsingException();
}
/**
* Calls the parser and returns any gathered deprecations
*
* @param namedXContentRegistry XContent registry to transform the lazily parsed aggregations
* @return The deprecations from parsing the aggregations
*/
public List<String> getAggDeprecations(NamedXContentRegistry namedXContentRegistry) {
List<String> deprecations = new ArrayList<>();
parser.apply(aggregations, id, deprecations);
parseAggregations(namedXContentRegistry, deprecations);
return deprecations;
}
public Map<String, Object> getAggregations() {
return aggregations;
return aggProvider == null ? null : aggProvider.getAggs();
}
/**
* Returns the histogram's interval as epoch millis.
*
* @param namedXContentRegistry XContent registry to transform the lazily parsed aggregations
*/
public long getHistogramIntervalMillis() {
return ExtractorUtils.getHistogramIntervalMillis(getParsedAggregations());
public long getHistogramIntervalMillis(NamedXContentRegistry namedXContentRegistry) {
return ExtractorUtils.getHistogramIntervalMillis(getParsedAggregations(namedXContentRegistry));
}
/**
* @return {@code true} when there are non-empty aggregations, {@code false} otherwise
*/
public boolean hasAggregations() {
return aggregations != null && aggregations.size() > 0;
return aggProvider != null && aggProvider.getAggs() != null && aggProvider.getAggs().size() > 0;
}
public List<SearchSourceBuilder.ScriptField> getScriptFields() {
@ -402,16 +417,11 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
out.writeBoolean(true);
out.writeStringCollection(Collections.emptyList());
}
if (out.getVersion().before(Version.V_6_6_0)) {
out.writeNamedWriteable(getParsedQuery());
out.writeOptionalWriteable(getParsedAggregations());
} else {
out.writeMap(query);
out.writeBoolean(aggregations != null);
if (aggregations != null) {
out.writeMap(aggregations);
}
}
// Each of these writables are version aware
queryProvider.writeTo(out); // never null
out.writeOptionalWriteable(aggProvider);
if (scriptFields != null) {
out.writeBoolean(true);
out.writeList(scriptFields);
@ -441,9 +451,9 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep());
}
builder.field(INDICES.getPreferredName(), indices);
builder.field(QUERY.getPreferredName(), query);
if (aggregations != null) {
builder.field(AGGREGATIONS.getPreferredName(), aggregations);
builder.field(QUERY.getPreferredName(), queryProvider.getQuery());
if (aggProvider != null) {
builder.field(AGGREGATIONS.getPreferredName(), aggProvider.getAggs());
}
if (scriptFields != null) {
builder.startObject(SCRIPT_FIELDS.getPreferredName());
@ -488,9 +498,9 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.queryDelay, that.queryDelay)
&& Objects.equals(this.indices, that.indices)
&& Objects.equals(this.query, that.query)
&& Objects.equals(this.queryProvider, that.queryProvider)
&& Objects.equals(this.scrollSize, that.scrollSize)
&& Objects.equals(this.aggregations, that.aggregations)
&& Objects.equals(this.aggProvider, that.aggProvider)
&& Objects.equals(this.scriptFields, that.scriptFields)
&& Objects.equals(this.chunkingConfig, that.chunkingConfig)
&& Objects.equals(this.headers, that.headers)
@ -499,7 +509,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
@Override
public int hashCode() {
return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig,
return Objects.hash(id, jobId, frequency, queryDelay, indices, queryProvider, scrollSize, aggProvider, scriptFields, chunkingConfig,
headers, delayedDataCheckConfig);
}
@ -525,10 +535,10 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
* @param bucketSpan the bucket span
* @return the default frequency
*/
public TimeValue defaultFrequency(TimeValue bucketSpan) {
public TimeValue defaultFrequency(TimeValue bucketSpan, NamedXContentRegistry xContentRegistry) {
TimeValue defaultFrequency = defaultFrequencyTarget(bucketSpan);
if (hasAggregations()) {
long histogramIntervalMillis = getHistogramIntervalMillis();
long histogramIntervalMillis = getHistogramIntervalMillis(xContentRegistry);
long targetFrequencyMillis = defaultFrequency.millis();
long defaultFrequencyMillis = histogramIntervalMillis > targetFrequencyMillis ? histogramIntervalMillis
: (targetFrequencyMillis / histogramIntervalMillis) * histogramIntervalMillis;
@ -566,8 +576,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
private TimeValue queryDelay;
private TimeValue frequency;
private List<String> indices = Collections.emptyList();
private Map<String, Object> query = Collections.singletonMap(MatchAllQueryBuilder.NAME, Collections.emptyMap());
private Map<String, Object> aggregations;
private QueryProvider queryProvider = QueryProvider.defaultQuery();
private AggProvider aggProvider;
private List<SearchSourceBuilder.ScriptField> scriptFields;
private Integer scrollSize = DEFAULT_SCROLL_SIZE;
private ChunkingConfig chunkingConfig;
@ -588,8 +598,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indices = new ArrayList<>(config.indices);
this.query = config.query == null ? null : new LinkedHashMap<>(config.query);
this.aggregations = config.aggregations == null ? null : new LinkedHashMap<>(config.aggregations);
this.queryProvider = config.queryProvider == null ? null : new QueryProvider(config.queryProvider);
this.aggProvider = config.aggProvider == null ? null : new AggProvider(config.aggProvider);
this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields);
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
@ -627,74 +637,39 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.frequency = frequency;
}
public void setQuery(Map<String, Object> query) {
setQuery(query, true);
public void setQueryProvider(QueryProvider queryProvider) {
this.queryProvider = ExceptionsHelper.requireNonNull(queryProvider, QUERY.getPreferredName());
}
public void setQuery(Map<String, Object> query, boolean lenient) {
this.query = ExceptionsHelper.requireNonNull(query, QUERY.getPreferredName());
// For testing only
public void setParsedQuery(QueryBuilder queryBuilder) {
try {
QUERY_TRANSFORMER.fromMap(query);
} catch(Exception ex) {
String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, id);
if (ex.getCause() instanceof IllegalArgumentException) {
ex = (Exception)ex.getCause();
}
if (lenient) {
logger.warn(msg, ex);
} else {
throw ExceptionsHelper.badRequestException(msg, ex);
}
this.queryProvider = ExceptionsHelper.requireNonNull(QueryProvider.fromParsedQuery(queryBuilder), QUERY.getPreferredName());
} catch (IOException exception) {
// eat exception as it should never happen
logger.error("Exception trying to setParsedQuery", exception);
}
}
// Kept for easier testing
// For testing only
public void setParsedAggregations(AggregatorFactories.Builder aggregations) {
try {
setAggregations(AGG_TRANSFORMER.toMap(aggregations));
} catch (Exception exception) {
// Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user
if (exception.getCause() instanceof IllegalArgumentException) {
exception = (Exception)exception.getCause();
}
throw ExceptionsHelper.badRequestException(
Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id), exception);
this.aggProvider = AggProvider.fromParsedAggs(aggregations);
} catch (IOException exception) {
// eat exception as it should never happen
logger.error("Exception trying to setParsedAggregations", exception);
}
}
private void setAggregationsSafe(Map<String, Object> aggregations, boolean lenient) {
if (this.aggregations != null) {
private void setAggregationsSafe(AggProvider aggProvider) {
if (this.aggProvider != null) {
throw ExceptionsHelper.badRequestException("Found two aggregation definitions: [aggs] and [aggregations]");
}
setAggregations(aggregations, lenient);
this.aggProvider = aggProvider;
}
void setAggregations(Map<String, Object> aggregations) {
setAggregations(aggregations, true);
}
void setAggregations(Map<String, Object> aggregations, boolean lenient) {
this.aggregations = aggregations;
try {
if (aggregations != null && aggregations.isEmpty()) {
throw new Exception("[aggregations] are empty");
}
AGG_TRANSFORMER.fromMap(aggregations);
} catch (Exception ex) {
String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id);
if (ex.getCause() instanceof IllegalArgumentException) {
ex = (Exception)ex.getCause();
}
if (lenient) {
logger.warn(msg, ex);
} else {
throw ExceptionsHelper.badRequestException(msg, ex);
}
}
public void setAggProvider(AggProvider aggProvider) {
this.aggProvider = aggProvider;
}
public void setScriptFields(List<SearchSourceBuilder.ScriptField> scriptFields) {
@ -737,12 +712,12 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
setDefaultChunkingConfig();
setDefaultQueryDelay();
return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize,
return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, queryProvider, aggProvider, scriptFields, scrollSize,
chunkingConfig, headers, delayedDataCheckConfig);
}
void validateScriptFields() {
if (aggregations == null) {
if (aggProvider == null) {
return;
}
if (scriptFields != null && !scriptFields.isEmpty()) {
@ -788,11 +763,13 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
private void setDefaultChunkingConfig() {
if (chunkingConfig == null) {
if (aggregations == null) {
if (aggProvider == null || aggProvider.getParsedAggs() == null) {
chunkingConfig = ChunkingConfig.newAuto();
} else {
long histogramIntervalMillis =
ExtractorUtils.getHistogramIntervalMillis(lazyAggParser.apply(aggregations, id, new ArrayList<>()));
long histogramIntervalMillis = ExtractorUtils.getHistogramIntervalMillis(aggProvider.getParsedAggs());
if (histogramIntervalMillis <= 0) {
throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO);
}
chunkingConfig = ChunkingConfig.newManual(TimeValue.timeValueMillis(
DEFAULT_AGGREGATION_CHUNKING_BUCKETS * histogramIntervalMillis));
}
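
Taken together, the DatafeedConfig changes defer parsing until a call site supplies the node's registry. A hedged caller-side sketch (config, bucketSpan, and xContentRegistry are assumed to be in scope, e.g. injected on the node):

QueryBuilder parsedQuery = config.getParsedQuery(xContentRegistry);
AggregatorFactories.Builder parsedAggs = config.getParsedAggregations(xContentRegistry);
List<String> queryWarnings = config.getQueryDeprecations(xContentRegistry);
TimeValue frequency = config.defaultFrequency(bucketSpan, xContentRegistry);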

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.datafeed;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
@ -21,15 +22,15 @@ public final class DatafeedJobValidator {
* @param datafeedConfig the datafeed config
* @param job the job
*/
public static void validate(DatafeedConfig datafeedConfig, Job job) {
public static void validate(DatafeedConfig datafeedConfig, Job job, NamedXContentRegistry xContentRegistry) {
AnalysisConfig analysisConfig = job.getAnalysisConfig();
if (analysisConfig.getLatency() != null && analysisConfig.getLatency().seconds() > 0) {
throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY));
}
if (datafeedConfig.hasAggregations()) {
checkSummaryCountFieldNameIsSet(analysisConfig);
checkValidHistogramInterval(datafeedConfig, analysisConfig);
checkFrequencyIsMultipleOfHistogramInterval(datafeedConfig);
checkValidHistogramInterval(datafeedConfig, analysisConfig, xContentRegistry);
checkFrequencyIsMultipleOfHistogramInterval(datafeedConfig, xContentRegistry);
}
DelayedDataCheckConfig delayedDataCheckConfig = datafeedConfig.getDelayedDataCheckConfig();
@ -64,8 +65,10 @@ public final class DatafeedJobValidator {
}
}
private static void checkValidHistogramInterval(DatafeedConfig datafeedConfig, AnalysisConfig analysisConfig) {
long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis();
private static void checkValidHistogramInterval(DatafeedConfig datafeedConfig,
AnalysisConfig analysisConfig,
NamedXContentRegistry xContentRegistry) {
long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis(xContentRegistry);
long bucketSpanMillis = analysisConfig.getBucketSpan().millis();
if (histogramIntervalMillis > bucketSpanMillis) {
throw ExceptionsHelper.badRequestException(Messages.getMessage(
@ -82,10 +85,10 @@ public final class DatafeedJobValidator {
}
}
private static void checkFrequencyIsMultipleOfHistogramInterval(DatafeedConfig datafeedConfig) {
private static void checkFrequencyIsMultipleOfHistogramInterval(DatafeedConfig datafeedConfig, NamedXContentRegistry xContentRegistry) {
TimeValue frequency = datafeedConfig.getFrequency();
if (frequency != null) {
long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis();
long histogramIntervalMillis = datafeedConfig.getHistogramIntervalMillis(xContentRegistry);
long frequencyMillis = frequency.millis();
if (frequencyMillis % histogramIntervalMillis != 0) {
throw ExceptionsHelper.badRequestException(Messages.getMessage(

View File

@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -21,8 +22,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer;
import java.io.IOException;
import java.util.ArrayList;
@ -33,10 +34,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.AGG_TRANSFORMER;
import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.QUERY_TRANSFORMER;
import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.lazyAggParser;
import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.lazyQueryParser;
/**
* A datafeed update contains partial properties to update a {@link DatafeedConfig}.
@ -56,9 +53,13 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY);
PARSER.declareString((builder, val) -> builder.setFrequency(
TimeValue.parseTimeValue(val, DatafeedConfig.FREQUENCY.getPreferredName())), DatafeedConfig.FREQUENCY);
PARSER.declareObject(Builder::setQuery, (p, c) -> p.mapOrdered(), DatafeedConfig.QUERY);
PARSER.declareObject(Builder::setAggregationsSafe, (p, c) -> p.mapOrdered(), DatafeedConfig.AGGREGATIONS);
PARSER.declareObject(Builder::setAggregationsSafe,(p, c) -> p.mapOrdered(), DatafeedConfig.AGGS);
PARSER.declareObject(Builder::setQuery, (p, c) -> QueryProvider.fromXContent(p, false), DatafeedConfig.QUERY);
PARSER.declareObject(Builder::setAggregationsSafe,
(p, c) -> AggProvider.fromXContent(p, false),
DatafeedConfig.AGGREGATIONS);
PARSER.declareObject(Builder::setAggregationsSafe,
(p, c) -> AggProvider.fromXContent(p, false),
DatafeedConfig.AGGS);
PARSER.declareObject(Builder::setScriptFields, (p, c) -> {
List<SearchSourceBuilder.ScriptField> parsedScriptFields = new ArrayList<>();
while (p.nextToken() != XContentParser.Token.END_OBJECT) {
@ -79,23 +80,24 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
private final TimeValue queryDelay;
private final TimeValue frequency;
private final List<String> indices;
private final Map<String, Object> query;
private final Map<String, Object> aggregations;
private final QueryProvider queryProvider;
private final AggProvider aggProvider;
private final List<SearchSourceBuilder.ScriptField> scriptFields;
private final Integer scrollSize;
private final ChunkingConfig chunkingConfig;
private final DelayedDataCheckConfig delayedDataCheckConfig;
private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices,
Map<String, Object> query, Map<String, Object> aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
QueryProvider queryProvider, AggProvider aggProvider,
List<SearchSourceBuilder.ScriptField> scriptFields,
Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
this.id = id;
this.jobId = jobId;
this.queryDelay = queryDelay;
this.frequency = frequency;
this.indices = indices;
this.query = query;
this.aggregations = aggregations;
this.queryProvider = queryProvider;
this.aggProvider = aggProvider;
this.scriptFields = scriptFields;
this.scrollSize = scrollSize;
this.chunkingConfig = chunkingConfig;
@ -118,16 +120,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
in.readStringList();
}
}
if (in.getVersion().before(Version.V_7_1_0)) {
this.query = QUERY_TRANSFORMER.toMap(in.readOptionalNamedWriteable(QueryBuilder.class));
this.aggregations = AGG_TRANSFORMER.toMap(in.readOptionalWriteable(AggregatorFactories.Builder::new));
if (in.getVersion().before(Version.V_7_0_0)) {
this.queryProvider = QueryProvider.fromParsedQuery(in.readOptionalNamedWriteable(QueryBuilder.class));
this.aggProvider = AggProvider.fromParsedAggs(in.readOptionalWriteable(AggregatorFactories.Builder::new));
} else {
this.query = in.readMap();
if (in.readBoolean()) {
this.aggregations = in.readMap();
} else {
this.aggregations = null;
}
this.queryProvider = in.readOptionalWriteable(QueryProvider::fromStream);
this.aggProvider = in.readOptionalWriteable(AggProvider::fromStream);
}
if (in.readBoolean()) {
this.scriptFields = in.readList(SearchSourceBuilder.ScriptField::new);
@ -168,15 +166,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
out.writeBoolean(true);
out.writeStringCollection(Collections.emptyList());
}
if (out.getVersion().before(Version.V_7_1_0)) {
out.writeOptionalNamedWriteable(lazyQueryParser.apply(query, id, new ArrayList<>()));
out.writeOptionalWriteable(lazyAggParser.apply(aggregations, id, new ArrayList<>()));
if (out.getVersion().before(Version.V_7_0_0)) {
out.writeOptionalNamedWriteable(queryProvider == null ? null : queryProvider.getParsedQuery());
out.writeOptionalWriteable(aggProvider == null ? null : aggProvider.getParsedAggs());
} else {
out.writeMap(query);
out.writeBoolean(aggregations != null);
if (aggregations != null) {
out.writeMap(aggregations);
}
out.writeOptionalWriteable(queryProvider);
out.writeOptionalWriteable(aggProvider);
}
if (scriptFields != null) {
out.writeBoolean(true);
@ -203,8 +198,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
}
addOptionalField(builder, DatafeedConfig.INDICES, indices);
addOptionalField(builder, DatafeedConfig.QUERY, query);
addOptionalField(builder, DatafeedConfig.AGGREGATIONS, aggregations);
if (queryProvider != null) {
builder.field(DatafeedConfig.QUERY.getPreferredName(), queryProvider.getQuery());
}
if (aggProvider != null) {
builder.field(DatafeedConfig.AGGREGATIONS.getPreferredName(), aggProvider.getAggs());
}
if (scriptFields != null) {
builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName());
for (SearchSourceBuilder.ScriptField scriptField : scriptFields) {
@ -246,11 +245,21 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
}
Map<String, Object> getQuery() {
return query;
return queryProvider == null ? null : queryProvider.getQuery();
}
QueryBuilder getParsedQuery(NamedXContentRegistry namedXContentRegistry) throws IOException {
return XContentObjectTransformer.queryBuilderTransformer(namedXContentRegistry).fromMap(queryProvider.getQuery(),
new ArrayList<>());
}
Map<String, Object> getAggregations() {
return aggregations;
return aggProvider == null ? null : aggProvider.getAggs();
}
AggregatorFactories.Builder getParsedAgg(NamedXContentRegistry namedXContentRegistry) throws IOException {
return XContentObjectTransformer.aggregatorTransformer(namedXContentRegistry).fromMap(aggProvider.getAggs(),
new ArrayList<>());
}
/**
@ -258,7 +267,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
* otherwise
*/
boolean hasAggregations() {
return aggregations != null && aggregations.size() > 0;
return getAggregations() != null && getAggregations().size() > 0;
}
List<SearchSourceBuilder.ScriptField> getScriptFields() {
@ -295,12 +304,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
if (indices != null) {
builder.setIndices(indices);
}
if (query != null) {
builder.setQuery(query);
if (queryProvider != null) {
builder.setQueryProvider(queryProvider);
}
if (aggregations != null) {
DatafeedConfig.validateAggregations(lazyAggParser.apply(aggregations, id, new ArrayList<>()));
builder.setAggregations(aggregations);
if (aggProvider != null) {
DatafeedConfig.validateAggregations(aggProvider.getParsedAggs());
builder.setAggProvider(aggProvider);
}
if (scriptFields != null) {
builder.setScriptFields(scriptFields);
@ -348,9 +357,9 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.queryDelay, that.queryDelay)
&& Objects.equals(this.indices, that.indices)
&& Objects.equals(this.query, that.query)
&& Objects.equals(this.queryProvider, that.queryProvider)
&& Objects.equals(this.scrollSize, that.scrollSize)
&& Objects.equals(this.aggregations, that.aggregations)
&& Objects.equals(this.aggProvider, that.aggProvider)
&& Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig)
&& Objects.equals(this.scriptFields, that.scriptFields)
&& Objects.equals(this.chunkingConfig, that.chunkingConfig);
@ -358,7 +367,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
@Override
public int hashCode() {
return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig,
return Objects.hash(id, jobId, frequency, queryDelay, indices, queryProvider, scrollSize, aggProvider, scriptFields, chunkingConfig,
delayedDataCheckConfig);
}
@ -371,9 +380,9 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
return (frequency == null || Objects.equals(frequency, datafeed.getFrequency()))
&& (queryDelay == null || Objects.equals(queryDelay, datafeed.getQueryDelay()))
&& (indices == null || Objects.equals(indices, datafeed.getIndices()))
&& (query == null || Objects.equals(query, datafeed.getQuery()))
&& (queryProvider == null || Objects.equals(queryProvider.getQuery(), datafeed.getQuery()))
&& (scrollSize == null || Objects.equals(scrollSize, datafeed.getQueryDelay()))
&& (aggregations == null || Objects.equals(aggregations, datafeed.getAggregations()))
&& (aggProvider == null || Objects.equals(aggProvider.getAggs(), datafeed.getAggregations()))
&& (scriptFields == null || Objects.equals(scriptFields, datafeed.getScriptFields()))
&& (delayedDataCheckConfig == null || Objects.equals(delayedDataCheckConfig, datafeed.getDelayedDataCheckConfig()))
&& (chunkingConfig == null || Objects.equals(chunkingConfig, datafeed.getChunkingConfig()));
@ -386,8 +395,8 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
private TimeValue queryDelay;
private TimeValue frequency;
private List<String> indices;
private Map<String, Object> query;
private Map<String, Object> aggregations;
private QueryProvider queryProvider;
private AggProvider aggProvider;
private List<SearchSourceBuilder.ScriptField> scriptFields;
private Integer scrollSize;
private ChunkingConfig chunkingConfig;
@ -406,8 +415,8 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indices = config.indices;
this.query = config.query;
this.aggregations = config.aggregations;
this.queryProvider = config.queryProvider;
this.aggProvider = config.aggProvider;
this.scriptFields = config.scriptFields;
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
@ -434,42 +443,19 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
this.frequency = frequency;
}
public void setQuery(Map<String, Object> query) {
this.query = query;
try {
QUERY_TRANSFORMER.fromMap(query);
} catch(Exception ex) {
String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, id);
if (ex.getCause() instanceof IllegalArgumentException) {
ex = (Exception)ex.getCause();
}
throw ExceptionsHelper.badRequestException(msg, ex);
}
public void setQuery(QueryProvider queryProvider) {
this.queryProvider = queryProvider;
}
private void setAggregationsSafe(Map<String, Object> aggregations) {
if (this.aggregations != null) {
private void setAggregationsSafe(AggProvider aggProvider) {
if (this.aggProvider != null) {
throw ExceptionsHelper.badRequestException("Found two aggregation definitions: [aggs] and [aggregations]");
}
setAggregations(aggregations);
setAggregations(aggProvider);
}
public void setAggregations(Map<String, Object> aggregations) {
this.aggregations = aggregations;
try {
if (aggregations != null && aggregations.isEmpty()) {
throw new Exception("[aggregations] are empty");
}
AGG_TRANSFORMER.fromMap(aggregations);
} catch(Exception ex) {
String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_AGG_BAD_FORMAT, id);
if (ex.getCause() instanceof IllegalArgumentException) {
ex = (Exception)ex.getCause();
}
throw ExceptionsHelper.badRequestException(msg, ex);
}
public void setAggregations(AggProvider aggProvider) {
this.aggProvider = aggProvider;
}
public void setScriptFields(List<SearchSourceBuilder.ScriptField> scriptFields) {
@ -491,7 +477,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
}
public DatafeedUpdate build() {
return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize,
return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, queryProvider, aggProvider, scriptFields, scrollSize,
chunkingConfig, delayedDataCheckConfig);
}
}

View File

@ -0,0 +1,162 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.datafeed;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
class QueryProvider implements Writeable, ToXContentObject {
private static final Logger logger = LogManager.getLogger(QueryProvider.class);
private Exception parsingException;
private QueryBuilder parsedQuery;
private Map<String, Object> query;
static QueryProvider defaultQuery() {
return new QueryProvider(
Collections.singletonMap(MatchAllQueryBuilder.NAME, Collections.emptyMap()),
QueryBuilders.matchAllQuery(),
null);
}
static QueryProvider fromXContent(XContentParser parser, boolean lenient) throws IOException {
Map<String, Object> query = parser.mapOrdered();
QueryBuilder parsedQuery = null;
Exception exception = null;
try {
parsedQuery = XContentObjectTransformer.queryBuilderTransformer(parser.getXContentRegistry()).fromMap(query);
} catch(Exception ex) {
if (ex.getCause() instanceof IllegalArgumentException) {
ex = (Exception)ex.getCause();
}
exception = ex;
if (lenient) {
logger.warn(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, ex);
} else {
throw ExceptionsHelper.badRequestException(Messages.DATAFEED_CONFIG_QUERY_BAD_FORMAT, ex);
}
}
return new QueryProvider(query, parsedQuery, exception);
}
static QueryProvider fromParsedQuery(QueryBuilder parsedQuery) throws IOException {
return parsedQuery == null ?
null :
new QueryProvider(
XContentObjectTransformer.queryBuilderTransformer(NamedXContentRegistry.EMPTY).toMap(parsedQuery),
parsedQuery,
null);
}
static QueryProvider fromStream(StreamInput in) throws IOException {
if (in.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers
return new QueryProvider(in.readMap(), in.readOptionalNamedWriteable(QueryBuilder.class), in.readException());
} else if (in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects
return new QueryProvider(in.readMap(), null, null);
} else { // only supports eagerly parsed objects
return QueryProvider.fromParsedQuery(in.readNamedWriteable(QueryBuilder.class));
}
}
QueryProvider(Map<String, Object> query, QueryBuilder parsedQuery, Exception parsingException) {
this.query = Collections.unmodifiableMap(new LinkedHashMap<>(Objects.requireNonNull(query, "[query] must not be null")));
this.parsedQuery = parsedQuery;
this.parsingException = parsingException;
}
QueryProvider(QueryProvider other) {
this(other.query, other.parsedQuery, other.parsingException);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers
out.writeMap(query);
out.writeOptionalNamedWriteable(parsedQuery);
out.writeException(parsingException);
} else if (out.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects
// We allow the lazy parsing nodes that have the bug throw any parsing errors themselves as
// they already have the ability to fully parse the passed Maps
out.writeMap(query);
} else { // only supports eagerly parsed objects
if (parsingException != null) { // Do we have a parsing error? Throw it
if (parsingException instanceof IOException) {
throw (IOException) parsingException;
} else {
throw new ElasticsearchException(parsingException);
}
} else if (parsedQuery == null) { // Do we have a query defined but not parsed?
// This is an admittedly rare case but we should fail early instead of writing null when there
// actually is a query defined
throw new ElasticsearchException("Unsupported operation: parsed query is null");
}
out.writeNamedWriteable(parsedQuery);
}
}
public Exception getParsingException() {
return parsingException;
}
public QueryBuilder getParsedQuery() {
return parsedQuery;
}
public Map<String, Object> getQuery() {
return query;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
QueryProvider that = (QueryProvider) other;
return Objects.equals(this.query, that.query)
&& Objects.equals(this.parsedQuery, that.parsedQuery)
&& Objects.equals(this.parsingException, that.parsingException);
}
@Override
public int hashCode() {
return Objects.hash(query, parsedQuery, parsingException);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.map(query);
return builder;
}
}
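
This class also realizes the commit note "Making query non-optional so it does not write a boolean byte": DatafeedConfig.Builder seeds queryProvider with QueryProvider.defaultQuery(), so DatafeedConfig.writeTo can call queryProvider.writeTo(out) directly instead of writeOptionalWriteable, saving the presence byte. A minimal sketch of that invariant:

QueryProvider provider = QueryProvider.defaultQuery();
// a parsed match_all with no stored exception, so it is always safely writable
assert provider.getParsedQuery() != null;
assert provider.getParsingException() == null;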

View File

@ -26,8 +26,8 @@ public final class Messages {
"delayed_data_check_config: check_window [{0}] must be greater than the bucket_span [{1}]";
public static final String DATAFEED_CONFIG_DELAYED_DATA_CHECK_SPANS_TOO_MANY_BUCKETS =
"delayed_data_check_config: check_window [{0}] must be less than 10,000x the bucket_span [{1}]";
public static final String DATAFEED_CONFIG_QUERY_BAD_FORMAT = "Datafeed [{0}] query is not parsable";
public static final String DATAFEED_CONFIG_AGG_BAD_FORMAT = "Datafeed [{0}] aggregations are not parsable";
public static final String DATAFEED_CONFIG_QUERY_BAD_FORMAT = "Datafeed query is not parsable";
public static final String DATAFEED_CONFIG_AGG_BAD_FORMAT = "Datafeed aggregations are not parsable";
public static final String DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY = "A job configured with datafeed cannot support latency";
public static final String DATAFEED_NOT_FOUND = "No datafeed with id [{0}] exists";

View File

@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.utils;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -18,12 +17,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -38,15 +35,8 @@ public class XContentObjectTransformer<T extends ToXContentObject> {
private final NamedXContentRegistry registry;
private final CheckedFunction<XContentParser, T, IOException> parserFunction;
// We need this registry for parsing out Aggregations and Searches
private static NamedXContentRegistry searchRegistry;
static {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
searchRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
}
public static XContentObjectTransformer<AggregatorFactories.Builder> aggregatorTransformer() {
return new XContentObjectTransformer<>(searchRegistry, (p) -> {
public static XContentObjectTransformer<AggregatorFactories.Builder> aggregatorTransformer(NamedXContentRegistry registry) {
return new XContentObjectTransformer<>(registry, (p) -> {
// Serializing a map creates an object, so we need to skip the start object for the aggregation parser
XContentParser.Token token = p.nextToken();
assert(XContentParser.Token.START_OBJECT.equals(token));
@ -54,8 +44,8 @@ public class XContentObjectTransformer<T extends ToXContentObject> {
});
}
public static XContentObjectTransformer<QueryBuilder> queryBuilderTransformer() {
return new XContentObjectTransformer<>(searchRegistry, AbstractQueryBuilder::parseInnerQueryBuilder);
public static XContentObjectTransformer<QueryBuilder> queryBuilderTransformer(NamedXContentRegistry registry) {
return new XContentObjectTransformer<>(registry, AbstractQueryBuilder::parseInnerQueryBuilder);
}
XContentObjectTransformer(NamedXContentRegistry registry, CheckedFunction<XContentParser, T, IOException> parserFunction) {
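
With the static search registry gone, callers construct their own NamedXContentRegistry and hand it in, which is exactly what the tests below do. A sketch of the new call pattern (IOException handling omitted; the agg map is illustrative):

SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
NamedXContentRegistry registry = new NamedXContentRegistry(searchModule.getNamedXContents());
Map<String, Object> agg = Collections.singletonMap("bytes_avg",
        Collections.singletonMap("avg", Collections.singletonMap("field", "bytes")));
// fromMap round-trips the lazily held map into a parsed AggregatorFactories.Builder
AggregatorFactories.Builder parsed = XContentObjectTransformer.aggregatorTransformer(registry).fromMap(agg);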


@ -16,6 +16,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.AbstractStreamableTestCase;
@ -28,10 +29,12 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.core.IsEqual.equalTo;
@ -91,9 +94,9 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa
Collections.unmodifiableList(Arrays.asList(
(idx) -> indexIssueFound ? foundIssue : null
));
List<Function<DatafeedConfig, DeprecationIssue>> mlSettingsChecks =
List<BiFunction<DatafeedConfig, NamedXContentRegistry, DeprecationIssue>> mlSettingsChecks =
Collections.unmodifiableList(Arrays.asList(
(idx) -> mlIssueFound ? foundIssue : null
(idx, unused) -> mlIssueFound ? foundIssue : null
));
NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse(
@ -101,10 +104,10 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa
nodeIssueFound
? Collections.singletonList(
new NodesDeprecationCheckAction.NodeResponse(discoveryNode, Collections.singletonList(foundIssue)))
: Collections.emptyList(),
Collections.emptyList());
: emptyList(),
emptyList());
DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(state,
DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(state, NamedXContentRegistry.EMPTY,
resolver, Strings.EMPTY_ARRAY, indicesOptions, datafeeds,
nodeDeprecationIssues, indexSettingsChecks, clusterSettingsChecks, mlSettingsChecks);
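
Threading a NamedXContentRegistry through as the second BiFunction argument lets checks that need to re-parse do so against the local node's registry; the toy check below only consults the parse result already stored on the config, so it leaves the registry unused. The accessor is the one exercised in DatafeedConfigTests below, while the DeprecationIssue construction is sketched from context rather than taken from the real checkers:

BiFunction<DatafeedConfig, NamedXContentRegistry, DeprecationIssue> queryCheck =
    (datafeed, registry) -> datafeed.getQueryParsingException() == null
        ? null // the stored query map parsed cleanly
        : new DeprecationIssue(DeprecationIssue.Level.WARNING,
              "Datafeed [" + datafeed.getId() + "] query is not parsable",
              null, datafeed.getQueryParsingException().getMessage());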


@ -0,0 +1,179 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.datafeed;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class AggProviderTests extends AbstractSerializingTestCase<AggProvider> {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Override
protected NamedWriteableRegistry writableRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedWriteableRegistry(searchModule.getNamedWriteables());
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return writableRegistry();
}
@Override
protected AggProvider createTestInstance() {
return createRandomValidAggProvider();
}
@Override
protected Writeable.Reader<AggProvider> instanceReader() {
return AggProvider::fromStream;
}
@Override
protected AggProvider doParseInstance(XContentParser parser) throws IOException {
return AggProvider.fromXContent(parser, false);
}
public static AggProvider createRandomValidAggProvider() {
return createRandomValidAggProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10));
}
public static AggProvider createRandomValidAggProvider(String name, String field) {
Map<String, Object> agg = Collections.singletonMap(name,
Collections.singletonMap("avg", Collections.singletonMap("field", field)));
try {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
AggregatorFactories.Builder aggs =
XContentObjectTransformer.aggregatorTransformer(new NamedXContentRegistry(searchModule.getNamedXContents()))
.fromMap(agg);
return new AggProvider(agg, aggs, null);
} catch (IOException ex) {
fail(ex.getMessage());
}
return null;
}
public void testEmptyAggMap() throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{}");
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
() -> AggProvider.fromXContent(parser, false));
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(e.getMessage(), equalTo("Datafeed aggregations are not parsable"));
}
public void testSerializationBetweenBugVersion() throws IOException {
AggProvider tempAggProvider = createRandomValidAggProvider();
AggProvider aggProviderWithEx = new AggProvider(tempAggProvider.getAggs(), tempAggProvider.getParsedAggs(), new IOException("ex"));
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_6_6_2);
aggProviderWithEx.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) {
in.setVersion(Version.V_6_6_2);
AggProvider streamedAggProvider = AggProvider.fromStream(in);
assertThat(streamedAggProvider.getAggs(), equalTo(aggProviderWithEx.getAggs()));
assertThat(streamedAggProvider.getParsingException(), is(nullValue()));
AggregatorFactories.Builder streamedParsedAggs = XContentObjectTransformer.aggregatorTransformer(xContentRegistry())
.fromMap(streamedAggProvider.getAggs());
assertThat(streamedParsedAggs, equalTo(aggProviderWithEx.getParsedAggs()));
assertThat(streamedAggProvider.getParsedAggs(), is(nullValue()));
}
}
}
public void testSerializationBetweenEagerVersion() throws IOException {
AggProvider validAggProvider = createRandomValidAggProvider();
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_6_0_0);
validAggProvider.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) {
in.setVersion(Version.V_6_0_0);
AggProvider streamedAggProvider = AggProvider.fromStream(in);
assertThat(streamedAggProvider.getAggs(), equalTo(validAggProvider.getAggs()));
assertThat(streamedAggProvider.getParsingException(), is(nullValue()));
assertThat(streamedAggProvider.getParsedAggs(), equalTo(validAggProvider.getParsedAggs()));
}
}
try (BytesStreamOutput output = new BytesStreamOutput()) {
AggProvider aggProviderWithEx = new AggProvider(validAggProvider.getAggs(),
validAggProvider.getParsedAggs(),
new IOException("bad parsing"));
output.setVersion(Version.V_6_0_0);
IOException ex = expectThrows(IOException.class, () -> aggProviderWithEx.writeTo(output));
assertThat(ex.getMessage(), equalTo("bad parsing"));
}
try (BytesStreamOutput output = new BytesStreamOutput()) {
AggProvider aggProviderWithEx = new AggProvider(validAggProvider.getAggs(),
validAggProvider.getParsedAggs(),
new ElasticsearchException("bad parsing"));
output.setVersion(Version.V_6_0_0);
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> aggProviderWithEx.writeTo(output));
assertNotNull(ex.getCause());
assertThat(ex.getCause().getMessage(), equalTo("bad parsing"));
}
try (BytesStreamOutput output = new BytesStreamOutput()) {
AggProvider aggProviderWithOutParsed = new AggProvider(validAggProvider.getAggs(), null, null);
output.setVersion(Version.V_6_0_0);
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> aggProviderWithOutParsed.writeTo(output));
assertThat(ex.getMessage(), equalTo("Unsupported operation: parsed aggregations are null"));
}
}
@Override
protected AggProvider mutateInstance(AggProvider instance) throws IOException {
Exception parsingException = instance.getParsingException();
AggregatorFactories.Builder parsedAggs = instance.getParsedAggs();
switch (between(0, 1)) {
case 0:
parsingException = parsingException == null ? new IOException("failed parsing") : null;
break;
case 1:
parsedAggs = parsedAggs == null ?
XContentObjectTransformer.aggregatorTransformer(xContentRegistry()).fromMap(instance.getAggs()) :
null;
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new AggProvider(instance.getAggs(), parsedAggs, parsingException);
}
}
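
Between them, these tests pin down the three wire formats that the AggProvider (and QueryProvider) writeTo/fromStream pair negotiates:

Stream version    On the wire                              Receiver behaviour
>= 6.7.0          map + optional parsed aggs + exception   trusts the sender's parse result
6.6.0 - 6.6.x     map only                                 re-parses lazily, surfacing its own errors
< 6.6.0           parsed object only                       write fails fast on a parse error or a missing parse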


@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.datafeed;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -27,8 +26,8 @@ import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -58,19 +57,16 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.QUERY_TRANSFORMER;
import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig.lazyQueryParser;
import static org.elasticsearch.xpack.core.ml.datafeed.QueryProviderTests.createRandomValidQueryProvider;
import static org.elasticsearch.xpack.core.ml.job.messages.Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedConfig> {
@ -91,8 +87,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
DatafeedConfig.Builder builder = new DatafeedConfig.Builder(randomValidDatafeedId(), jobId);
builder.setIndices(randomStringList(1, 10));
if (randomBoolean()) {
builder.setQuery(Collections.singletonMap(TermQueryBuilder.NAME,
Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))));
builder.setQueryProvider(createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
}
boolean addScriptFields = randomBoolean();
if (addScriptFields) {
@ -254,7 +249,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testFutureConfigParse() throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_DATAFEED);
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_DATAFEED);
XContentParseException e = expectThrows(XContentParseException.class,
() -> DatafeedConfig.STRICT_PARSER.apply(parser, null).build());
assertEquals("[6:5] [datafeed_config] unknown field [tomorrows_technology_today], parser not found", e.getMessage());
@ -262,16 +257,15 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testPastQueryConfigParse() throws IOException {
try(XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_QUERY_DATAFEED)) {
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_QUERY_DATAFEED)) {
DatafeedConfig config = DatafeedConfig.LENIENT_PARSER.apply(parser, null).build();
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> config.getParsedQuery());
assertNotNull(e.getCause());
assertEquals("[match] query doesn't support multiple fields, found [query] and [type]", e.getCause().getMessage());
assertThat(config.getQueryParsingException().getMessage(),
equalTo("[match] query doesn't support multiple fields, found [query] and [type]"));
}
try(XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_QUERY_DATAFEED)) {
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_QUERY_DATAFEED)) {
XContentParseException e = expectThrows(XContentParseException.class,
() -> DatafeedConfig.STRICT_PARSER.apply(parser, null).build());
@ -281,18 +275,15 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testPastAggConfigParse() throws IOException {
try(XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_AGG_DATAFEED)) {
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_AGG_DATAFEED)) {
DatafeedConfig.Builder configBuilder = DatafeedConfig.LENIENT_PARSER.apply(parser, null);
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> configBuilder.build());
assertNotNull(e.getCause());
assertEquals(
"[size] must be greater than 0. Found [0] in [airline]",
e.getCause().getMessage());
DatafeedConfig datafeedConfig = DatafeedConfig.LENIENT_PARSER.apply(parser, null).build();
assertThat(datafeedConfig.getAggParsingException().getMessage(),
equalTo("[size] must be greater than 0. Found [0] in [airline]"));
}
try(XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_AGG_DATAFEED)) {
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, ANACHRONISTIC_AGG_DATAFEED)) {
XContentParseException e = expectThrows(XContentParseException.class,
() -> DatafeedConfig.STRICT_PARSER.apply(parser, null).build());
@ -302,14 +293,14 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testFutureMetadataParse() throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_DATAFEED);
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_DATAFEED);
// Unlike the config version of this test, the metadata parser should tolerate the unknown future field
assertNotNull(DatafeedConfig.LENIENT_PARSER.apply(parser, null).build());
}
public void testMultipleDefinedAggParse() throws IOException {
try(XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, MULTIPLE_AGG_DEF_DATAFEED)) {
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, MULTIPLE_AGG_DEF_DATAFEED)) {
XContentParseException ex = expectThrows(XContentParseException.class,
() -> DatafeedConfig.LENIENT_PARSER.apply(parser, null));
assertThat(ex.getMessage(), equalTo("[32:3] [datafeed_config] failed to parse field [aggs]"));
@ -317,7 +308,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
assertThat(ex.getCause().getMessage(), equalTo("Found two aggregation definitions: [aggs] and [aggregations]"));
}
try(XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, MULTIPLE_AGG_DEF_DATAFEED)) {
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, MULTIPLE_AGG_DEF_DATAFEED)) {
XContentParseException ex = expectThrows(XContentParseException.class,
() -> DatafeedConfig.STRICT_PARSER.apply(parser, null));
assertThat(ex.getMessage(), equalTo("[32:3] [datafeed_config] failed to parse field [aggs]"));
@ -502,8 +493,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::build);
assertNotNull(e.getCause());
assertThat(e.getCause().getMessage(), containsString("[interval] must be >0 for histogram aggregation [time]"));
assertThat(e.getMessage(), containsString(DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO));
}
public void testBuild_GivenDateHistogramWithInvalidTimeZone() {
@ -526,16 +516,16 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testBuild_GivenValidDateHistogram() {
long millisInDay = 24 * 3600000L;
assertThat(createDatafeedWithDateHistogram("1s").getHistogramIntervalMillis(), equalTo(1000L));
assertThat(createDatafeedWithDateHistogram("2s").getHistogramIntervalMillis(), equalTo(2000L));
assertThat(createDatafeedWithDateHistogram("1m").getHistogramIntervalMillis(), equalTo(60000L));
assertThat(createDatafeedWithDateHistogram("2m").getHistogramIntervalMillis(), equalTo(120000L));
assertThat(createDatafeedWithDateHistogram("1h").getHistogramIntervalMillis(), equalTo(3600000L));
assertThat(createDatafeedWithDateHistogram("2h").getHistogramIntervalMillis(), equalTo(7200000L));
assertThat(createDatafeedWithDateHistogram("1d").getHistogramIntervalMillis(), equalTo(millisInDay));
assertThat(createDatafeedWithDateHistogram("7d").getHistogramIntervalMillis(), equalTo(7 * millisInDay));
assertThat(createDatafeedWithDateHistogram("1s").getHistogramIntervalMillis(xContentRegistry()), equalTo(1000L));
assertThat(createDatafeedWithDateHistogram("2s").getHistogramIntervalMillis(xContentRegistry()), equalTo(2000L));
assertThat(createDatafeedWithDateHistogram("1m").getHistogramIntervalMillis(xContentRegistry()), equalTo(60000L));
assertThat(createDatafeedWithDateHistogram("2m").getHistogramIntervalMillis(xContentRegistry()), equalTo(120000L));
assertThat(createDatafeedWithDateHistogram("1h").getHistogramIntervalMillis(xContentRegistry()), equalTo(3600000L));
assertThat(createDatafeedWithDateHistogram("2h").getHistogramIntervalMillis(xContentRegistry()), equalTo(7200000L));
assertThat(createDatafeedWithDateHistogram("1d").getHistogramIntervalMillis(xContentRegistry()), equalTo(millisInDay));
assertThat(createDatafeedWithDateHistogram("7d").getHistogramIntervalMillis(xContentRegistry()), equalTo(7 * millisInDay));
assertThat(createDatafeedWithDateHistogram(7 * millisInDay + 1).getHistogramIntervalMillis(),
assertThat(createDatafeedWithDateHistogram(7 * millisInDay + 1).getHistogramIntervalMillis(xContentRegistry()),
equalTo(7 * millisInDay + 1));
}
@ -589,7 +579,8 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testDefaultFrequency_GivenNegative() {
DatafeedConfig datafeed = createTestInstance();
ESTestCase.expectThrows(IllegalArgumentException.class, () -> datafeed.defaultFrequency(TimeValue.timeValueSeconds(-1)));
ESTestCase.expectThrows(IllegalArgumentException.class,
() -> datafeed.defaultFrequency(TimeValue.timeValueSeconds(-1), xContentRegistry()));
}
public void testDefaultFrequency_GivenNoAggregations() {
@ -597,106 +588,79 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
datafeedBuilder.setIndices(Collections.singletonList("my_index"));
DatafeedConfig datafeed = datafeedBuilder.build();
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(30)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(60)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(90)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(120)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(121)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(30), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(60), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(90), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(120), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(121), xContentRegistry()));
assertEquals(TimeValue.timeValueSeconds(61), datafeed.defaultFrequency(TimeValue.timeValueSeconds(122)));
assertEquals(TimeValue.timeValueSeconds(75), datafeed.defaultFrequency(TimeValue.timeValueSeconds(150)));
assertEquals(TimeValue.timeValueSeconds(150), datafeed.defaultFrequency(TimeValue.timeValueSeconds(300)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1200)));
assertEquals(TimeValue.timeValueSeconds(61), datafeed.defaultFrequency(TimeValue.timeValueSeconds(122), xContentRegistry()));
assertEquals(TimeValue.timeValueSeconds(75), datafeed.defaultFrequency(TimeValue.timeValueSeconds(150), xContentRegistry()));
assertEquals(TimeValue.timeValueSeconds(150), datafeed.defaultFrequency(TimeValue.timeValueSeconds(300), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1200), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1201)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1800)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(1)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(2)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(12)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1201), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueSeconds(1800), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(1), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(2), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(12), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(12 * 3600 + 1)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(13)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(24)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(48)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(12 * 3600 + 1), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(13), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(24), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(48), xContentRegistry()));
}
public void testDefaultFrequency_GivenAggregationsWithHistogramInterval_1_Second() {
DatafeedConfig datafeed = createDatafeedWithDateHistogram("1s");
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(60)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(90)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(120)));
assertEquals(TimeValue.timeValueSeconds(125), datafeed.defaultFrequency(TimeValue.timeValueSeconds(250)));
assertEquals(TimeValue.timeValueSeconds(250), datafeed.defaultFrequency(TimeValue.timeValueSeconds(500)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(60), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(90), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(120), xContentRegistry()));
assertEquals(TimeValue.timeValueSeconds(125), datafeed.defaultFrequency(TimeValue.timeValueSeconds(250), xContentRegistry()));
assertEquals(TimeValue.timeValueSeconds(250), datafeed.defaultFrequency(TimeValue.timeValueSeconds(500), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(1)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(13)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(1), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(13), xContentRegistry()));
}
public void testDefaultFrequency_GivenAggregationsWithHistogramInterval_1_Minute() {
DatafeedConfig datafeed = createDatafeedWithDateHistogram("1m");
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(60)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(90)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(120)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(180)));
assertEquals(TimeValue.timeValueMinutes(2), datafeed.defaultFrequency(TimeValue.timeValueSeconds(240)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(20)));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(60), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(90), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(120), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(180), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(2), datafeed.defaultFrequency(TimeValue.timeValueSeconds(240), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(20), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueSeconds(20 * 60 + 1)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(6)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(12)));
assertEquals(TimeValue.timeValueMinutes(10),
datafeed.defaultFrequency(TimeValue.timeValueSeconds(20 * 60 + 1), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(6), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueHours(12), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(13)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(72)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(13), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(72), xContentRegistry()));
}
public void testDefaultFrequency_GivenAggregationsWithHistogramInterval_10_Minutes() {
DatafeedConfig datafeed = createDatafeedWithDateHistogram("10m");
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(10)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(20)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(30)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(12 * 60)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueMinutes(13 * 60)));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(10), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(20), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(30), xContentRegistry()));
assertEquals(TimeValue.timeValueMinutes(10), datafeed.defaultFrequency(TimeValue.timeValueMinutes(12 * 60), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueMinutes(13 * 60), xContentRegistry()));
}
public void testDefaultFrequency_GivenAggregationsWithHistogramInterval_1_Hour() {
DatafeedConfig datafeed = createDatafeedWithDateHistogram("1h");
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(1)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(3601)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(2)));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(12)));
}
public void testGetAggDeprecations() {
DatafeedConfig datafeed = createDatafeedWithDateHistogram("1h");
String deprecationWarning = "Warning";
List<String> deprecations = datafeed.getAggDeprecations((map, id, deprecationlist) -> {
deprecationlist.add(deprecationWarning);
return new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder("field").field("field"));
});
assertThat(deprecations, hasItem(deprecationWarning));
DatafeedConfig spiedConfig = spy(datafeed);
spiedConfig.getAggDeprecations();
verify(spiedConfig).getAggDeprecations(DatafeedConfig.lazyAggParser);
}
public void testGetQueryDeprecations() {
DatafeedConfig datafeed = createDatafeedWithDateHistogram("1h");
String deprecationWarning = "Warning";
List<String> deprecations = datafeed.getQueryDeprecations((map, id, deprecationlist) -> {
deprecationlist.add(deprecationWarning);
return new BoolQueryBuilder();
});
assertThat(deprecations, hasItem(deprecationWarning));
DatafeedConfig spiedConfig = spy(datafeed);
spiedConfig.getQueryDeprecations();
verify(spiedConfig).getQueryDeprecations(lazyQueryParser);
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(1), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueSeconds(3601), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(2), xContentRegistry()));
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(12), xContentRegistry()));
}
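// Taken together, the cases above suggest the default-frequency rule (observed
// pattern, not the authoritative algorithm): roughly half the bucket span,
// floored at 1m, capped at 10m for bucket spans up to 12h and at 1h beyond
// that; with a date_histogram the result also snaps to a multiple of the
// histogram interval and never drops below that interval.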
public void testSerializationOfComplexAggs() throws IOException {
@ -716,11 +680,8 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
.subAggregation(derivativePipelineAggregationBuilder)
.subAggregation(bucketScriptPipelineAggregationBuilder);
DatafeedConfig.Builder datafeedConfigBuilder = createDatafeedBuilderWithDateHistogram(dateHistogram);
Map<String, Object> terms = Collections.singletonMap(BoolQueryBuilder.NAME,
Collections.singletonMap("filter",
Collections.singletonMap(TermQueryBuilder.NAME,
Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)))));
datafeedConfigBuilder.setQuery(terms);
datafeedConfigBuilder.setQueryProvider(
createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
DatafeedConfig datafeedConfig = datafeedConfigBuilder.build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder().addAggregator(dateHistogram);
@ -736,18 +697,20 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
assertEquals(datafeedConfig, parsedDatafeedConfig);
// Assert that the parsed versions of our aggs and queries work as well
assertEquals(aggBuilder, parsedDatafeedConfig.getParsedAggregations());
assertEquals(terms, parsedDatafeedConfig.getQuery());
assertEquals(aggBuilder, parsedDatafeedConfig.getParsedAggregations(xContentRegistry()));
assertEquals(datafeedConfig.getQuery(), parsedDatafeedConfig.getQuery());
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables());
try(BytesStreamOutput output = new BytesStreamOutput()) {
datafeedConfig.writeTo(output);
try(StreamInput streamInput = output.bytes().streamInput()) {
try(StreamInput streamInput = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
DatafeedConfig streamedDatafeedConfig = new DatafeedConfig(streamInput);
assertEquals(datafeedConfig, streamedDatafeedConfig);
// Assert that the parsed versions of our aggs and queries work as well
assertEquals(aggBuilder, streamedDatafeedConfig.getParsedAggregations());
assertEquals(terms, streamedDatafeedConfig.getQuery());
assertEquals(aggBuilder, streamedDatafeedConfig.getParsedAggregations(xContentRegistry()));
assertEquals(datafeedConfig.getQuery(), streamedDatafeedConfig.getQuery());
}
}
}
@ -769,15 +732,13 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
.subAggregation(derivativePipelineAggregationBuilder)
.subAggregation(bucketScriptPipelineAggregationBuilder);
DatafeedConfig.Builder datafeedConfigBuilder = createDatafeedBuilderWithDateHistogram(dateHistogram);
Map<String, Object> terms = Collections.singletonMap(BoolQueryBuilder.NAME,
Collections.singletonMap("filter",
Collections.singletonList(
Collections.singletonMap(TermQueryBuilder.NAME,
Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))))));
// So that the equality check between the streamed and the current config passes.
// DatafeedConfigs streamed to versions before 6.6.0 require a parsed object for aggs and queries,
// so all the default values get materialized on both sides
datafeedConfigBuilder.setQuery(QUERY_TRANSFORMER.toMap(QUERY_TRANSFORMER.fromMap(terms)));
datafeedConfigBuilder.setQueryProvider(
QueryProvider
.fromParsedQuery(QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)))));
DatafeedConfig datafeedConfig = datafeedConfigBuilder.build();
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
@ -793,8 +754,8 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
// Assert that the parsed versions of our aggs and queries work as well
assertEquals(new AggregatorFactories.Builder().addAggregator(dateHistogram),
streamedDatafeedConfig.getParsedAggregations());
assertEquals(datafeedConfig.getParsedQuery(), streamedDatafeedConfig.getParsedQuery());
streamedDatafeedConfig.getParsedAggregations(xContentRegistry()));
assertEquals(datafeedConfig.getParsedQuery(xContentRegistry()), streamedDatafeedConfig.getParsedQuery(xContentRegistry()));
}
}
}
@ -806,22 +767,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
}
}
public void testEmptyQueryMap() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("empty_query_map", "job1");
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> builder.setQuery(Collections.emptyMap(), false));
assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(ex.getMessage(), equalTo("Datafeed [empty_query_map] query is not parsable"));
}
public void testEmptyAggMap() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("empty_agg_map", "job1");
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> builder.setAggregations(Collections.emptyMap(), false));
assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(ex.getMessage(), equalTo("Datafeed [empty_agg_map] aggregations are not parsable"));
}
public static String randomValidDatafeedId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
@ -884,18 +829,16 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
builder.setIndices(indices);
break;
case 5:
Map<String, Object> query = new HashMap<>();
if (instance.getQuery() != null) {
query.put("must", instance.getQuery());
BoolQueryBuilder query = new BoolQueryBuilder();
if (instance.getParsedQuery(xContentRegistry()) != null) {
query.must(instance.getParsedQuery(xContentRegistry()));
}
query.put("filter", Collections.singletonList(
Collections.singletonMap(TermQueryBuilder.NAME,
Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)))));
builder.setQuery(query);
query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
builder.setParsedQuery(query);
break;
case 6:
if (instance.hasAggregations()) {
builder.setAggregations(null);
builder.setAggProvider(null);
} else {
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
String timeField = randomAlphaOfLength(10);
@ -912,7 +855,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
ArrayList<ScriptField> scriptFields = new ArrayList<>(instance.getScriptFields());
scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true));
builder.setScriptFields(scriptFields);
builder.setAggregations(null);
builder.setAggProvider(null);
break;
case 8:
builder.setScrollSize(instance.getScrollSize() + between(1, 100));


@ -5,9 +5,12 @@
*/
package org.elasticsearch.xpack.core.ml.datafeed;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@ -17,23 +20,34 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig.Mode;
import org.elasticsearch.xpack.core.ml.job.config.JobTests;
import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer;
import java.io.IOException;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.xpack.core.ml.datafeed.AggProviderTests.createRandomValidAggProvider;
import static org.elasticsearch.xpack.core.ml.datafeed.QueryProviderTests.createRandomValidQueryProvider;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
@ -64,8 +78,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
builder.setIndices(DatafeedConfigTests.randomStringList(1, 10));
}
if (randomBoolean()) {
builder.setQuery(Collections.singletonMap(TermQueryBuilder.NAME,
Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))));
builder.setQuery(createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
}
if (randomBoolean()) {
int scriptsSize = randomInt(3);
@ -80,8 +93,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
// We can only test with a single agg because the xcontent order gets randomized by the test base class,
// so the actual xcontent would differ and the test would fail.
// Testing with a single agg is ok as we don't have special list writeable / xcontent logic
builder.setAggregations(Collections.singletonMap(randomAlphaOfLength(10),
Collections.singletonMap("avg", Collections.singletonMap("field", randomAlphaOfLength(10)))));
builder.setAggregations(createRandomValidAggProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
}
if (randomBoolean()) {
builder.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE));
@ -154,7 +166,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
public void testMultipleDefinedAggParse() throws IOException {
try(XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, MULTIPLE_AGG_DEF_DATAFEED)) {
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, MULTIPLE_AGG_DEF_DATAFEED)) {
XContentParseException ex = expectThrows(XContentParseException.class,
() -> DatafeedUpdate.PARSER.apply(parser, null));
assertThat(ex.getMessage(), equalTo("[32:3] [datafeed_update] failed to parse field [aggs]"));
@ -192,13 +204,13 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed");
datafeedBuilder.setIndices(Collections.singletonList("i_1"));
DatafeedConfig datafeed = datafeedBuilder.build();
QueryProvider queryProvider = createRandomValidQueryProvider("a", "b");
DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId());
update.setJobId("bar");
update.setIndices(Collections.singletonList("i_2"));
update.setQueryDelay(TimeValue.timeValueSeconds(42));
update.setFrequency(TimeValue.timeValueSeconds(142));
update.setQuery(Collections.singletonMap(TermQueryBuilder.NAME, Collections.singletonMap("a", "b")));
update.setQuery(queryProvider);
update.setScriptFields(Collections.singletonList(new SearchSourceBuilder.ScriptField("a", mockScript("b"), false)));
update.setScrollSize(8000);
update.setChunkingConfig(ChunkingConfig.newManual(TimeValue.timeValueHours(1)));
@ -210,8 +222,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2")));
assertThat(updatedDatafeed.getQueryDelay(), equalTo(TimeValue.timeValueSeconds(42)));
assertThat(updatedDatafeed.getFrequency(), equalTo(TimeValue.timeValueSeconds(142)));
assertThat(updatedDatafeed.getQuery(),
equalTo(Collections.singletonMap(TermQueryBuilder.NAME, Collections.singletonMap("a", "b"))));
assertThat(updatedDatafeed.getQuery(), equalTo(queryProvider.getQuery()));
assertThat(updatedDatafeed.hasAggregations(), is(false));
assertThat(updatedDatafeed.getScriptFields(),
equalTo(Collections.singletonList(new SearchSourceBuilder.ScriptField("a", mockScript("b"), false))));
@ -221,27 +232,23 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
assertThat(updatedDatafeed.getDelayedDataCheckConfig().getCheckWindow(), equalTo(TimeValue.timeValueHours(1)));
}
public void testApply_givenAggregations() {
public void testApply_givenAggregations() throws IOException {
DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed");
datafeedBuilder.setIndices(Collections.singletonList("i_1"));
DatafeedConfig datafeed = datafeedBuilder.build();
DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId());
Map<String, Object> maxTime = Collections.singletonMap("time",
Collections.singletonMap("max", Collections.singletonMap("field", "time")));
Map<String, Object> histoDefinition = new HashMap<>();
histoDefinition.put("interval", 300000);
histoDefinition.put("field", "time");
Map<String, Object> aggBody = new HashMap<>();
aggBody.put("histogram", histoDefinition);
aggBody.put("aggs", maxTime);
Map<String, Object> aggMap = Collections.singletonMap("a", aggBody);
update.setAggregations(aggMap);
MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
AggProvider aggProvider = AggProvider.fromParsedAggs(new AggregatorFactories.Builder().addAggregator(
AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime)));
update.setAggregations(aggProvider);
DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap());
assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1")));
assertThat(updatedDatafeed.getAggregations(), equalTo(aggMap));
assertThat(updatedDatafeed.getParsedAggregations(xContentRegistry()), equalTo(aggProvider.getParsedAggs()));
assertThat(updatedDatafeed.getAggregations(), equalTo(aggProvider.getAggs()));
}
public void testApply_GivenRandomUpdates_AssertImmutability() {
@ -249,7 +256,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig(JobTests.randomValidJobId());
if (datafeed.getAggregations() != null) {
DatafeedConfig.Builder withoutAggs = new DatafeedConfig.Builder(datafeed);
withoutAggs.setAggregations(null);
withoutAggs.setAggProvider(null);
datafeed = withoutAggs.build();
}
DatafeedUpdate update = createRandomized(datafeed.getId(), datafeed);
@ -263,24 +270,57 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
}
}
public void testEmptyQueryMap() {
DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder("empty_query_map");
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> builder.setQuery(Collections.emptyMap()));
assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(ex.getMessage(), equalTo("Datafeed [empty_query_map] query is not parsable"));
}
public void testSerializationOfComplexAggsBetweenVersions() throws IOException {
MaxAggregationBuilder maxTime = AggregationBuilders.max("timestamp").field("timestamp");
AvgAggregationBuilder avgAggregationBuilder = AggregationBuilders.avg("bytes_in_avg").field("system.network.in.bytes");
DerivativePipelineAggregationBuilder derivativePipelineAggregationBuilder =
PipelineAggregatorBuilders.derivative("bytes_in_derivative", "bytes_in_avg");
BucketScriptPipelineAggregationBuilder bucketScriptPipelineAggregationBuilder =
PipelineAggregatorBuilders.bucketScript("non_negative_bytes",
Collections.singletonMap("bytes", "bytes_in_derivative"),
new Script("params.bytes > 0 ? params.bytes : null"));
DateHistogramAggregationBuilder dateHistogram =
AggregationBuilders.dateHistogram("histogram_buckets")
.field("timestamp").interval(300000).timeZone(ZoneOffset.UTC)
.subAggregation(maxTime)
.subAggregation(avgAggregationBuilder)
.subAggregation(derivativePipelineAggregationBuilder)
.subAggregation(bucketScriptPipelineAggregationBuilder);
AggregatorFactories.Builder aggs = new AggregatorFactories.Builder().addAggregator(dateHistogram);
DatafeedUpdate.Builder datafeedUpdateBuilder = new DatafeedUpdate.Builder("df-update-past-serialization-test");
datafeedUpdateBuilder.setAggregations(new AggProvider(
XContentObjectTransformer.aggregatorTransformer(xContentRegistry()).toMap(aggs),
aggs,
null));
// So that the equality check between the streamed and the current update passes.
// DatafeedConfigs streamed to versions before 6.6.0 require a parsed object for aggs and queries,
// so all the default values get materialized on both sides
datafeedUpdateBuilder.setQuery(
QueryProvider
.fromParsedQuery(QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)))));
DatafeedUpdate datafeedUpdate = datafeedUpdateBuilder.build();
public void testEmptyAggMap() {
DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder("empty_agg_map");
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> builder.setAggregations(Collections.emptyMap()));
assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(ex.getMessage(), equalTo("Datafeed [empty_agg_map] aggregations are not parsable"));
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables());
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_6_0_0);
datafeedUpdate.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
in.setVersion(Version.V_6_0_0);
DatafeedUpdate streamedDatafeedUpdate = new DatafeedUpdate(in);
assertEquals(datafeedUpdate, streamedDatafeedUpdate);
// Assert that the parsed versions of our aggs and queries work as well
assertEquals(aggs, streamedDatafeedUpdate.getParsedAgg(xContentRegistry()));
assertEquals(datafeedUpdate.getParsedQuery(xContentRegistry()), streamedDatafeedUpdate.getParsedQuery(xContentRegistry()));
}
}
}
@Override
protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) {
protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) throws IOException {
DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(instance);
switch (between(0, 9)) {
case 0:
@ -314,31 +354,22 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
builder.setIndices(indices);
break;
case 5:
Map<String, Object> boolQuery = new HashMap<>();
BoolQueryBuilder query = new BoolQueryBuilder();
if (instance.getQuery() != null) {
boolQuery.put("must", instance.getQuery());
query.must(instance.getParsedQuery(xContentRegistry()));
}
boolQuery.put("filter",
Collections.singletonList(
Collections.singletonMap(TermQueryBuilder.NAME,
Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)))));
builder.setQuery(Collections.singletonMap("bool", boolQuery));
query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
builder.setQuery(QueryProvider.fromParsedQuery(query));
break;
case 6:
if (instance.hasAggregations()) {
builder.setAggregations(null);
} else {
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
String timeField = randomAlphaOfLength(10);
Map<String, Object> maxTime = Collections.singletonMap(timeField,
Collections.singletonMap("max", Collections.singletonMap("field", timeField)));
Map<String, Object> histoDefinition = new HashMap<>();
histoDefinition.put("interval", between(10000, 3600000));
histoDefinition.put("field", timeField);
Map<String, Object> aggBody = new HashMap<>();
aggBody.put("aggs", maxTime);
aggBody.put("date_histogram", histoDefinition);
Map<String, Object> aggMap = Collections.singletonMap(timeField, aggBody);
builder.setAggregations(aggMap);
aggBuilder.addAggregator(new DateHistogramAggregationBuilder(timeField).field(timeField).interval(between(10000, 3600000))
.subAggregation(new MaxAggregationBuilder(timeField).field(timeField)));
builder.setAggregations(AggProvider.fromParsedAggs(aggBuilder));
if (instance.getScriptFields().isEmpty() == false) {
builder.setScriptFields(Collections.emptyList());
}


@ -0,0 +1,185 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.datafeed;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class QueryProviderTests extends AbstractSerializingTestCase<QueryProvider> {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Override
protected NamedWriteableRegistry writableRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedWriteableRegistry(searchModule.getNamedWriteables());
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return writableRegistry();
}
@Override
protected QueryProvider createTestInstance() {
return createRandomValidQueryProvider();
}
@Override
protected Writeable.Reader<QueryProvider> instanceReader() {
return QueryProvider::fromStream;
}
@Override
protected QueryProvider doParseInstance(XContentParser parser) throws IOException {
return QueryProvider.fromXContent(parser, false);
}
public static QueryProvider createRandomValidQueryProvider() {
return createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10));
}
public static QueryProvider createRandomValidQueryProvider(String field, String value) {
Map<String, Object> terms = Collections.singletonMap(BoolQueryBuilder.NAME,
Collections.singletonMap("filter",
Collections.singletonList(
Collections.singletonMap(TermQueryBuilder.NAME,
Collections.singletonMap(field, value)))));
return new QueryProvider(
terms,
QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(field, value)),
null);
}
public void testEmptyQueryMap() throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{}");
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
() -> QueryProvider.fromXContent(parser, false));
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(e.getMessage(), equalTo("Datafeed query is not parsable"));
}
public void testSerializationBetweenBugVersion() throws IOException {
QueryProvider tempQueryProvider = createRandomValidQueryProvider();
QueryProvider queryProviderWithEx = new QueryProvider(tempQueryProvider.getQuery(),
tempQueryProvider.getParsedQuery(),
new IOException("ex"));
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_6_6_2);
queryProviderWithEx.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) {
in.setVersion(Version.V_6_6_2);
QueryProvider streamedQueryProvider = QueryProvider.fromStream(in);
assertThat(streamedQueryProvider.getQuery(), equalTo(queryProviderWithEx.getQuery()));
assertThat(streamedQueryProvider.getParsingException(), is(nullValue()));
QueryBuilder streamedParsedQuery = XContentObjectTransformer.queryBuilderTransformer(xContentRegistry())
.fromMap(streamedQueryProvider.getQuery());
assertThat(streamedParsedQuery, equalTo(queryProviderWithEx.getParsedQuery()));
assertThat(streamedQueryProvider.getParsedQuery(), is(nullValue()));
}
}
}
public void testSerializationBetweenEagerVersion() throws IOException {
QueryProvider validQueryProvider = createRandomValidQueryProvider();
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_6_0_0);
validQueryProvider.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) {
in.setVersion(Version.V_6_0_0);
QueryProvider streamedQueryProvider = QueryProvider.fromStream(in);
XContentObjectTransformer<QueryBuilder> transformer = XContentObjectTransformer.queryBuilderTransformer(xContentRegistry());
Map<String, Object> sourceQueryMapWithDefaults = transformer.toMap(transformer.fromMap(validQueryProvider.getQuery()));
assertThat(streamedQueryProvider.getQuery(), equalTo(sourceQueryMapWithDefaults));
assertThat(streamedQueryProvider.getParsingException(), is(nullValue()));
assertThat(streamedQueryProvider.getParsedQuery(), equalTo(validQueryProvider.getParsedQuery()));
}
}
try (BytesStreamOutput output = new BytesStreamOutput()) {
QueryProvider queryProviderWithEx = new QueryProvider(validQueryProvider.getQuery(),
validQueryProvider.getParsedQuery(),
new IOException("bad parsing"));
output.setVersion(Version.V_6_0_0);
IOException ex = expectThrows(IOException.class, () -> queryProviderWithEx.writeTo(output));
assertThat(ex.getMessage(), equalTo("bad parsing"));
}
try (BytesStreamOutput output = new BytesStreamOutput()) {
QueryProvider queryProviderWithEx = new QueryProvider(validQueryProvider.getQuery(),
validQueryProvider.getParsedQuery(),
new ElasticsearchException("bad parsing"));
output.setVersion(Version.V_6_0_0);
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> queryProviderWithEx.writeTo(output));
assertNotNull(ex.getCause());
assertThat(ex.getCause().getMessage(), equalTo("bad parsing"));
}
try (BytesStreamOutput output = new BytesStreamOutput()) {
QueryProvider queryProviderWithOutParsed = new QueryProvider(validQueryProvider.getQuery(), null, null);
output.setVersion(Version.V_6_0_0);
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> queryProviderWithOutParsed.writeTo(output));
assertThat(ex.getMessage(), equalTo("Unsupported operation: parsed query is null"));
}
}
@Override
protected QueryProvider mutateInstance(QueryProvider instance) throws IOException {
Exception parsingException = instance.getParsingException();
QueryBuilder parsedQuery = instance.getParsedQuery();
switch (between(0, 1)) {
case 0:
parsingException = parsingException == null ? new IOException("failed parsing") : null;
break;
case 1:
parsedQuery = parsedQuery == null ?
XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()).fromMap(instance.getQuery()) :
null;
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new QueryProvider(instance.getQuery(), parsedQuery, parsingException);
}
}
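Read together, the tests above pin down QueryProvider's version-dependent wire contract: a stream to a pre-lazy-parsing node must carry a fully parsed query, so any stored parsing failure surfaces at write time, while newer streams carry the raw map and defer parsing to the reader. A hedged sketch of that branching follows; EAGER_CUTOFF and the field names are illustrative, not the actual implementation.

// Sketch only: the write path implied by the assertions above.
public void writeTo(StreamOutput out) throws IOException {
    if (out.getVersion().before(EAGER_CUTOFF)) {                // old node on the other end
        if (parsingException instanceof IOException) {
            throw (IOException) parsingException;               // surfaces as-is, per the test
        } else if (parsingException != null) {
            throw new ElasticsearchException(parsingException); // wrapped as the cause, per the test
        } else if (parsedQuery == null) {
            throw new ElasticsearchException("Unsupported operation: parsed query is null");
        }
        out.writeNamedWriteable(parsedQuery);                   // old nodes expect a QueryBuilder
    } else {
        out.writeMap(query);                                    // new nodes accept the raw map
    }
}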


@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.utils;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -16,6 +17,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
@ -37,12 +39,19 @@ import static org.hamcrest.Matchers.hasSize;
public class XContentObjectTransformerTests extends ESTestCase {
@Override
public NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testFromMap() throws IOException {
Map<String, Object> aggMap = Collections.singletonMap("fieldName",
Collections.singletonMap("max",
Collections.singletonMap("field", "fieldName")));
XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer = XContentObjectTransformer.aggregatorTransformer();
XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer =
XContentObjectTransformer.aggregatorTransformer(xContentRegistry());
assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggMap);
assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggTransformer.toMap(aggTransformer.fromMap(aggMap)));
@ -60,7 +69,8 @@ public class XContentObjectTransformerTests extends ESTestCase {
put("boost",1.0);
}}));
XContentObjectTransformer<QueryBuilder> queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer();
XContentObjectTransformer<QueryBuilder> queryBuilderTransformer =
XContentObjectTransformer.queryBuilderTransformer(xContentRegistry());
assertXContentAreEqual(queryBuilderTransformer.fromMap(queryMap), queryMap);
assertXContentAreEqual(queryBuilderTransformer.fromMap(queryMap),
queryBuilderTransformer.toMap(queryBuilderTransformer.fromMap(queryMap)));
@ -73,7 +83,8 @@ public class XContentObjectTransformerTests extends ESTestCase {
put("type", "phrase"); //phrase stopped being supported for match in 6.x
}}));
XContentObjectTransformer<QueryBuilder> queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer();
XContentObjectTransformer<QueryBuilder> queryBuilderTransformer =
XContentObjectTransformer.queryBuilderTransformer(xContentRegistry());
ParsingException exception = expectThrows(ParsingException.class,
() -> queryBuilderTransformer.fromMap(queryMap));
@ -85,14 +96,17 @@ public class XContentObjectTransformerTests extends ESTestCase {
put("field", "myField");
}}));
XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer = XContentObjectTransformer.aggregatorTransformer();
XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer =
XContentObjectTransformer.aggregatorTransformer(xContentRegistry());
XContentParseException xContentParseException = expectThrows(XContentParseException.class, () -> aggTransformer.fromMap(aggMap));
assertThat(xContentParseException.getMessage(), containsString("[terms] failed to parse field [size]"));
}
public void testToMap() throws IOException {
XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer = XContentObjectTransformer.aggregatorTransformer();
XContentObjectTransformer<QueryBuilder> queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer();
XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer =
XContentObjectTransformer.aggregatorTransformer(xContentRegistry());
XContentObjectTransformer<QueryBuilder> queryBuilderTransformer =
XContentObjectTransformer.queryBuilderTransformer(xContentRegistry());
AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
long aggHistogramInterval = randomNonNegativeLong();


@ -9,6 +9,7 @@ import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -45,7 +46,7 @@ public class DeprecationChecks {
Collections.unmodifiableList(Arrays.asList(
IndexDeprecationChecks::oldIndicesCheck));
static List<Function<DatafeedConfig, DeprecationIssue>> ML_SETTINGS_CHECKS =
static List<BiFunction<DatafeedConfig, NamedXContentRegistry, DeprecationIssue>> ML_SETTINGS_CHECKS =
Collections.unmodifiableList(Arrays.asList(
MlDeprecationChecks::checkDataFeedAggregations,
MlDeprecationChecks::checkDataFeedQuery
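The move from Function to BiFunction tracks the lazy-parsing refactor: a datafeed's query and aggs are now stored as raw maps, so a deprecation check needs the NamedXContentRegistry to parse them before it can inspect anything. A minimal sketch of driving such a check list, with assumed names rather than the real helper:

// Illustrative only: run each registry-aware check, keep the non-null issues.
List<DeprecationIssue> issues = ML_SETTINGS_CHECKS.stream()
        .map(check -> check.apply(datafeedConfig, xContentRegistry))
        .filter(Objects::nonNull)
        .collect(Collectors.toList());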


@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -18,8 +19,8 @@ final class MlDeprecationChecks {
private MlDeprecationChecks() {
}
static DeprecationIssue checkDataFeedQuery(DatafeedConfig datafeedConfig) {
List<String> deprecations = datafeedConfig.getQueryDeprecations();
static DeprecationIssue checkDataFeedQuery(DatafeedConfig datafeedConfig, NamedXContentRegistry xContentRegistry) {
List<String> deprecations = datafeedConfig.getQueryDeprecations(xContentRegistry);
if (deprecations.isEmpty()) {
return null;
} else {
@ -30,8 +31,8 @@ final class MlDeprecationChecks {
}
}
static DeprecationIssue checkDataFeedAggregations(DatafeedConfig datafeedConfig) {
List<String> deprecations = datafeedConfig.getAggDeprecations();
static DeprecationIssue checkDataFeedAggregations(DatafeedConfig datafeedConfig, NamedXContentRegistry xContentRegistry) {
List<String> deprecations = datafeedConfig.getAggDeprecations(xContentRegistry);
if (deprecations.isEmpty()) {
return null;
} else {


@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.threadpool.ThreadPool;
@ -48,18 +49,20 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio
private final NodeClient client;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final Settings settings;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportDeprecationInfoAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver,
XPackLicenseState licenseState, NodeClient client) {
XPackLicenseState licenseState, NodeClient client, NamedXContentRegistry xContentRegistry) {
super(DeprecationInfoAction.NAME, transportService, clusterService, threadPool, actionFilters,
DeprecationInfoAction.Request::new, indexNameExpressionResolver);
this.licenseState = licenseState;
this.client = client;
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.settings = settings;
this.xContentRegistry = xContentRegistry;
}
@Override
@ -99,7 +102,7 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio
getDatafeedConfigs(ActionListener.wrap(
datafeeds -> {
listener.onResponse(
DeprecationInfoAction.Response.from(state, indexNameExpressionResolver,
DeprecationInfoAction.Response.from(state, xContentRegistry, indexNameExpressionResolver,
request.indices(), request.indicesOptions(), datafeeds,
response, INDEX_SETTINGS_CHECKS, CLUSTER_SETTINGS_CHECKS,
ML_SETTINGS_CHECKS));


@ -6,7 +6,10 @@
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -14,6 +17,12 @@ import java.util.Collections;
public class MlDeprecationChecksTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Override
protected boolean enableWarningsCheck() {
return false;
@ -22,8 +31,8 @@ public class MlDeprecationChecksTests extends ESTestCase {
public void testCheckDataFeedQuery() {
DatafeedConfig.Builder goodDatafeed = new DatafeedConfig.Builder("good-df", "job-id");
goodDatafeed.setIndices(Collections.singletonList("some-index"));
goodDatafeed.setQuery(Collections.singletonMap(TermQueryBuilder.NAME, Collections.singletonMap("foo", "bar")));
assertNull(MlDeprecationChecks.checkDataFeedQuery(goodDatafeed.build()));
goodDatafeed.setParsedQuery(QueryBuilders.termQuery("foo", "bar"));
assertNull(MlDeprecationChecks.checkDataFeedQuery(goodDatafeed.build(), xContentRegistry()));
DatafeedConfig.Builder deprecatedDatafeed = new DatafeedConfig.Builder("df-with-deprecated-query", "job-id");
deprecatedDatafeed.setIndices(Collections.singletonList("some-index"));


@ -12,7 +12,7 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
@ -159,9 +159,7 @@ public class DelayedDataDetectorIT extends MlNativeAutodetectIntegTestCase {
.subAggregation(avgAggregationBuilder)
.field("time")
.interval(TimeValue.timeValueMinutes(5).millis())));
datafeedConfigBuilder.setQuery(Collections.singletonMap(RangeQueryBuilder.NAME,
Collections.singletonMap("value",
Collections.singletonMap(RangeQueryBuilder.GTE_FIELD.getPreferredName(), numDocs/2))));
datafeedConfigBuilder.setParsedQuery(QueryBuilders.rangeQuery("value").gte(numDocs/2));
datafeedConfigBuilder.setFrequency(TimeValue.timeValueMinutes(5));
datafeedConfigBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(12)));
@ -253,6 +251,6 @@ public class DelayedDataDetectorIT extends MlNativeAutodetectIntegTestCase {
}
private DelayedDataDetector newDetector(Job job, DatafeedConfig datafeedConfig) {
return DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, client());
return DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, client(), xContentRegistry());
}
}
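After the refactor the test supplies the query through setParsedQuery rather than hand-assembling the equivalent JSON-shaped map. For comparison, a hedged sketch of both forms of the same range query; the map form mirrors the removed lines above and is assumed to be the legacy builder API:

// New style: hand over a parsed builder; conversion to the stored map form
// presumably happens inside setParsedQuery.
datafeedConfigBuilder.setParsedQuery(QueryBuilders.rangeQuery("value").gte(numDocs / 2));

// Legacy style from the removed lines: the same query as a raw nested map.
datafeedConfigBuilder.setQuery(Collections.singletonMap("range",
        Collections.singletonMap("value",
                Collections.singletonMap("gte", numDocs / 2))));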


@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.reindex.ReindexPlugin;
import org.elasticsearch.persistent.PersistentTaskParams;
import org.elasticsearch.persistent.PersistentTaskState;
@ -53,6 +54,12 @@ import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgno
*/
abstract class MlNativeIntegTestCase extends ESIntegTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(LocalStateCompositeXPackPlugin.class, Netty4Plugin.class);


@ -399,10 +399,18 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
Auditor auditor = new Auditor(client, clusterService.getNodeName());
JobResultsProvider jobResultsProvider = new JobResultsProvider(client, settings);
JobConfigProvider jobConfigProvider = new JobConfigProvider(client);
JobConfigProvider jobConfigProvider = new JobConfigProvider(client, xContentRegistry);
DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry);
UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(client, clusterService, threadPool);
JobManager jobManager = new JobManager(env, settings, jobResultsProvider, clusterService, auditor, threadPool, client, notifier);
JobManager jobManager = new JobManager(env,
settings,
jobResultsProvider,
clusterService,
auditor,
threadPool,
client,
notifier,
xContentRegistry);
// special holder for @link(MachineLearningFeatureSetUsage) which needs access to job manager if ML is enabled
JobManagerHolder jobManagerHolder = new JobManagerHolder(jobManager);


@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -38,17 +39,19 @@ public class TransportPreviewDatafeedAction extends HandledTransportAction<Previ
private final Client client;
private final JobConfigProvider jobConfigProvider;
private final DatafeedConfigProvider datafeedConfigProvider;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportPreviewDatafeedAction(ThreadPool threadPool, TransportService transportService,
ActionFilters actionFilters, Client client, JobConfigProvider jobConfigProvider,
DatafeedConfigProvider datafeedConfigProvider) {
DatafeedConfigProvider datafeedConfigProvider, NamedXContentRegistry xContentRegistry) {
super(PreviewDatafeedAction.NAME, transportService, actionFilters,
(Supplier<PreviewDatafeedAction.Request>) PreviewDatafeedAction.Request::new);
this.threadPool = threadPool;
this.client = client;
this.jobConfigProvider = jobConfigProvider;
this.datafeedConfigProvider = datafeedConfigProvider;
this.xContentRegistry = xContentRegistry;
}
@Override
@ -67,7 +70,7 @@ public class TransportPreviewDatafeedAction extends HandledTransportAction<Previ
// NB: this is using the client from the transport layer, NOT the internal client.
// This is important because it means the datafeed search will fail if the user
// requesting the preview doesn't have permission to search the relevant indices.
DataExtractorFactory.create(client, previewDatafeed.build(), jobBuilder.build(),
DataExtractorFactory.create(client, previewDatafeed.build(), jobBuilder.build(), xContentRegistry,
new ActionListener<DataExtractorFactory>() {
@Override
public void onResponse(DataExtractorFactory dataExtractorFactory) {


@ -62,6 +62,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
private final SecurityContext securityContext;
private final DatafeedConfigProvider datafeedConfigProvider;
private final JobConfigProvider jobConfigProvider;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportPutDatafeedAction(Settings settings, TransportService transportService,
@ -76,7 +77,8 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ?
new SecurityContext(settings, threadPool.getThreadContext()) : null;
this.datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry);
this.jobConfigProvider = new JobConfigProvider(client);
this.jobConfigProvider = new JobConfigProvider(client, xContentRegistry);
this.xContentRegistry = xContentRegistry;
}
@Override
@ -172,7 +174,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
listener.onFailure(validationError);
return;
}
DatafeedConfig.validateAggregations(request.getDatafeed().getParsedAggregations());
DatafeedConfig.validateAggregations(request.getDatafeed().getParsedAggregations(xContentRegistry));
CheckedConsumer<Boolean, Exception> validationOk = ok -> {
datafeedConfigProvider.putDatafeedConfig(request.getDatafeed(), headers, ActionListener.wrap(


@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.license.RemoteClusterLicenseChecker;
import org.elasticsearch.license.XPackLicenseState;
@ -78,6 +79,7 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction<Star
private final DatafeedConfigProvider datafeedConfigProvider;
private final Auditor auditor;
private final MlConfigMigrationEligibilityCheck migrationEligibilityCheck;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportStartDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool,
@ -85,7 +87,7 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction<Star
PersistentTasksService persistentTasksService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
Client client, JobConfigProvider jobConfigProvider, DatafeedConfigProvider datafeedConfigProvider,
Auditor auditor) {
Auditor auditor, NamedXContentRegistry xContentRegistry) {
super(StartDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
StartDatafeedAction.Request::new);
this.licenseState = licenseState;
@ -95,11 +97,15 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction<Star
this.datafeedConfigProvider = datafeedConfigProvider;
this.auditor = auditor;
this.migrationEligibilityCheck = new MlConfigMigrationEligibilityCheck(settings, clusterService);
this.xContentRegistry = xContentRegistry;
}
static void validate(Job job, DatafeedConfig datafeedConfig, PersistentTasksCustomMetaData tasks) {
DatafeedJobValidator.validate(datafeedConfig, job);
DatafeedConfig.validateAggregations(datafeedConfig.getParsedAggregations());
static void validate(Job job,
DatafeedConfig datafeedConfig,
PersistentTasksCustomMetaData tasks,
NamedXContentRegistry xContentRegistry) {
DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry);
DatafeedConfig.validateAggregations(datafeedConfig.getParsedAggregations(xContentRegistry));
JobState jobState = MlTasks.getJobState(datafeedConfig.getJobId(), tasks);
if (jobState.isAnyOf(JobState.OPENING, JobState.OPENED) == false) {
throw ExceptionsHelper.conflictStatusException("cannot start datafeed [" + datafeedConfig.getId() +
@ -108,10 +114,10 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction<Star
}
// Get the deprecation warnings from the parsed query and aggs to audit
static void auditDeprecations(DatafeedConfig datafeed, Job job, Auditor auditor) {
static void auditDeprecations(DatafeedConfig datafeed, Job job, Auditor auditor, NamedXContentRegistry xContentRegistry) {
List<String> deprecationWarnings = new ArrayList<>();
deprecationWarnings.addAll(datafeed.getAggDeprecations());
deprecationWarnings.addAll(datafeed.getQueryDeprecations());
deprecationWarnings.addAll(datafeed.getAggDeprecations(xContentRegistry));
deprecationWarnings.addAll(datafeed.getQueryDeprecations(xContentRegistry));
if (deprecationWarnings.isEmpty() == false) {
String msg = "datafeed [" + datafeed.getId() +"] configuration has deprecations. [" +
Strings.collectionToDelimitedString(deprecationWarnings, ", ") + "]";
@ -200,8 +206,8 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction<Star
jobBuilder -> {
try {
Job job = jobBuilder.build();
validate(job, datafeedConfigHolder.get(), tasks);
auditDeprecations(datafeedConfigHolder.get(), job, auditor);
validate(job, datafeedConfigHolder.get(), tasks, xContentRegistry);
auditDeprecations(datafeedConfigHolder.get(), job, auditor, xContentRegistry);
createDataExtrator.accept(job);
} catch (Exception e) {
listener.onFailure(e);
@ -231,7 +237,7 @@ public class TransportStartDatafeedAction extends TransportMasterNodeAction<Star
private void createDataExtractor(Job job, DatafeedConfig datafeed, StartDatafeedAction.DatafeedParams params,
ActionListener<PersistentTasksCustomMetaData.PersistentTask<StartDatafeedAction.DatafeedParams>>
listener) {
DataExtractorFactory.create(client, datafeed, job, ActionListener.wrap(
DataExtractorFactory.create(client, datafeed, job, xContentRegistry, ActionListener.wrap(
dataExtractorFactory ->
persistentTasksService.sendStartRequest(MlTasks.datafeedTaskId(params.getDatafeedId()),
MlTasks.DATAFEED_TASK_NAME, params, listener)


@ -49,7 +49,7 @@ public class TransportUpdateDatafeedAction extends TransportMasterNodeAction<Upd
indexNameExpressionResolver, UpdateDatafeedAction.Request::new);
datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry);
jobConfigProvider = new JobConfigProvider(client);
jobConfigProvider = new JobConfigProvider(client, xContentRegistry);
migrationEligibilityCheck = new MlConfigMigrationEligibilityCheck(settings, clusterService);
}


@ -54,7 +54,7 @@ public class DatafeedJobBuilder {
void build(String datafeedId, ActionListener<DatafeedJob> listener) {
JobResultsProvider jobResultsProvider = new JobResultsProvider(client, settings);
JobConfigProvider jobConfigProvider = new JobConfigProvider(client);
JobConfigProvider jobConfigProvider = new JobConfigProvider(client, xContentRegistry);
DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry);
build(datafeedId, jobResultsProvider, jobConfigProvider, datafeedConfigProvider, listener);
@ -72,10 +72,10 @@ public class DatafeedJobBuilder {
// Step 5. Build datafeed job object
Consumer<Context> contextHanlder = context -> {
TimeValue frequency = getFrequencyOrDefault(datafeedConfigHolder.get(), jobHolder.get());
TimeValue frequency = getFrequencyOrDefault(datafeedConfigHolder.get(), jobHolder.get(), xContentRegistry);
TimeValue queryDelay = datafeedConfigHolder.get().getQueryDelay();
DelayedDataDetector delayedDataDetector =
DelayedDataDetectorFactory.buildDetector(jobHolder.get(), datafeedConfigHolder.get(), client);
DelayedDataDetectorFactory.buildDetector(jobHolder.get(), datafeedConfigHolder.get(), client, xContentRegistry);
DatafeedJob datafeedJob = new DatafeedJob(jobHolder.get().getId(), buildDataDescription(jobHolder.get()),
frequency.millis(), queryDelay.millis(),
context.dataExtractorFactory, client, auditor, currentTimeSupplier, delayedDataDetector,
@ -102,7 +102,7 @@ public class DatafeedJobBuilder {
if (dataCounts.getLatestRecordTimeStamp() != null) {
context.latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime();
}
DataExtractorFactory.create(client, datafeedConfigHolder.get(), jobHolder.get(), dataExtractorFactoryHandler);
DataExtractorFactory.create(client, datafeedConfigHolder.get(), jobHolder.get(), xContentRegistry, dataExtractorFactoryHandler);
};
// Collect data counts
@ -137,7 +137,7 @@ public class DatafeedJobBuilder {
jobBuilder -> {
try {
jobHolder.set(jobBuilder.build());
DatafeedJobValidator.validate(datafeedConfigHolder.get(), jobHolder.get());
DatafeedJobValidator.validate(datafeedConfigHolder.get(), jobHolder.get(), xContentRegistry);
jobIdConsumer.accept(jobHolder.get().getId());
} catch (Exception e) {
listener.onFailure(e);
@ -162,11 +162,11 @@ public class DatafeedJobBuilder {
datafeedConfigProvider.getDatafeedConfig(datafeedId, datafeedConfigListener);
}
private static TimeValue getFrequencyOrDefault(DatafeedConfig datafeed, Job job) {
private static TimeValue getFrequencyOrDefault(DatafeedConfig datafeed, Job job, NamedXContentRegistry xContentRegistry) {
TimeValue frequency = datafeed.getFrequency();
if (frequency == null) {
TimeValue bucketSpan = job.getAnalysisConfig().getBucketSpan();
return datafeed.defaultFrequency(bucketSpan);
return datafeed.defaultFrequency(bucketSpan, xContentRegistry);
}
return frequency;
}
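defaultFrequency acquiring a registry parameter follows the same pattern: for an aggregated datafeed the sensible default frequency depends on the histogram interval, which is only known once the lazily stored aggs are parsed. A hedged sketch of that dependency; the helper names and the simplified rule are assumptions:

// Sketch only, not the real implementation.
TimeValue defaultFrequency(TimeValue bucketSpan, NamedXContentRegistry registry) {
    if (hasAggregations()) {
        long histogramIntervalMs = getHistogramIntervalMillis(registry); // parse on demand
        return TimeValue.timeValueMillis(histogramIntervalMs);           // simplified rule
    }
    return defaultScrollFrequency(bucketSpan);                           // assumed helper
}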


@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.datafeed.delayeddatacheck;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DelayedDataCheckConfig;
import org.elasticsearch.xpack.core.ml.job.config.Job;
@ -33,9 +34,13 @@ public class DelayedDataDetectorFactory {
* @param job The {@link Job} object for the given `datafeedConfig`
* @param datafeedConfig The {@link DatafeedConfig} for which to create the {@link DelayedDataDetector}
* @param client The {@link Client} capable of taking action against the ES Cluster.
* @param xContentRegistry The current NamedXContentRegistry with which to parse the query
* @return A new {@link DelayedDataDetector}
*/
public static DelayedDataDetector buildDetector(Job job, DatafeedConfig datafeedConfig, Client client) {
public static DelayedDataDetector buildDetector(Job job,
DatafeedConfig datafeedConfig,
Client client,
NamedXContentRegistry xContentRegistry) {
if (datafeedConfig.getDelayedDataCheckConfig().isEnabled()) {
long window = validateAndCalculateWindowLength(job.getAnalysisConfig().getBucketSpan(),
datafeedConfig.getDelayedDataCheckConfig().getCheckWindow());
@ -44,7 +49,7 @@ public class DelayedDataDetectorFactory {
window,
job.getId(),
job.getDataDescription().getTimeField(),
datafeedConfig.getParsedQuery(),
datafeedConfig.getParsedQuery(xContentRegistry),
datafeedConfig.getIndices().toArray(new String[0]),
client);
} else {


@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.datafeed.extractor;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -25,10 +26,14 @@ public interface DataExtractorFactory {
/**
* Creates a {@code DataExtractorFactory} for the given datafeed-job combination.
*/
static void create(Client client, DatafeedConfig datafeed, Job job, ActionListener<DataExtractorFactory> listener) {
static void create(Client client,
DatafeedConfig datafeed,
Job job,
NamedXContentRegistry xContentRegistry,
ActionListener<DataExtractorFactory> listener) {
ActionListener<DataExtractorFactory> factoryHandler = ActionListener.wrap(
factory -> listener.onResponse(datafeed.getChunkingConfig().isEnabled()
? new ChunkedDataExtractorFactory(client, datafeed, job, factory) : factory)
? new ChunkedDataExtractorFactory(client, datafeed, job, xContentRegistry, factory) : factory)
, listener::onFailure
);
@ -36,13 +41,13 @@ public interface DataExtractorFactory {
response -> {
if (response.getJobs().isEmpty()) { // This means no rollup indexes are in the config
if (datafeed.hasAggregations()) {
factoryHandler.onResponse(new AggregationDataExtractorFactory(client, datafeed, job));
factoryHandler.onResponse(new AggregationDataExtractorFactory(client, datafeed, job, xContentRegistry));
} else {
ScrollDataExtractorFactory.create(client, datafeed, job, factoryHandler);
ScrollDataExtractorFactory.create(client, datafeed, job, xContentRegistry, factoryHandler);
}
} else {
if (datafeed.hasAggregations()) { // Rollup indexes require aggregations
RollupDataExtractorFactory.create(client, datafeed, job, response.getJobs(), factoryHandler);
RollupDataExtractorFactory.create(client, datafeed, job, response.getJobs(), xContentRegistry, factoryHandler);
} else {
listener.onFailure(new IllegalArgumentException("Aggregations are required when using Rollup indices"));
}


@ -6,6 +6,7 @@
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
@ -19,23 +20,25 @@ public class AggregationDataExtractorFactory implements DataExtractorFactory {
private final Client client;
private final DatafeedConfig datafeedConfig;
private final Job job;
private final NamedXContentRegistry xContentRegistry;
public AggregationDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) {
public AggregationDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, NamedXContentRegistry xContentRegistry) {
this.client = Objects.requireNonNull(client);
this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
this.job = Objects.requireNonNull(job);
this.xContentRegistry = xContentRegistry;
}
@Override
public DataExtractor newExtractor(long start, long end) {
long histogramInterval = datafeedConfig.getHistogramIntervalMillis();
long histogramInterval = datafeedConfig.getHistogramIntervalMillis(xContentRegistry);
AggregationDataExtractorContext dataExtractorContext = new AggregationDataExtractorContext(
job.getId(),
job.getDataDescription().getTimeField(),
job.getAnalysisConfig().analysisFields(),
datafeedConfig.getIndices(),
datafeedConfig.getParsedQuery(),
datafeedConfig.getParsedAggregations(),
datafeedConfig.getParsedQuery(xContentRegistry),
datafeedConfig.getParsedAggregations(xContentRegistry),
Intervals.alignToCeil(start, histogramInterval),
Intervals.alignToFloor(end, histogramInterval),
job.getAnalysisConfig().getSummaryCountFieldName().equals(DatafeedConfig.DOC_COUNT),


@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
@ -41,23 +42,25 @@ public class RollupDataExtractorFactory implements DataExtractorFactory {
private final Client client;
private final DatafeedConfig datafeedConfig;
private final Job job;
private final NamedXContentRegistry xContentRegistry;
private RollupDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) {
private RollupDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, NamedXContentRegistry xContentRegistry) {
this.client = Objects.requireNonNull(client);
this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
this.job = Objects.requireNonNull(job);
this.xContentRegistry = xContentRegistry;
}
@Override
public DataExtractor newExtractor(long start, long end) {
long histogramInterval = datafeedConfig.getHistogramIntervalMillis();
long histogramInterval = datafeedConfig.getHistogramIntervalMillis(xContentRegistry);
AggregationDataExtractorContext dataExtractorContext = new AggregationDataExtractorContext(
job.getId(),
job.getDataDescription().getTimeField(),
job.getAnalysisConfig().analysisFields(),
datafeedConfig.getIndices(),
datafeedConfig.getParsedQuery(),
datafeedConfig.getParsedAggregations(),
datafeedConfig.getParsedQuery(xContentRegistry),
datafeedConfig.getParsedAggregations(xContentRegistry),
Intervals.alignToCeil(start, histogramInterval),
Intervals.alignToFloor(end, histogramInterval),
job.getAnalysisConfig().getSummaryCountFieldName().equals(DatafeedConfig.DOC_COUNT),
@ -69,10 +72,11 @@ public class RollupDataExtractorFactory implements DataExtractorFactory {
DatafeedConfig datafeed,
Job job,
Map<String, RollableIndexCaps> rollupJobsWithCaps,
NamedXContentRegistry xContentRegistry,
ActionListener<DataExtractorFactory> listener) {
final AggregationBuilder datafeedHistogramAggregation = getHistogramAggregation(
datafeed.getParsedAggregations().getAggregatorFactories());
datafeed.getParsedAggregations(xContentRegistry).getAggregatorFactories());
if ((datafeedHistogramAggregation instanceof DateHistogramAggregationBuilder) == false) {
listener.onFailure(
new IllegalArgumentException("Rollup requires that the datafeed configuration use a [date_histogram] aggregation," +
@ -103,7 +107,8 @@ public class RollupDataExtractorFactory implements DataExtractorFactory {
return;
}
final List<ValuesSourceAggregationBuilder> flattenedAggs = new ArrayList<>();
flattenAggregations(datafeed.getParsedAggregations().getAggregatorFactories(), datafeedHistogramAggregation, flattenedAggs);
flattenAggregations(datafeed.getParsedAggregations(xContentRegistry)
.getAggregatorFactories(), datafeedHistogramAggregation, flattenedAggs);
if (validIntervalCaps.stream().noneMatch(rollupJobConfig -> hasAggregations(rollupJobConfig, flattenedAggs))) {
listener.onFailure(
@ -112,7 +117,7 @@ public class RollupDataExtractorFactory implements DataExtractorFactory {
return;
}
listener.onResponse(new RollupDataExtractorFactory(client, datafeed, job));
listener.onResponse(new RollupDataExtractorFactory(client, datafeed, job, xContentRegistry));
}
private static boolean validInterval(long datafeedInterval, ParsedRollupCaps rollupJobGroupConfig) {


@ -6,6 +6,7 @@
package org.elasticsearch.xpack.ml.datafeed.extractor.chunked;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
@ -20,12 +21,18 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory {
private final DatafeedConfig datafeedConfig;
private final Job job;
private final DataExtractorFactory dataExtractorFactory;
private final NamedXContentRegistry xContentRegistry;
public ChunkedDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, DataExtractorFactory dataExtractorFactory) {
public ChunkedDataExtractorFactory(Client client,
DatafeedConfig datafeedConfig,
Job job,
NamedXContentRegistry xContentRegistry,
DataExtractorFactory dataExtractorFactory) {
this.client = Objects.requireNonNull(client);
this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
this.job = Objects.requireNonNull(job);
this.dataExtractorFactory = Objects.requireNonNull(dataExtractorFactory);
this.xContentRegistry = xContentRegistry;
}
@Override
@ -35,7 +42,7 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory {
job.getId(),
job.getDataDescription().getTimeField(),
datafeedConfig.getIndices(),
datafeedConfig.getParsedQuery(),
datafeedConfig.getParsedQuery(xContentRegistry),
datafeedConfig.getScrollSize(),
timeAligner.alignToCeil(start),
timeAligner.alignToFloor(end),
@ -43,7 +50,7 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory {
timeAligner,
datafeedConfig.getHeaders(),
datafeedConfig.hasAggregations(),
datafeedConfig.hasAggregations() ? datafeedConfig.getHistogramIntervalMillis() : null
datafeedConfig.hasAggregations() ? datafeedConfig.getHistogramIntervalMillis(xContentRegistry) : null
);
return new ChunkedDataExtractor(client, dataExtractorFactory, dataExtractorContext);
}
@ -55,7 +62,7 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory {
// the same bucket twice, we need to search buckets aligned to the histogram interval.
// This allows us to steer away from partial buckets, and thus avoid the problem of
// dropping or duplicating data.
return newIntervalTimeAligner(datafeedConfig.getHistogramIntervalMillis());
return newIntervalTimeAligner(datafeedConfig.getHistogramIntervalMillis(xContentRegistry));
}
return newIdentityTimeAligner();
}
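The alignment comment above is easiest to see with concrete numbers, assuming alignToCeil rounds a timestamp up to the next interval boundary and alignToFloor rounds down, which is the natural reading of the calls in this factory:

// Assumed semantics of the Intervals helpers used above.
static long alignToCeil(long value, long interval) {
    long floored = (value / interval) * interval;
    return floored == value ? floored : floored + interval; // round up to the next boundary
}

static long alignToFloor(long value, long interval) {
    return (value / interval) * interval;                   // round down, dropping the partial bucket
}

// With a 5-minute histogram interval (300_000 ms):
//   alignToCeil(1_000_123, 300_000)  -> 1_200_000  (search starts on a bucket boundary)
//   alignToFloor(1_499_999, 300_000) -> 1_200_000  (trailing partial bucket is excluded)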


@ -11,6 +11,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -29,12 +30,15 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory {
private final DatafeedConfig datafeedConfig;
private final Job job;
private final TimeBasedExtractedFields extractedFields;
private final NamedXContentRegistry xContentRegistry;
private ScrollDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, TimeBasedExtractedFields extractedFields) {
private ScrollDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, TimeBasedExtractedFields extractedFields,
NamedXContentRegistry xContentRegistry) {
this.client = Objects.requireNonNull(client);
this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
this.job = Objects.requireNonNull(job);
this.extractedFields = Objects.requireNonNull(extractedFields);
this.xContentRegistry = xContentRegistry;
}
@Override
@ -43,7 +47,7 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory {
job.getId(),
extractedFields,
datafeedConfig.getIndices(),
datafeedConfig.getParsedQuery(),
datafeedConfig.getParsedQuery(xContentRegistry),
datafeedConfig.getScriptFields(),
datafeedConfig.getScrollSize(),
start,
@ -52,13 +56,17 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory {
return new ScrollDataExtractor(client, dataExtractorContext);
}
public static void create(Client client, DatafeedConfig datafeed, Job job, ActionListener<DataExtractorFactory> listener) {
public static void create(Client client,
DatafeedConfig datafeed,
Job job,
NamedXContentRegistry xContentRegistry,
ActionListener<DataExtractorFactory> listener) {
// Step 2. Construct the factory and notify listener
ActionListener<FieldCapabilitiesResponse> fieldCapabilitiesHandler = ActionListener.wrap(
fieldCapabilitiesResponse -> {
TimeBasedExtractedFields extractedFields = TimeBasedExtractedFields.build(job, datafeed, fieldCapabilitiesResponse);
listener.onResponse(new ScrollDataExtractorFactory(client, datafeed, job, extractedFields));
listener.onResponse(new ScrollDataExtractorFactory(client, datafeed, job, extractedFields, xContentRegistry));
}, e -> {
if (e instanceof IndexNotFoundException) {
listener.onFailure(new ResourceNotFoundException("datafeed [" + datafeed.getId()


@ -22,6 +22,7 @@ import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -101,7 +102,7 @@ public class JobManager {
*/
public JobManager(Environment environment, Settings settings, JobResultsProvider jobResultsProvider,
ClusterService clusterService, Auditor auditor, ThreadPool threadPool,
Client client, UpdateJobProcessNotifier updateJobProcessNotifier) {
Client client, UpdateJobProcessNotifier updateJobProcessNotifier, NamedXContentRegistry xContentRegistry) {
this.environment = environment;
this.jobResultsProvider = Objects.requireNonNull(jobResultsProvider);
this.clusterService = Objects.requireNonNull(clusterService);
@ -109,7 +110,7 @@ public class JobManager {
this.client = Objects.requireNonNull(client);
this.threadPool = Objects.requireNonNull(threadPool);
this.updateJobProcessNotifier = updateJobProcessNotifier;
this.jobConfigProvider = new JobConfigProvider(client);
this.jobConfigProvider = new JobConfigProvider(client, xContentRegistry);
this.migrationEligibilityCheck = new MlConfigMigrationEligibilityCheck(settings, clusterService);
maxModelMemoryLimit = MachineLearningField.MAX_MODEL_MEMORY_LIMIT.get(settings);


@ -102,9 +102,11 @@ public class JobConfigProvider {
}
private final Client client;
private final NamedXContentRegistry xContentRegistry;
public JobConfigProvider(Client client) {
public JobConfigProvider(Client client, NamedXContentRegistry xContentRegistry) {
this.client = client;
this.xContentRegistry = xContentRegistry;
}
/**
@ -737,7 +739,7 @@ public class JobConfigProvider {
getJob(config.getJobId(), ActionListener.wrap(
jobBuilder -> {
try {
DatafeedJobValidator.validate(config, jobBuilder.build());
DatafeedJobValidator.validate(config, jobBuilder.build(), xContentRegistry);
listener.onResponse(Boolean.TRUE);
} catch (Exception e) {
listener.onFailure(e);


@ -22,9 +22,11 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlTasks;
@ -52,6 +54,12 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase {
clusterService = mock(ClusterService.class);
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testCanStartMigration_givenMigrationIsDisabled() {
Settings settings = newSettings(false);
givenClusterSettings(settings);
@ -327,7 +335,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase {
public void testDatafeedIsEligibleForMigration_givenStartedDatafeed() {
Job openJob = JobTests.buildJobBuilder("open-job").build();
MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(openJob, false);
mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap());
mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap(), xContentRegistry());
String datafeedId = "df-" + openJob.getId();
PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@ -353,7 +361,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase {
public void testDatafeedIsEligibleForMigration_givenStartedDatafeedAndMigrationIsDisabled() {
Job openJob = JobTests.buildJobBuilder("open-job").build();
MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(openJob, false);
mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap());
mlMetadata.putDatafeed(createCompatibleDatafeed(openJob.getId()), Collections.emptyMap(), xContentRegistry());
String datafeedId = "df-" + openJob.getId();
PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@ -379,7 +387,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase {
public void testDatafeedIsEligibleForMigration_givenStoppedDatafeed() {
Job job = JobTests.buildJobBuilder("closed-job").build();
MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(job, false);
mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap());
mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap(), xContentRegistry());
String datafeedId = "df-" + job.getId();
MetaData.Builder metaData = MetaData.builder();
@ -402,7 +410,7 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase {
public void testDatafeedIsEligibleForMigration_givenUnallocatedDatafeed() {
Job job = JobTests.buildJobBuilder("closed-job").build();
MlMetadata.Builder mlMetadata = new MlMetadata.Builder().putJob(job, false);
mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap());
mlMetadata.putDatafeed(createCompatibleDatafeed(job.getId()), Collections.emptyMap(), xContentRegistry());
String datafeedId = "df-" + job.getId();
MetaData.Builder metaData = MetaData.builder();


@ -14,8 +14,11 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlTasks;
@ -47,6 +50,12 @@ import static org.mockito.Mockito.when;
public class MlConfigMigratorTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testNonDeletingJobs() {
Job job1 = JobTests.buildJobBuilder("openjob1").build();
Job job2 = JobTests.buildJobBuilder("openjob2").build();
@ -64,7 +73,7 @@ public class MlConfigMigratorTests extends ESTestCase {
.putJob(closedJob, false)
.putJob(jobWithoutAllocation, false)
.putJob(openJob, false)
.putDatafeed(createCompatibleDatafeed(closedJob.getId()), Collections.emptyMap());
.putDatafeed(createCompatibleDatafeed(closedJob.getId()), Collections.emptyMap(), xContentRegistry());
PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
tasksBuilder.addTask(MlTasks.jobTaskId("jobwithoutallocation"), MlTasks.JOB_TASK_NAME,
@ -103,9 +112,9 @@ public class MlConfigMigratorTests extends ESTestCase {
.putJob(job1, false)
.putJob(job2, false)
.putJob(job3, false)
.putDatafeed(stopppedDatafeed, Collections.emptyMap())
.putDatafeed(datafeedWithoutAllocation, Collections.emptyMap())
.putDatafeed(startedDatafeed, Collections.emptyMap());
.putDatafeed(stopppedDatafeed, Collections.emptyMap(), xContentRegistry())
.putDatafeed(datafeedWithoutAllocation, Collections.emptyMap(), xContentRegistry())
.putDatafeed(startedDatafeed, Collections.emptyMap(), xContentRegistry());
PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
tasksBuilder.addTask(MlTasks.datafeedTaskId(stopppedDatafeed.getId()), MlTasks.DATAFEED_TASK_NAME,
@ -206,8 +215,8 @@ public class MlConfigMigratorTests extends ESTestCase {
MlMetadata.Builder mlMetadata = new MlMetadata.Builder()
.putJob(job1, false)
.putJob(job2, false)
.putDatafeed(datafeedConfig1, Collections.emptyMap())
.putDatafeed(datafeedConfig2, Collections.emptyMap());
.putDatafeed(datafeedConfig1, Collections.emptyMap(), xContentRegistry())
.putDatafeed(datafeedConfig2, Collections.emptyMap(), xContentRegistry());
MlConfigMigrator.RemovalResult removalResult = MlConfigMigrator.removeJobsAndDatafeeds(
Arrays.asList(job1, job2), Arrays.asList(datafeedConfig1, datafeedConfig2), mlMetadata.build());
@ -225,7 +234,7 @@ public class MlConfigMigratorTests extends ESTestCase {
MlMetadata.Builder mlMetadata = new MlMetadata.Builder()
.putJob(job1, false)
.putJob(job2, false)
.putDatafeed(datafeedConfig1, Collections.emptyMap());
.putDatafeed(datafeedConfig1, Collections.emptyMap(), xContentRegistry());
MlConfigMigrator.RemovalResult removalResult = MlConfigMigrator.removeJobsAndDatafeeds(
Arrays.asList(job1, JobTests.buildJobBuilder("job-none").build()),

View File

@ -50,7 +50,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
}
job = new Job.Builder(job).setAnalysisConfig(analysisConfig).build();
builder.putJob(job, false);
builder.putDatafeed(datafeedConfig, Collections.emptyMap());
builder.putDatafeed(datafeedConfig, Collections.emptyMap(), xContentRegistry());
} else {
builder.putJob(job, false);
}
@ -151,7 +151,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
metadataBuilder.putJob(entry.getValue(), true);
}
for (Map.Entry<String, DatafeedConfig> entry : datafeeds.entrySet()) {
metadataBuilder.putDatafeed(entry.getValue(), Collections.emptyMap());
metadataBuilder.putDatafeed(entry.getValue(), Collections.emptyMap(), xContentRegistry());
}
switch (between(0, 1)) {
@ -172,7 +172,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
}
randomJob = new Job.Builder(randomJob).setAnalysisConfig(analysisConfig).build();
metadataBuilder.putJob(randomJob, false);
metadataBuilder.putDatafeed(datafeedConfig, Collections.emptyMap());
metadataBuilder.putDatafeed(datafeedConfig, Collections.emptyMap(), xContentRegistry());
break;
default:
throw new AssertionError("Illegal randomisation branch");
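
These serialization round trips work because putDatafeed can now parse the lazily held query and aggregations through the supplied registry. A minimal sketch of the new call shape as a self-contained test (test name and ids invented; helper names borrowed from the migrator tests above):

public void testPutDatafeedWithRegistry() {
    Job job = JobTests.buildJobBuilder("metric-job").build();
    DatafeedConfig.Builder datafeed = new DatafeedConfig.Builder("df-metric", job.getId());
    datafeed.setIndices(Collections.singletonList("metrics-*"));
    // putDatafeed validates the datafeed, which may parse its query and
    // aggregations, hence the registry argument.
    MlMetadata metadata = new MlMetadata.Builder()
            .putJob(job, false)
            .putDatafeed(datafeed.build(), Collections.emptyMap(), xContentRegistry())
            .build();
    assertNotNull(metadata.getDatafeed("df-metric"));
}
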

View File

@ -9,14 +9,17 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.license.LicenseService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
@ -44,6 +47,12 @@ public abstract class MlSingleNodeTestCase extends ESSingleNodeTestCase {
return newSettings.build();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(LocalStateMachineLearning.class);

View File

@ -7,7 +7,10 @@
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
@ -33,12 +36,18 @@ import static org.mockito.Mockito.verify;
public class TransportStartDatafeedActionTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testValidate_jobClosed() {
Job job1 = DatafeedManagerTests.createDatafeedJob().build(new Date());
PersistentTasksCustomMetaData tasks = PersistentTasksCustomMetaData.builder().build();
DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build();
Exception e = expectThrows(ElasticsearchStatusException.class,
() -> TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks));
() -> TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks, xContentRegistry()));
assertThat(e.getMessage(), equalTo("cannot start datafeed [foo-datafeed] because job [job_id] is closed"));
}
@ -49,7 +58,7 @@ public class TransportStartDatafeedActionTests extends ESTestCase {
PersistentTasksCustomMetaData tasks = tasksBuilder.build();
DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build();
TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks);
TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks, xContentRegistry());
}
public void testValidate_jobOpened() {
@ -59,19 +68,19 @@ public class TransportStartDatafeedActionTests extends ESTestCase {
PersistentTasksCustomMetaData tasks = tasksBuilder.build();
DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build();
TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks);
TransportStartDatafeedAction.validate(job1, datafeedConfig1, tasks, xContentRegistry());
}
public void testDeprecationsLogged() {
Job job1 = DatafeedManagerTests.createDatafeedJob().build(new Date());
DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("start-data-feed-test", job1.getId());
DatafeedConfig config = spy(datafeedConfig.build());
doReturn(Collections.singletonList("Deprecated Agg")).when(config).getAggDeprecations();
doReturn(Collections.singletonList("Deprecated Query")).when(config).getQueryDeprecations();
doReturn(Collections.singletonList("Deprecated Agg")).when(config).getAggDeprecations(any(NamedXContentRegistry.class));
doReturn(Collections.singletonList("Deprecated Query")).when(config).getQueryDeprecations(any(NamedXContentRegistry.class));
Auditor auditor = mock(Auditor.class);
TransportStartDatafeedAction.auditDeprecations(config, job1, auditor);
TransportStartDatafeedAction.auditDeprecations(config, job1, auditor, xContentRegistry());
verify(auditor).warning(job1.getId(),
"datafeed [start-data-feed-test] configuration has deprecations. [Deprecated Agg, Deprecated Query]");
@ -81,12 +90,12 @@ public class TransportStartDatafeedActionTests extends ESTestCase {
Job job1 = DatafeedManagerTests.createDatafeedJob().build(new Date());
DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("start-data-feed-test", job1.getId());
DatafeedConfig config = spy(datafeedConfig.build());
doReturn(Collections.emptyList()).when(config).getAggDeprecations();
doReturn(Collections.emptyList()).when(config).getQueryDeprecations();
doReturn(Collections.emptyList()).when(config).getAggDeprecations(any(NamedXContentRegistry.class));
doReturn(Collections.emptyList()).when(config).getQueryDeprecations(any(NamedXContentRegistry.class));
Auditor auditor = mock(Auditor.class);
TransportStartDatafeedAction.auditDeprecations(config, job1, auditor);
TransportStartDatafeedAction.auditDeprecations(config, job1, auditor, xContentRegistry());
verify(auditor, never()).warning(any(), any());
}
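
Since the deprecation accessors now take the registry as an argument, Mockito stubs must match on it explicitly; the exact registry instance passed at runtime is not known to the test ahead of time. A condensed sketch of the pattern (test name and datafeed id invented; assumes the usual static Mockito imports):

public void testDeprecationStubbing() {
    Job job = DatafeedManagerTests.createDatafeedJob().build(new Date());
    DatafeedConfig config = spy(DatafeedManagerTests.createDatafeedConfig("df-audit", job.getId()).build());
    // Match on any registry instance rather than a specific one.
    doReturn(Collections.singletonList("Deprecated Query"))
            .when(config).getQueryDeprecations(any(NamedXContentRegistry.class));
    doReturn(Collections.emptyList())
            .when(config).getAggDeprecations(any(NamedXContentRegistry.class));

    Auditor auditor = mock(Auditor.class);
    TransportStartDatafeedAction.auditDeprecations(config, job, auditor, xContentRegistry());
    verify(auditor).warning(eq(job.getId()), anyString());
}
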

View File

@ -6,7 +6,10 @@
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
@ -28,6 +31,12 @@ import java.util.Date;
public class DatafeedJobValidatorTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testValidate_GivenNonZeroLatency() {
String errorMessage = Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY);
Job.Builder builder = buildJobBuilder("foo");
@ -39,7 +48,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()));
assertEquals(errorMessage, e.getMessage());
}
@ -53,7 +62,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
Job job = builder.build(new Date());
DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
DatafeedJobValidator.validate(datafeedConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry());
}
public void testVerify_GivenNoLatency() {
@ -64,7 +73,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
Job job = builder.build(new Date());
DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
DatafeedJobValidator.validate(datafeedConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry());
}
public void testVerify_GivenAggsAndNoSummaryCountField() throws IOException {
@ -79,7 +88,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(1800.0).build();
ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()));
assertEquals(errorMessage, e.getMessage());
}
@ -96,7 +105,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(1800.0).build();
ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()));
assertEquals(errorMessage, e.getMessage());
}
@ -109,7 +118,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
builder.setAnalysisConfig(ac);
Job job = builder.build(new Date());
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(900.0).build();
DatafeedJobValidator.validate(datafeedConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry());
}
public void testVerify_GivenHistogramIntervalGreaterThanBucketSpan() throws IOException {
@ -122,7 +131,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(1800001.0).build();
ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()));
assertEquals("Aggregation interval [1800001ms] must be less than or equal to the bucket_span [1800000ms]", e.getMessage());
}
@ -137,11 +146,11 @@ public class DatafeedJobValidatorTests extends ESTestCase {
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(37 * 1000).build();
ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry()));
assertEquals("Aggregation interval [37000ms] must be a divisor of the bucket_span [300000ms]", e.getMessage());
DatafeedConfig goodDatafeedConfig = createValidDatafeedConfigWithAggs(60 * 1000).build();
DatafeedJobValidator.validate(goodDatafeedConfig, job);
DatafeedJobValidator.validate(goodDatafeedConfig, job, xContentRegistry());
}
public void testVerify_FrequencyIsMultipleOfHistogramInterval() throws IOException {
@ -155,25 +164,25 @@ public class DatafeedJobValidatorTests extends ESTestCase {
// Check with multiples
datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(60));
DatafeedJobValidator.validate(datafeedBuilder.build(), job);
DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry());
datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(120));
DatafeedJobValidator.validate(datafeedBuilder.build(), job);
DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry());
datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(180));
DatafeedJobValidator.validate(datafeedBuilder.build(), job);
DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry());
datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(240));
DatafeedJobValidator.validate(datafeedBuilder.build(), job);
DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry());
datafeedBuilder.setFrequency(TimeValue.timeValueHours(1));
DatafeedJobValidator.validate(datafeedBuilder.build(), job);
DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry());
// Now non-multiples
datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(30));
ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job));
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()));
assertEquals("Datafeed frequency [30s] must be a multiple of the aggregation interval [60000ms]", e.getMessage());
datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(90));
e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job));
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()));
assertEquals("Datafeed frequency [1.5m] must be a multiple of the aggregation interval [60000ms]", e.getMessage());
}
@ -187,16 +196,16 @@ public class DatafeedJobValidatorTests extends ESTestCase {
DatafeedConfig.Builder datafeedBuilder = createValidDatafeedConfig();
datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueMinutes(10)));
DatafeedJobValidator.validate(datafeedBuilder.build(), job);
DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry());
datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueSeconds(1)));
ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job));
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()));
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_TOO_SMALL, "1s", "2s"), e.getMessage());
datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(24)));
e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job));
() -> DatafeedJobValidator.validate(datafeedBuilder.build(), job, xContentRegistry()));
assertEquals(Messages.getMessage(
Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_SPANS_TOO_MANY_BUCKETS, "1d", "2s"), e.getMessage());
}
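
Every check in this validator (latency, histogram interval versus bucket span, frequency, delayed-data window) works off the parsed aggregations, which is why validate now threads the registry through. A minimal happy-path sketch using this class's helpers (test name invented):

public void testValidate_defaultConfigPasses() {
    Job job = buildJobBuilder("validator-example").build(new Date());
    DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
    // No latency, no aggregations: with a populated registry this validates cleanly.
    DatafeedJobValidator.validate(datafeedConfig, job, xContentRegistry());
}
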

View File

@ -6,7 +6,10 @@
package org.elasticsearch.xpack.ml.datafeed.delayeddatacheck;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DelayedDataCheckConfig;
@ -26,29 +29,35 @@ import static org.mockito.Mockito.mock;
public class DelayedDataDetectorFactoryTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testBuilder() {
Job job = createJob(TimeValue.timeValueSeconds(2));
DatafeedConfig datafeedConfig = createDatafeed(false, null);
// Should not throw
assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class)),
assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class), xContentRegistry()),
instanceOf(NullDelayedDataDetector.class));
datafeedConfig = createDatafeed(true, TimeValue.timeValueMinutes(10));
// Should not throw
assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class)),
assertThat(DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class), xContentRegistry()),
instanceOf(DatafeedDelayedDataDetector.class));
DatafeedConfig tooSmallDatafeedConfig = createDatafeed(true, TimeValue.timeValueSeconds(1));
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
() -> DelayedDataDetectorFactory.buildDetector(job, tooSmallDatafeedConfig, mock(Client.class)));
() -> DelayedDataDetectorFactory.buildDetector(job, tooSmallDatafeedConfig, mock(Client.class), xContentRegistry()));
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_TOO_SMALL, "1s", "2s"), e.getMessage());
DatafeedConfig tooBigDatafeedConfig = createDatafeed(true, TimeValue.timeValueHours(12));
e = ESTestCase.expectThrows(IllegalArgumentException.class,
() -> DelayedDataDetectorFactory.buildDetector(job, tooBigDatafeedConfig, mock(Client.class)));
() -> DelayedDataDetectorFactory.buildDetector(job, tooBigDatafeedConfig, mock(Client.class), xContentRegistry()));
assertEquals(Messages.getMessage(
Messages.DATAFEED_CONFIG_DELAYED_DATA_CHECK_SPANS_TOO_MANY_BUCKETS, "12h", "2s"), e.getMessage());
@ -57,14 +66,14 @@ public class DelayedDataDetectorFactoryTests extends ESTestCase {
// Should not throw
DelayedDataDetector delayedDataDetector =
DelayedDataDetectorFactory.buildDetector(withBigBucketSpan, datafeedConfig, mock(Client.class));
DelayedDataDetectorFactory.buildDetector(withBigBucketSpan, datafeedConfig, mock(Client.class), xContentRegistry());
assertThat(delayedDataDetector.getWindow(), equalTo(TimeValue.timeValueHours(1).millis() * 8));
datafeedConfig = createDatafeed(true, null);
// Should not throw
delayedDataDetector =
DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class));
DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class), xContentRegistry());
assertThat(delayedDataDetector.getWindow(), equalTo(TimeValue.timeValueHours(2).millis()));
}
@ -98,5 +107,4 @@ public class DelayedDataDetectorFactoryTests extends ESTestCase {
return builder.build();
}
}
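
Both the detector choice and its window depend on the datafeed's now lazily parsed aggregations, so buildDetector needs the registry too. A condensed sketch of the enabled-check case from testBuilder above (test name invented; helpers from this class):

public void testBuildDetector_enabledCheck() {
    Job job = createJob(TimeValue.timeValueSeconds(2));
    DatafeedConfig datafeedConfig = createDatafeed(true, TimeValue.timeValueMinutes(10));
    // An enabled delayed-data check with a sane window yields the real detector.
    DelayedDataDetector detector =
            DelayedDataDetectorFactory.buildDetector(job, datafeedConfig, mock(Client.class), xContentRegistry());
    assertThat(detector, instanceOf(DatafeedDelayedDataDetector.class));
}
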

View File

@ -12,6 +12,8 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@ -61,6 +63,12 @@ public class DataExtractorFactoryTests extends ESTestCase {
private Client client;
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Before
public void setUpTests() {
client = mock(Client.class);
@ -101,7 +109,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig, jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig, jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenScrollWithAutoChunk() {
@ -117,7 +125,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenScrollWithOffChunk() {
@ -133,7 +141,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenDefaultAggregation() {
@ -151,7 +159,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenAggregationWithOffChunk() {
@ -170,7 +178,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenDefaultAggregationWithAutoChunk() {
@ -189,7 +197,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenRollupAndValidAggregation() {
@ -209,7 +217,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(RollupDataExtractorFactory.class)),
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenRollupAndValidAggregationAndAutoChunk() {
@ -229,7 +237,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(ChunkedDataExtractorFactory.class)),
e -> fail()
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenRollupButNoAggregations() {
@ -249,7 +257,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
}
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenRollupWithBadInterval() {
@ -274,7 +282,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
assertThat(e, instanceOf(IllegalArgumentException.class));
}
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenRollupMissingTerms() {
@ -298,7 +306,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
assertThat(e, instanceOf(IllegalArgumentException.class));
}
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
public void testCreateDataExtractorFactoryGivenRollupMissingMetric() {
@ -322,7 +330,7 @@ public class DataExtractorFactoryTests extends ESTestCase {
assertThat(e, instanceOf(IllegalArgumentException.class));
}
);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener);
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
}
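
DataExtractorFactory.create resolves the concrete extractor type asynchronously, so the registry travels with the other inputs and the result arrives through a listener. A fragment of the shape, assuming the client, datafeedConfig, and jobBuilder fixtures from the surrounding tests (the expected type mirrors this class's auto-chunking assertions):

ActionListener<DataExtractorFactory> listener = ActionListener.wrap(
        dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(ChunkedDataExtractorFactory.class)),
        e -> fail()
);
// The registry is the only new ingredient; everything else is unchanged.
DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener);
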
private void givenAggregatableRollup(String field, String type, int minuteInterval, String... groupByTerms) {

View File

@ -6,6 +6,9 @@
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.test.ESTestCase;
@ -17,6 +20,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.junit.Before;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import static org.hamcrest.Matchers.equalTo;
@ -31,6 +35,12 @@ public class AggregationDataExtractorFactoryTests extends ESTestCase {
client = mock(Client.class);
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testNewExtractor_GivenAlignedTimes() {
AggregationDataExtractorFactory factory = createFactory(1000L);
@ -66,6 +76,6 @@ public class AggregationDataExtractorFactoryTests extends ESTestCase {
DatafeedConfig.Builder datafeedConfigBuilder = new DatafeedConfig.Builder("foo-feed", jobBuilder.getId());
datafeedConfigBuilder.setParsedAggregations(aggs);
datafeedConfigBuilder.setIndices(Arrays.asList("my_index"));
return new AggregationDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date()));
return new AggregationDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date()), xContentRegistry());
}
}

View File

@ -6,6 +6,9 @@
package org.elasticsearch.xpack.ml.datafeed.extractor.chunked;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.test.ESTestCase;
@ -18,6 +21,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.junit.Before;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import static org.hamcrest.Matchers.equalTo;
@ -28,6 +32,12 @@ public class ChunkedDataExtractorFactoryTests extends ESTestCase {
private Client client;
private DataExtractorFactory dataExtractorFactory;
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Before
public void setUpMocks() {
client = mock(Client.class);
@ -93,6 +103,7 @@ public class ChunkedDataExtractorFactoryTests extends ESTestCase {
DatafeedConfig.Builder datafeedConfigBuilder = new DatafeedConfig.Builder("foo-feed", jobBuilder.getId());
datafeedConfigBuilder.setParsedAggregations(aggs);
datafeedConfigBuilder.setIndices(Arrays.asList("my_index"));
return new ChunkedDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date()), dataExtractorFactory);
return new ChunkedDataExtractorFactory(client, datafeedConfigBuilder.build(), jobBuilder.build(new Date()),
xContentRegistry(), dataExtractorFactory);
}
}
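
Both concrete factories follow the same pattern; the chunked variant additionally wraps the delegate that performs the per-chunk extraction, so the registry slots in before it. A fragment of the two constructions, assuming a built datafeedConfig, job, and client as in these tests:

// The aggregation factory can itself serve as the chunked factory's delegate.
AggregationDataExtractorFactory aggFactory =
        new AggregationDataExtractorFactory(client, datafeedConfig, job, xContentRegistry());
ChunkedDataExtractorFactory chunkedFactory =
        new ChunkedDataExtractorFactory(client, datafeedConfig, job, xContentRegistry(), aggFactory);
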

View File

@ -57,7 +57,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
@Before
public void createComponents() throws Exception {
jobConfigProvider = new JobConfigProvider(client());
jobConfigProvider = new JobConfigProvider(client(), xContentRegistry());
waitForMlTemplates();
}
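
JobConfigProvider reads job and datafeed documents out of the config index, and parsing those documents is what the registry is for. A sketch of usage after this change, with the test's blocking-call plumbing simplified to a plain listener:

JobConfigProvider provider = new JobConfigProvider(client(), xContentRegistry());
AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>();
// expandJobs parses every job document it matches, which needs the registry.
provider.expandJobs("*", true, true, ActionListener.wrap(jobsHolder::set, e -> fail()));
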

View File

@ -28,7 +28,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
@ -90,7 +89,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
final String indexJobId = "job-already-migrated";
// Add a job to the index
JobConfigProvider jobConfigProvider = new JobConfigProvider(client());
JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry());
Job indexJob = buildJobBuilder(indexJobId).build();
// Same as index job but has extra fields in its custom settings
// which will be used to check the config was overwritten
@ -139,7 +138,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-1", "job-foo");
builder.setIndices(Collections.singletonList("beats*"));
mlMetadata.putDatafeed(builder.build(), Collections.emptyMap());
mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry());
MetaData.Builder metaData = MetaData.builder();
RoutingTable.Builder routingTable = RoutingTable.builder();
@ -171,7 +170,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
// check the jobs have been migrated
AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>();
JobConfigProvider jobConfigProvider = new JobConfigProvider(client());
JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry());
blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener),
jobsHolder, exceptionHolder);
@ -240,7 +239,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
// check the jobs have been migrated
AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>();
JobConfigProvider jobConfigProvider = new JobConfigProvider(client());
JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry());
blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener),
jobsHolder, exceptionHolder);
@ -262,7 +261,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
for (int i = 0; i < datafeedCount; i++) {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-" + i, "job-" + i);
builder.setIndices(Collections.singletonList("beats*"));
mlMetadata.putDatafeed(builder.build(), Collections.emptyMap());
mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry());
}
MetaData.Builder metaData = MetaData.builder();
@ -293,7 +292,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
// check the jobs have been migrated
AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>();
JobConfigProvider jobConfigProvider = new JobConfigProvider(client());
JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry());
blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener),
jobsHolder, exceptionHolder);
@ -344,7 +343,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
mlMetadata.putJob(buildJobBuilder("job-bar").build(), false);
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-1", "job-foo");
builder.setIndices(Collections.singletonList("beats*"));
mlMetadata.putDatafeed(builder.build(), Collections.emptyMap());
mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry());
ClusterState clusterState = ClusterState.builder(new ClusterName("_name"))
.metaData(MetaData.builder()
@ -364,7 +363,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
// check the jobs have not been migrated
AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>();
JobConfigProvider jobConfigProvider = new JobConfigProvider(client());
JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry());
blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener),
jobsHolder, exceptionHolder);
assertNull(exceptionHolder.get());
@ -392,7 +391,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
try (InputStream stream = searchResponse.getHits().getAt(0).getSourceRef().streamInput();
XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
.createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, stream)) {
MlMetadata recoveredMeta = MlMetadata.LENIENT_PARSER.apply(parser, null).build();
assertEquals(expectedMlMetadata, recoveredMeta);
}
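
This parser change is the subtle end of the refactor: the migrated-config snapshot embeds the datafeeds, so it is re-read with the populated registry rather than NamedXContentRegistry.EMPTY, so that the embedded datafeed query and aggregations can resolve their named parsers. Schematically (same calls as above, condensed):

try (InputStream stream = searchResponse.getHits().getAt(0).getSourceRef().streamInput();
     XContentParser parser = XContentFactory.xContent(XContentType.JSON)
             .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, stream)) {
    MlMetadata recovered = MlMetadata.LENIENT_PARSER.apply(parser, null).build();
    assertEquals(expectedMlMetadata, recovered);
}
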

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -37,6 +38,7 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
@ -104,6 +106,12 @@ public class JobManagerTests extends ESTestCase {
private Auditor auditor;
private UpdateJobProcessNotifier updateJobProcessNotifier;
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
@Before
public void setup() throws Exception {
Settings settings = Settings.builder()
@ -586,7 +594,7 @@ public class JobManagerTests extends ESTestCase {
private JobManager createJobManager(Client client) {
return new JobManager(environment, environment.settings(), jobResultsProvider, clusterService,
auditor, threadPool, client, updateJobProcessNotifier);
auditor, threadPool, client, updateJobProcessNotifier, xContentRegistry());
}
private ClusterState createClusterState() {