Merge branch 'master' into feature/sql
Original commit: elastic/x-pack-elasticsearch@d24c740fe3
This commit is contained in: 8a7a724179

@@ -29,6 +29,7 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedUpdate;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobState;
 import org.elasticsearch.xpack.ml.job.config.JobTaskStatus;
+import org.elasticsearch.xpack.ml.job.groups.GroupOrJobLookup;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.ml.utils.NameResolver;
@@ -66,10 +67,12 @@ public class MlMetadata implements MetaData.Custom {

     private final SortedMap<String, Job> jobs;
     private final SortedMap<String, DatafeedConfig> datafeeds;
+    private final GroupOrJobLookup groupOrJobLookup;

     private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, DatafeedConfig> datafeeds) {
         this.jobs = Collections.unmodifiableSortedMap(jobs);
         this.datafeeds = Collections.unmodifiableSortedMap(datafeeds);
+        this.groupOrJobLookup = new GroupOrJobLookup(jobs.values());
     }

     public Map<String, Job> getJobs() {
@@ -77,8 +80,7 @@ public class MlMetadata implements MetaData.Custom {
     }

     public Set<String> expandJobIds(String expression, boolean allowNoJobs) {
-        return NameResolver.newUnaliased(jobs.keySet(), jobId -> ExceptionsHelper.missingJobException(jobId))
-                .expand(expression, allowNoJobs);
+        return groupOrJobLookup.expandJobIds(expression, allowNoJobs);
     }

     public boolean isJobDeleted(String jobId) {
@@ -136,6 +138,8 @@ public class MlMetadata implements MetaData.Custom {
             datafeeds.put(in.readString(), new DatafeedConfig(in));
         }
         this.datafeeds = datafeeds;
+
+        this.groupOrJobLookup = new GroupOrJobLookup(jobs.values());
     }

     @Override

@@ -48,5 +48,13 @@
                     TimeValue.timeValueMillis(histogramIntervalMillis).getStringRep(),
                     TimeValue.timeValueMillis(bucketSpanMillis).getStringRep()));
         }
+
+        if (bucketSpanMillis % histogramIntervalMillis != 0) {
+            throw ExceptionsHelper.badRequestException(Messages.getMessage(
+                    Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_DIVISOR_OF_BUCKET_SPAN,
+                    TimeValue.timeValueMillis(histogramIntervalMillis).getStringRep(),
+                    TimeValue.timeValueMillis(bucketSpanMillis).getStringRep()));
+        }
+
     }
 }
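
The new check above rejects datafeeds whose histogram interval does not divide the job's bucket_span evenly; otherwise histogram buckets would straddle job bucket boundaries. A minimal sketch of the same arithmetic with hypothetical values (the variable names are illustrative, not from the class above):

    long bucketSpanMillis = 300_000L;        // 5m bucket_span, as in the test further down
    long histogramIntervalMillis = 37_000L;  // 37s histogram interval
    // 300000 % 37000 != 0, so validation fails with:
    // "Aggregation interval [37000ms] must be a divisor of the bucket_span [300000ms]"
    boolean divides = bucketSpanMillis % histogramIntervalMillis == 0;
    // a 60s interval divides 300s evenly and is accepted
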
@@ -115,13 +115,10 @@ class AggregationDataExtractor implements DataExtractor {
     }

     private SearchRequestBuilder buildSearchRequest() {
-        long histogramSearchStartTime = context.start;
-        if (context.aggs.getPipelineAggregatorFactories().isEmpty() == false) {
         // For derivative aggregations the first bucket will always be null
         // so query one extra histogram bucket back and hope there is data
         // in that bucket
-            histogramSearchStartTime = Math.max(0, context.start - getHistogramInterval());
-        }
+        long histogramSearchStartTime = Math.max(0, context.start - getHistogramInterval());

         SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
                 .setIndices(context.indices)
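
With the change above, every aggregation datafeed now searches from one histogram bucket before the requested start, not only those with pipeline (e.g. derivative) aggregations whose first bucket is always null. A small sketch of the start-time arithmetic (hypothetical numbers):

    long start = 1_000_000L;                // requested extraction start, epoch ms
    long histogramIntervalMillis = 60_000L; // 1m histogram interval
    // query one extra bucket back; Math.max clamps at 0 near the epoch
    long histogramSearchStartTime = Math.max(0, start - histogramIntervalMillis);
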
@@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
-import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -29,6 +28,7 @@ import org.elasticsearch.xpack.ml.utils.time.TimeUtils;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
 import java.util.EnumMap;
 import java.util.HashSet;
@@ -57,6 +57,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
     public static final ParseField ID = new ParseField("job_id");
     public static final ParseField JOB_TYPE = new ParseField("job_type");
     public static final ParseField JOB_VERSION = new ParseField("job_version");
+    public static final ParseField GROUPS = new ParseField("groups");
     public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
     public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits");
     public static final ParseField CREATE_TIME = new ParseField("create_time");
@@ -94,6 +95,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         parser.declareString(Builder::setId, ID);
         parser.declareString(Builder::setJobType, JOB_TYPE);
         parser.declareString(Builder::setJobVersion, JOB_VERSION);
+        parser.declareStringArray(Builder::setGroups, GROUPS);
         parser.declareStringOrNull(Builder::setDescription, DESCRIPTION);
         parser.declareField(Builder::setCreateTime, p -> {
             if (p.currentToken() == Token.VALUE_NUMBER) {
@@ -148,6 +150,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
     @Nullable
     private final Version jobVersion;

+    private final List<String> groups;
     private final String description;
     // TODO: Use java.time for the Dates here: x-pack-elasticsearch#829
     private final Date createTime;
@@ -166,7 +169,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
     private final String resultsIndexName;
     private final boolean deleted;

-    private Job(String jobId, String jobType, Version jobVersion, String description, Date createTime,
+    private Job(String jobId, String jobType, Version jobVersion, List<String> groups, String description, Date createTime,
                 Date finishedTime, Date lastDataTime,
                 AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
                 ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
@@ -176,6 +179,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         this.jobId = jobId;
         this.jobType = jobType;
         this.jobVersion = jobVersion;
+        this.groups = groups;
         this.description = description;
         this.createTime = createTime;
         this.finishedTime = finishedTime;
@@ -202,6 +206,11 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         } else {
             jobVersion = null;
         }
+        if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
+            groups = in.readList(StreamInput::readString);
+        } else {
+            groups = Collections.emptyList();
+        }
         description = in.readOptionalString();
         createTime = new Date(in.readVLong());
         finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
@@ -237,6 +246,10 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         return jobVersion;
     }

+    public List<String> getGroups() {
+        return groups;
+    }
+
     /**
      * The name of the index storing the job's results and state.
      * This defaults to {@link #getId()} if a specific index name is not set.
@@ -414,6 +427,9 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
                 out.writeBoolean(false);
             }
         }
+        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
+            out.writeStringList(groups);
+        }
         out.writeOptionalString(description);
         out.writeVLong(createTime.getTime());
         if (finishedTime != null) {
@@ -458,6 +474,9 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         if (jobVersion != null) {
             builder.field(JOB_VERSION.getPreferredName(), jobVersion);
         }
+        if (groups.isEmpty() == false) {
+            builder.field(GROUPS.getPreferredName(), groups);
+        }
         if (description != null) {
             builder.field(DESCRIPTION.getPreferredName(), description);
         }
@@ -519,6 +538,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         return Objects.equals(this.jobId, that.jobId)
                 && Objects.equals(this.jobType, that.jobType)
                 && Objects.equals(this.jobVersion, that.jobVersion)
+                && Objects.equals(this.groups, that.groups)
                 && Objects.equals(this.description, that.description)
                 && Objects.equals(this.createTime, that.createTime)
                 && Objects.equals(this.finishedTime, that.finishedTime)
@@ -538,7 +558,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {

     @Override
     public int hashCode() {
-        return Objects.hash(jobId, jobType, jobVersion, description, createTime, finishedTime, lastDataTime, analysisConfig,
+        return Objects.hash(jobId, jobType, jobVersion, groups, description, createTime, finishedTime, lastDataTime, analysisConfig,
                 analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
                 backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
                 modelSnapshotId, resultsIndexName, deleted);
@@ -574,6 +594,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         private String id;
         private String jobType = ANOMALY_DETECTOR_JOB_TYPE;
         private Version jobVersion;
+        private List<String> groups = Collections.emptyList();
         private String description;
         private AnalysisConfig analysisConfig;
         private AnalysisLimits analysisLimits;
@@ -602,6 +623,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
             this.id = job.getId();
             this.jobType = job.getJobType();
             this.jobVersion = job.getJobVersion();
+            this.groups = job.getGroups();
             this.description = job.getDescription();
             this.analysisConfig = job.getAnalysisConfig();
             this.analysisLimits = job.getAnalysisLimits();
@@ -626,6 +648,11 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
             if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
                 jobVersion = in.readBoolean() ? Version.readVersion(in) : null;
             }
+            if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
+                groups = in.readList(StreamInput::readString);
+            } else {
+                groups = Collections.emptyList();
+            }
             description = in.readOptionalString();
             createTime = in.readBoolean() ? new Date(in.readVLong()) : null;
             finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
@@ -665,6 +692,15 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
             this.jobType = jobType;
         }

+        public void setGroups(List<String> groups) {
+            this.groups = groups == null ? Collections.emptyList() : groups;
+            for (String group : this.groups) {
+                if (MlStrings.isValidId(group) == false) {
+                    throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_GROUP, group));
+                }
+            }
+        }
+
         public Date getCreateTime() {
             return createTime;
         }
@@ -797,6 +833,9 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
                     out.writeBoolean(false);
                 }
             }
+            if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
+                out.writeStringList(groups);
+            }
             out.writeOptionalString(description);
             if (createTime != null) {
                 out.writeBoolean(true);
@@ -1002,7 +1041,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
             }

             return new Job(
-                    id, jobType, jobVersion, description, createTime, finishedTime, lastDataTime,
+                    id, jobType, jobVersion, groups, description, createTime, finishedTime, lastDataTime,
                     analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
                     backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
                     modelSnapshotId, resultsIndexName, deleted);
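
Job now carries the new groups field through the transport protocol behind a version check, so a 6.1.0 node never sends the field to an older node and substitutes an empty list when reading from one. The pattern, condensed from the writeTo and StreamInput constructor changes above:

    // writer: only emit the field to nodes that understand it
    if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
        out.writeStringList(groups);
    }

    // reader: mirror the check exactly, defaulting when the field is absent
    if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
        groups = in.readList(StreamInput::readString);
    } else {
        groups = Collections.emptyList();
    }

Both sides must gate on the same version constant; an asymmetric check would desynchronize the stream.
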
@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.ml.job.config;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -19,6 +20,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -31,6 +34,7 @@ public class JobUpdate implements Writeable, ToXContentObject {

     static {
         PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID);
+        PARSER.declareStringArray(Builder::setGroups, Job.GROUPS);
         PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION);
         PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS);
         PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.CONFIG_PARSER, Job.MODEL_PLOT_CONFIG);
@@ -46,6 +50,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
     }

     private final String jobId;
+    private final List<String> groups;
     private final String description;
     private final List<DetectorUpdate> detectorUpdates;
     private final ModelPlotConfig modelPlotConfig;
@@ -58,13 +63,14 @@ public class JobUpdate implements Writeable, ToXContentObject {
     private final Map<String, Object> customSettings;
     private final String modelSnapshotId;

-    private JobUpdate(String jobId, @Nullable String description, @Nullable List<DetectorUpdate> detectorUpdates,
-                      @Nullable ModelPlotConfig modelPlotConfig, @Nullable AnalysisLimits analysisLimits,
-                      @Nullable TimeValue backgroundPersistInterval, @Nullable Long renormalizationWindowDays,
-                      @Nullable Long resultsRetentionDays, @Nullable Long modelSnapshotRetentionDays,
-                      @Nullable List<String> categorisationFilters, @Nullable Map<String, Object> customSettings,
-                      @Nullable String modelSnapshotId) {
+    private JobUpdate(String jobId, @Nullable List<String> groups, @Nullable String description,
+                      @Nullable List<DetectorUpdate> detectorUpdates, @Nullable ModelPlotConfig modelPlotConfig,
+                      @Nullable AnalysisLimits analysisLimits, @Nullable TimeValue backgroundPersistInterval,
+                      @Nullable Long renormalizationWindowDays, @Nullable Long resultsRetentionDays,
+                      @Nullable Long modelSnapshotRetentionDays, @Nullable List<String> categorisationFilters,
+                      @Nullable Map<String, Object> customSettings, @Nullable String modelSnapshotId) {
         this.jobId = jobId;
+        this.groups = groups;
         this.description = description;
         this.detectorUpdates = detectorUpdates;
         this.modelPlotConfig = modelPlotConfig;
@@ -80,6 +86,12 @@ public class JobUpdate implements Writeable, ToXContentObject {

     public JobUpdate(StreamInput in) throws IOException {
         jobId = in.readString();
+        if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
+            String[] groupsArray = in.readOptionalStringArray();
+            groups = groupsArray == null ? null : Arrays.asList(groupsArray);
+        } else {
+            groups = null;
+        }
         description = in.readOptionalString();
         if (in.readBoolean()) {
             detectorUpdates = in.readList(DetectorUpdate::new);
@@ -100,9 +112,14 @@ public class JobUpdate implements Writeable, ToXContentObject {
         customSettings = in.readMap();
         modelSnapshotId = in.readOptionalString();
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(jobId);
+        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
+            String[] groupsArray = groups == null ? null : groups.toArray(new String[groups.size()]);
+            out.writeOptionalStringArray(groupsArray);
+        }
         out.writeOptionalString(description);
         out.writeBoolean(detectorUpdates != null);
         if (detectorUpdates != null) {
@@ -126,6 +143,10 @@ public class JobUpdate implements Writeable, ToXContentObject {
         return jobId;
     }

+    public List<String> getGroups() {
+        return groups;
+    }
+
     public String getDescription() {
         return description;
     }
@@ -178,6 +199,9 @@ public class JobUpdate implements Writeable, ToXContentObject {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         builder.field(Job.ID.getPreferredName(), jobId);
+        if (groups != null) {
+            builder.field(Job.GROUPS.getPreferredName(), groups);
+        }
         if (description != null) {
             builder.field(Job.DESCRIPTION.getPreferredName(), description);
         }
@@ -223,6 +247,9 @@ public class JobUpdate implements Writeable, ToXContentObject {
      */
     public Job mergeWithJob(Job source) {
         Job.Builder builder = new Job.Builder(source);
+        if (groups != null) {
+            builder.setGroups(groups);
+        }
         if (description != null) {
             builder.setDescription(description);
         }
@@ -294,6 +321,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
         JobUpdate that = (JobUpdate) other;

         return Objects.equals(this.jobId, that.jobId)
+                && Objects.equals(this.groups, that.groups)
                 && Objects.equals(this.description, that.description)
                 && Objects.equals(this.detectorUpdates, that.detectorUpdates)
                 && Objects.equals(this.modelPlotConfig, that.modelPlotConfig)
@@ -309,7 +337,7 @@ public class JobUpdate implements Writeable, ToXContentObject {

     @Override
     public int hashCode() {
-        return Objects.hash(jobId, description, detectorUpdates, modelPlotConfig, analysisLimits, renormalizationWindowDays,
+        return Objects.hash(jobId, groups, description, detectorUpdates, modelPlotConfig, analysisLimits, renormalizationWindowDays,
                 backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, categorizationFilters, customSettings,
                 modelSnapshotId);
     }
@@ -410,6 +438,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
     public static class Builder {

         private String jobId;
+        private List<String> groups;
         private String description;
         private List<DetectorUpdate> detectorUpdates;
         private ModelPlotConfig modelPlotConfig;
@@ -431,6 +460,11 @@ public class JobUpdate implements Writeable, ToXContentObject {
             return this;
         }

+        public Builder setGroups(List<String> groups) {
+            this.groups = groups;
+            return this;
+        }
+
         public Builder setDescription(String description) {
             this.description = description;
             return this;
@@ -487,7 +521,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
         }

         public JobUpdate build() {
-            return new JobUpdate(jobId, description, detectorUpdates, modelPlotConfig, analysisLimits, backgroundPersistInterval,
+            return new JobUpdate(jobId, groups, description, detectorUpdates, modelPlotConfig, analysisLimits, backgroundPersistInterval,
                     renormalizationWindowDays, resultsRetentionDays, modelSnapshotRetentionDays, categorizationFilters, customSettings,
                     modelSnapshotId);
         }
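
All JobUpdate fields are @Nullable, and mergeWithJob only copies the non-null ones onto a copy of the source job, so an update acts as a sparse overlay. A hedged usage sketch (the job id, group names, and the existingJob variable are made up; a real Job.Builder also needs an analysis config and other required fields before build()):

    JobUpdate update = new JobUpdate.Builder("my-job")
            .setGroups(Arrays.asList("group-1", "group-2"))
            .build();
    // description, detectors, limits, ... are null in the update,
    // so mergeWithJob leaves them untouched on the merged job
    Job updated = update.mergeWithJob(existingJob);
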
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.job.groups;
+
+import org.elasticsearch.xpack.ml.job.config.Job;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * An interface to represent either a job or a group of jobs
+ */
+interface GroupOrJob {
+
+    boolean isGroup();
+    List<Job> jobs();
+
+    final class Group implements GroupOrJob {
+
+        private final List<Job> jobs;
+
+        Group(List<Job> jobs) {
+            this.jobs = Collections.unmodifiableList(jobs);
+        }
+
+        @Override
+        public boolean isGroup() {
+            return true;
+        }
+
+        @Override
+        public List<Job> jobs() {
+            return jobs;
+        }
+    }
+
+    final class SingleJob implements GroupOrJob {
+
+        private final Job job;
+
+        SingleJob(Job job) {
+            this.job = Objects.requireNonNull(job);
+        }
+
+        @Override
+        public boolean isGroup() {
+            return false;
+        }
+
+        @Override
+        public List<Job> jobs() {
+            return Collections.singletonList(job);
+        }
+    }
+}
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.job.groups;
+
+import org.elasticsearch.ResourceAlreadyExistsException;
+import org.elasticsearch.xpack.ml.job.config.Job;
+import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
+import org.elasticsearch.xpack.ml.utils.NameResolver;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.stream.Collectors;
+
+/**
+ * A lookup that allows expanding expressions that may consist of job
+ * IDs, job group names, wildcard patterns or a comma separated combination
+ * of the aforementioned to the matching job IDs.
+ * The lookup is immutable.
+ */
+public class GroupOrJobLookup {
+
+    private final SortedMap<String, GroupOrJob> groupOrJobLookup;
+
+    public GroupOrJobLookup(Collection<Job> jobs) {
+        groupOrJobLookup = new TreeMap<>();
+        jobs.forEach(this::put);
+    }
+
+    private void put(Job job) {
+        if (groupOrJobLookup.containsKey(job.getId())) {
+            throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_AND_GROUP_NAMES_MUST_BE_UNIQUE, job.getId()));
+        }
+        groupOrJobLookup.put(job.getId(), new GroupOrJob.SingleJob(job));
+        for (String groupName : job.getGroups()) {
+            GroupOrJob oldGroup = groupOrJobLookup.get(groupName);
+            if (oldGroup == null) {
+                groupOrJobLookup.put(groupName, new GroupOrJob.Group(Collections.singletonList(job)));
+            } else {
+                if (oldGroup.isGroup() == false) {
+                    throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_AND_GROUP_NAMES_MUST_BE_UNIQUE, groupName));
+                }
+                List<Job> groupJobs = new ArrayList<>(oldGroup.jobs());
+                groupJobs.add(job);
+                groupOrJobLookup.put(groupName, new GroupOrJob.Group(groupJobs));
+            }
+        }
+    }
+
+    public Set<String> expandJobIds(String expression, boolean allowNoJobs) {
+        return new GroupOrJobResolver().expand(expression, allowNoJobs);
+    }
+
+    private class GroupOrJobResolver extends NameResolver {
+
+        private GroupOrJobResolver() {
+            super(ExceptionsHelper::missingJobException);
+        }
+
+        @Override
+        protected Set<String> keys() {
+            return groupOrJobLookup.keySet();
+        }
+
+        @Override
+        protected Set<String> nameSet() {
+            return groupOrJobLookup.values().stream()
+                    .filter(groupOrJob -> groupOrJob.isGroup() == false)
+                    .map(groupOrJob -> groupOrJob.jobs().get(0).getId())
+                    .collect(Collectors.toSet());
+        }
+
+        @Override
+        protected List<String> lookup(String key) {
+            GroupOrJob groupOrJob = groupOrJobLookup.get(key);
+            return groupOrJob == null ? Collections.emptyList() : groupOrJob.jobs().stream().map(Job::getId).collect(Collectors.toList());
+        }
+    }
+}
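
GroupOrJobLookup keys a single sorted map by both job ids and group names, so a group name, a plain id, a wildcard, or a comma separated mix of them all expand through the same NameResolver. A usage sketch following the mock-based pattern of GroupOrJobLookupTests further down:

    Job foo1 = mock(Job.class);
    when(foo1.getId()).thenReturn("foo-1");
    when(foo1.getGroups()).thenReturn(Arrays.asList("foo-group"));
    Job foo2 = mock(Job.class);
    when(foo2.getId()).thenReturn("foo-2");
    when(foo2.getGroups()).thenReturn(Arrays.asList("foo-group"));

    GroupOrJobLookup lookup = new GroupOrJobLookup(Arrays.asList(foo1, foo2));
    lookup.expandJobIds("foo-group", false); // -> [foo-1, foo-2]
    lookup.expandJobIds("foo-*", false);     // -> [foo-1, foo-2]
    lookup.expandJobIds("foo-1", false);     // -> [foo-1]
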
@@ -30,6 +30,8 @@ public final class Messages {
             "The date_histogram (or histogram) aggregation cannot have sibling aggregations";
     public static final String DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO =
             "Aggregation interval must be greater than 0";
+    public static final String DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_DIVISOR_OF_BUCKET_SPAN =
+            "Aggregation interval [{0}] must be a divisor of the bucket_span [{1}]";
     public static final String DATAFEED_AGGREGATIONS_INTERVAL_MUST_LESS_OR_EQUAL_TO_BUCKET_SPAN =
             "Aggregation interval [{0}] must be less than or equal to the bucket_span [{1}]";
     public static final String DATAFEED_DATA_HISTOGRAM_MUST_HAVE_NESTED_MAX_AGGREGATION =
@@ -40,6 +42,8 @@ public final class Messages {
             "Inconsistent {0}; ''{1}'' specified in the body differs from ''{2}'' specified as a URL argument";
     public static final String INVALID_ID = "Invalid {0}; ''{1}'' can contain lowercase alphanumeric (a-z and 0-9), hyphens or " +
             "underscores; must start and end with alphanumeric";
+    public static final String INVALID_GROUP = "Invalid group id ''{0}''; must be non-empty string and may contain lowercase alphanumeric" +
+            " (a-z and 0-9), hyphens or underscores; must start and end with alphanumeric";

     public static final String JOB_AUDIR_DATAFEED_DATA_SEEN_AGAIN = "Datafeed has started retrieving data again";
     public static final String JOB_AUDIT_CREATED = "Job created";
@@ -149,6 +153,9 @@ public final class Messages {
     public static final String JOB_CONFIG_TIME_FIELD_NOT_ALLOWED_IN_ANALYSIS_CONFIG =
             "data_description.time_field may not be used in the analysis_config";

+    public static final String JOB_AND_GROUP_NAMES_MUST_BE_UNIQUE =
+            "job and group names must be unique but job [{0}] and group [{0}] have the same name";
+
     public static final String JOB_UNKNOWN_ID = "No known job with id ''{0}''";

     public static final String REST_CANNOT_DELETE_HIGHEST_PRIORITY =
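
The doubled quotes in these message constants are java.text.MessageFormat escaping, which Messages.getMessage appears to use given the {0} placeholders: a single quote opens a literal section, so '' is how one literal quote is written. A quick illustration:

    import java.text.MessageFormat;

    String pattern = "No known job with id ''{0}''";
    // prints: No known job with id '_all'
    System.out.println(MessageFormat.format(pattern, "_all"));
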
@@ -69,7 +69,7 @@ public abstract class NameResolver {
                     result.addAll(expanded);
                 } else {
                     List<String> matchingNames = lookup(token);
-                    if (matchingNames == null) {
+                    if (matchingNames.isEmpty()) {
                         throw notFoundExceptionSupplier.apply(token);
                     }
                     result.addAll(matchingNames);
@@ -97,7 +97,6 @@
      * @param key the key to look up
      * @return a list of the matching names or {@code null} when no matching names exist
      */
-    @Nullable
     protected abstract List<String> lookup(String key);

     /**
@@ -120,7 +119,7 @@

             @Override
             protected List<String> lookup(String key) {
-                return nameSet.contains(key) ? Collections.singletonList(key) : null;
+                return nameSet.contains(key) ? Collections.singletonList(key) : Collections.emptyList();
             }
         };
     }
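
The NameResolver change flips the not-found convention for lookup(String) from null to an empty list, which is why @Nullable is dropped and expand() tests isEmpty(): both GroupOrJobLookup's resolver above and the unaliased resolver now return Collections.emptyList() for unknown keys, so callers never null-check. The resulting call-site shape:

    List<String> matchingNames = lookup(token);   // never null under the new contract
    if (matchingNames.isEmpty()) {
        throw notFoundExceptionSupplier.apply(token);
    }
    result.addAll(matchingNames);
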
@@ -1,87 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.bench;
-
-import org.elasticsearch.common.Randomness;
-
-import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-import org.elasticsearch.common.SuppressForbidden;
-import org.elasticsearch.common.metrics.MeanMetric;
-import org.elasticsearch.common.settings.SecureString;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.xpack.security.authc.support.Hasher;
-
-@SuppressForbidden(reason = "benchmark")
-public class HasherBenchmark {
-
-    private static final int WARMING_ITERS = 1000;
-    private static final int BENCH_ITERS = 10000;
-
-    public static void main(String[] args) throws Exception {
-        test(Hasher.SSHA256).print();
-        test(Hasher.MD5).print();
-        test(Hasher.SHA1).print();
-        test(Hasher.BCRYPT4).print();
-    }
-
-    protected static Metrics test(Hasher hasher) {
-
-        Metrics metrics = new Metrics(hasher);
-
-        System.out.print("warming up [" + hasher.name() + "]...");
-
-        for (int i = 0; i < WARMING_ITERS; i++) {
-            SecureString str = new SecureString(RandomStrings.randomAsciiOfLength(Randomness.get(), 8).toCharArray());
-            char[] hash = hasher.hash(str);
-            hasher.verify(str, hash);
-        }
-
-        System.out.println("done!");
-        System.out.print("starting benchmark for [" + hasher.name() + "]...");
-
-        long start;
-
-        for (int i = 0; i < BENCH_ITERS; i++) {
-            SecureString str = new SecureString(RandomStrings.randomAsciiOfLength(Randomness.get(), 8).toCharArray());
-
-            start = System.nanoTime();
-            char[] hash = hasher.hash(str);
-            metrics.hash.inc(System.nanoTime() - start);
-
-            start = System.nanoTime();
-            hasher.verify(str, hash);
-            metrics.verify.inc(System.nanoTime() - start);
-            if (i % 1000000 == 0) {
-                System.out.println("finished " + i + " iterations");
-            }
-        }
-
-        System.out.println("done!");
-
-        return metrics;
-    }
-
-    @SuppressForbidden(reason = "benchmark")
-    private static class Metrics {
-
-        final String name;
-        final MeanMetric hash = new MeanMetric();
-        final MeanMetric verify = new MeanMetric();
-
-        Metrics(Hasher hasher) {
-            this.name = hasher.name();
-        }
-
-        void print() {
-            System.out.println(name);
-            System.out.println("\tHash (total): " + TimeValue.timeValueNanos(hash.sum()).format());
-            System.out.println("\tHash (avg): " + hash.mean() + " nanos");
-            System.out.println("\tVerify (total): " + TimeValue.timeValueNanos(verify.sum()).format());
-            System.out.println("\tVerify (avg): " + verify.mean() + " nanos");
-        }
-    }
-}
@@ -123,6 +123,23 @@ public class DatafeedJobValidatorTests extends ESTestCase {
         assertEquals("Aggregation interval [1800001ms] must be less than or equal to the bucket_span [1800000ms]", e.getMessage());
     }

+    public void testVerify_HistogramIntervalIsDivisorOfBucketSpan() throws IOException {
+        Job.Builder builder = buildJobBuilder("foo");
+        AnalysisConfig.Builder ac = createAnalysisConfig();
+        ac.setSummaryCountFieldName("some_count");
+        ac.setBucketSpan(TimeValue.timeValueMinutes(5));
+        builder.setAnalysisConfig(ac);
+        Job job = builder.build(new Date());
+        DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs(37 * 1000).build();
+
+        ElasticsearchStatusException e = ESTestCase.expectThrows(ElasticsearchStatusException.class,
+                () -> DatafeedJobValidator.validate(datafeedConfig, job));
+        assertEquals("Aggregation interval [37000ms] must be a divisor of the bucket_span [300000ms]", e.getMessage());
+
+        DatafeedConfig goodDatafeedConfig = createValidDatafeedConfigWithAggs(60 * 1000).build();
+        DatafeedJobValidator.validate(goodDatafeedConfig, job);
+    }
+
     private static Job.Builder buildJobBuilder(String id) {
         Job.Builder builder = new Job.Builder(id);
         AnalysisConfig.Builder ac = createAnalysisConfig();
@@ -442,7 +442,10 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}},"
                 + "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}},"
                 + "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}";
-        new DatafeedBuilder(datafeedId, jobId, "network-data", "doc").setAggregations(aggregations).build();
+        new DatafeedBuilder(datafeedId, jobId, "network-data", "doc")
+                .setAggregations(aggregations)
+                .setChunkingTimespan("300s")
+                .build();

         openJob(client(), jobId);

@@ -696,6 +699,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         String scriptedFields;
         String aggregations;
         String authHeader = BASIC_AUTH_VALUE_SUPER_USER;
+        String chunkingTimespan;

         DatafeedBuilder(String datafeedId, String jobId, String index, String type) {
             this.datafeedId = datafeedId;
@@ -724,12 +728,19 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
             return this;
         }

+        DatafeedBuilder setChunkingTimespan(String timespan) {
+            chunkingTimespan = timespan;
+            return this;
+        }
+
         Response build() throws IOException {
             String datafeedConfig = "{"
                     + "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"],\"types\":[\"" + type + "\"]"
                     + (source ? ",\"_source\":true" : "")
                     + (scriptedFields == null ? "" : ",\"script_fields\":" + scriptedFields)
                     + (aggregations == null ? "" : ",\"aggs\":" + aggregations)
+                    + (chunkingTimespan == null ? "" :
+                            ",\"chunking_config\":{\"mode\":\"MANUAL\",\"time_span\":\"" + chunkingTimespan + "\"}")
                     + "}";
             return client().performRequest("put", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId, Collections.emptyMap(),
                     new StringEntity(datafeedConfig, ContentType.APPLICATION_JSON),
@@ -21,15 +21,18 @@ import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;

 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

 import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
@@ -465,6 +468,20 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         assertEquals(expected, new HashSet<>(builder.invalidCreateTimeSettings()));
     }

+    public void testEmptyGroup() {
+        Job.Builder builder = buildJobBuilder("foo");
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> builder.setGroups(Arrays.asList("foo-group", "")));
+        assertThat(e.getMessage(), containsString("Invalid group id ''"));
+    }
+
+    public void testInvalidGroup() {
+        Job.Builder builder = buildJobBuilder("foo");
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> builder.setGroups(Arrays.asList("foo-group", "$$$")));
+        assertThat(e.getMessage(), containsString("Invalid group id '$$$'"));
+    }
+
     public static Job.Builder buildJobBuilder(String id, Date date) {
         Job.Builder builder = new Job.Builder(id);
         builder.setCreateTime(date);
@@ -500,6 +517,14 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         if (randomBoolean()) {
             builder.setJobVersion(Version.CURRENT);
         }
+        if (randomBoolean()) {
+            int groupsNum = randomIntBetween(0, 10);
+            List<String> groups = new ArrayList<>(groupsNum);
+            for (int i = 0; i < groupsNum; i++) {
+                groups.add(randomValidJobId());
+            }
+            builder.setGroups(groups);
+        }
         builder.setCreateTime(new Date(randomNonNegativeLong()));
         if (randomBoolean()) {
             builder.setFinishedTime(new Date(randomNonNegativeLong()));
@@ -24,6 +24,14 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
     @Override
     protected JobUpdate createTestInstance() {
         JobUpdate.Builder update = new JobUpdate.Builder(randomAlphaOfLength(4));
+        if (randomBoolean()) {
+            int groupsNum = randomIntBetween(0, 10);
+            List<String> groups = new ArrayList<>(groupsNum);
+            for (int i = 0; i < groupsNum; i++) {
+                groups.add(JobTests.randomValidJobId());
+            }
+            update.setGroups(groups);
+        }
         if (randomBoolean()) {
             update.setDescription(randomAlphaOfLength(20));
         }
@@ -106,6 +114,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         Map<String, Object> customSettings = Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10));

         JobUpdate.Builder updateBuilder = new JobUpdate.Builder("foo");
+        updateBuilder.setGroups(Arrays.asList("group-1", "group-2"));
         updateBuilder.setDescription("updated_description");
         updateBuilder.setDetectorUpdates(detectorUpdates);
         updateBuilder.setModelPlotConfig(modelPlotConfig);
@@ -120,6 +129,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         JobUpdate update = updateBuilder.build();

         Job.Builder jobBuilder = new Job.Builder("foo");
+        jobBuilder.setGroups(Arrays.asList("group-1"));
         Detector.Builder d1 = new Detector.Builder("info_content", "domain");
         d1.setOverFieldName("mlcategory");
         Detector.Builder d2 = new Detector.Builder("min", "field");
@@ -132,6 +142,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {

         Job updatedJob = update.mergeWithJob(jobBuilder.build());

+        assertEquals(update.getGroups(), updatedJob.getGroups());
         assertEquals(update.getDescription(), updatedJob.getDescription());
         assertEquals(update.getModelPlotConfig(), updatedJob.getModelPlotConfig());
         assertEquals(update.getAnalysisLimits(), updatedJob.getAnalysisLimits());
@@ -0,0 +1,98 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.job.groups;
+
+import org.elasticsearch.ResourceAlreadyExistsException;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.ml.job.config.Job;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.contains;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class GroupOrJobLookupTests extends ESTestCase {
+
+    public void testEmptyLookup_GivenAllowNoJobs() {
+        GroupOrJobLookup lookup = new GroupOrJobLookup(Collections.emptyList());
+
+        assertThat(lookup.expandJobIds("_all", true).isEmpty(), is(true));
+        assertThat(lookup.expandJobIds("*", true).isEmpty(), is(true));
+        assertThat(lookup.expandJobIds("foo*", true).isEmpty(), is(true));
+        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo", true));
+    }
+
+    public void testEmptyLookup_GivenNotAllowNoJobs() {
+        GroupOrJobLookup lookup = new GroupOrJobLookup(Collections.emptyList());
+
+        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("_all", false));
+        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("*", false));
+        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo*", false));
+        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo", true));
+    }
+
+    public void testAllIsNotExpandedInCommaSeparatedExpression() {
+        GroupOrJobLookup lookup = new GroupOrJobLookup(Collections.emptyList());
+        ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo-*,_all", true));
+        assertThat(e.getMessage(), equalTo("No known job with id '_all'"));
+    }
+
+    public void testConstructor_GivenJobWithSameIdAsPreviousGroupName() {
+        List<Job> jobs = new ArrayList<>();
+        jobs.add(mockJob("foo", Arrays.asList("foo-group")));
+        jobs.add(mockJob("foo-group", Collections.emptyList()));
+        ResourceAlreadyExistsException e = expectThrows(ResourceAlreadyExistsException.class, () -> new GroupOrJobLookup(jobs));
+        assertThat(e.getMessage(),
+                equalTo("job and group names must be unique but job [foo-group] and group [foo-group] have the same name"));
+    }
+
+    public void testConstructor_GivenGroupWithSameNameAsPreviousJobId() {
+        List<Job> jobs = new ArrayList<>();
+        jobs.add(mockJob("foo", Collections.emptyList()));
+        jobs.add(mockJob("foo-2", Arrays.asList("foo")));
+        ResourceAlreadyExistsException e = expectThrows(ResourceAlreadyExistsException.class, () -> new GroupOrJobLookup(jobs));
+        assertThat(e.getMessage(),
+                equalTo("job and group names must be unique but job [foo] and group [foo] have the same name"));
+    }
+
+    public void testLookup() {
+        List<Job> jobs = new ArrayList<>();
+        jobs.add(mockJob("foo-1", Arrays.asList("foo-group", "ones")));
+        jobs.add(mockJob("foo-2", Arrays.asList("foo-group", "twos")));
+        jobs.add(mockJob("bar-1", Arrays.asList("bar-group", "ones")));
+        jobs.add(mockJob("bar-2", Arrays.asList("bar-group", "twos")));
+        jobs.add(mockJob("nogroup", Collections.emptyList()));
+        GroupOrJobLookup groupOrJobLookup = new GroupOrJobLookup(jobs);
+
+        assertThat(groupOrJobLookup.expandJobIds("_all", false), contains("bar-1", "bar-2", "foo-1", "foo-2", "nogroup"));
+        assertThat(groupOrJobLookup.expandJobIds("*", false), contains("bar-1", "bar-2", "foo-1", "foo-2", "nogroup"));
+        assertThat(groupOrJobLookup.expandJobIds("bar-1", false), contains("bar-1"));
+        assertThat(groupOrJobLookup.expandJobIds("foo-1", false), contains("foo-1"));
+        assertThat(groupOrJobLookup.expandJobIds("foo-2, bar-1", false), contains("bar-1", "foo-2"));
+        assertThat(groupOrJobLookup.expandJobIds("foo-group", false), contains("foo-1", "foo-2"));
+        assertThat(groupOrJobLookup.expandJobIds("bar-group", false), contains("bar-1", "bar-2"));
+        assertThat(groupOrJobLookup.expandJobIds("ones", false), contains("bar-1", "foo-1"));
+        assertThat(groupOrJobLookup.expandJobIds("twos", false), contains("bar-2", "foo-2"));
+        assertThat(groupOrJobLookup.expandJobIds("foo-group, nogroup", false), contains("foo-1", "foo-2", "nogroup"));
+        assertThat(groupOrJobLookup.expandJobIds("*-group", false), contains("bar-1", "bar-2", "foo-1", "foo-2"));
+        assertThat(groupOrJobLookup.expandJobIds("foo-group,foo-1,foo-2", false), contains("foo-1", "foo-2"));
+        assertThat(groupOrJobLookup.expandJobIds("foo-group,*-2", false), contains("bar-2", "foo-1", "foo-2"));
+    }
+
+    private static Job mockJob(String jobId, List<String> groups) {
+        Job job = mock(Job.class);
+        when(job.getId()).thenReturn(jobId);
+        when(job.getGroups()).thenReturn(groups);
+        return job;
+    }
+}
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.job.groups;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.ml.job.config.Job;
+
+import java.util.Arrays;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.mock;
+
+public class GroupOrJobTests extends ESTestCase {
+
+    public void testSingleJob() {
+        Job job = mock(Job.class);
+        GroupOrJob groupOrJob = new GroupOrJob.SingleJob(job);
+        assertThat(groupOrJob.isGroup(), is(false));
+        assertThat(groupOrJob.jobs(), contains(job));
+        expectThrows(UnsupportedOperationException.class, () -> groupOrJob.jobs().add(mock(Job.class)));
+    }
+
+    public void testGroup() {
+        Job job1 = mock(Job.class);
+        Job job2 = mock(Job.class);
+        GroupOrJob groupOrJob = new GroupOrJob.Group(Arrays.asList(job1, job2));
+        assertThat(groupOrJob.isGroup(), is(true));
+        assertThat(groupOrJob.jobs(), contains(job1, job2));
+        expectThrows(UnsupportedOperationException.class, () -> groupOrJob.jobs().add(mock(Job.class)));
+    }
+}
@@ -140,7 +140,7 @@ public class NameResolverTests extends ESTestCase {

         @Override
         protected List<String> lookup(String key) {
-            return lookup.get(key);
+            return lookup.containsKey(key) ? lookup.get(key) : Collections.emptyList();
         }
     }
 }
@@ -1,59 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.monitoring;
-
-import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.CollectionUtils;
-import org.elasticsearch.node.MockNode;
-import org.elasticsearch.node.Node;
-import org.elasticsearch.xpack.XPackPlugin;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.concurrent.CountDownLatch;
-
-/**
- * Main class to easily run Monitoring from a IDE.
- * <p>
- * In order to run this class set configure the following:
- * 1) Set `-Des.path.home=` to a directory containing an ES config directory
- *
- * It accepts collectors names as program arguments.
- */
-public class MonitoringF {
-
-    public static void main(String[] args) throws Throwable {
-        Settings.Builder settings = Settings.builder();
-        settings.put("security.manager.enabled", "false");
-        settings.put("cluster.name", MonitoringF.class.getSimpleName());
-        settings.put("xpack.monitoring.collection.interval", "1s");
-        if (!CollectionUtils.isEmpty(args)) {
-            settings.putArray("xpack.monitoring.collection.collectors", args);
-        }
-
-        final CountDownLatch latch = new CountDownLatch(1);
-        final Node node = new MockNode(settings.build(),
-                Arrays.asList(XPackPlugin.class, XPackPlugin.class, XPackPlugin.class));
-        Runtime.getRuntime().addShutdownHook(new Thread() {
-
-            @Override
-            public void run() {
-                try {
-                    IOUtils.close(node);
-                } catch (IOException e) {
-                    throw new ElasticsearchException(e);
-                } finally {
-                    latch.countDown();
-                }
-            }
-        });
-        node.start();
-        latch.await();
-    }
-
-}
@@ -1,123 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.notification.email;
-
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.cli.Terminal;
-import org.elasticsearch.common.settings.ClusterSettings;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.ToXContent;
-
-import java.util.Collections;
-import java.util.Locale;
-
-@AwaitsFix(bugUrl = "https://github.com/elastic/x-plugins/issues/379")
-public class ManualPublicSmtpServersTester {
-
-    private static final Terminal terminal = Terminal.DEFAULT;
-
-    public static class Gmail {
-
-        public static void main(String[] args) throws Exception {
-            test("gmail", Profile.GMAIL, Settings.builder()
-                    .put("xpack.notification.email.account.gmail.smtp.auth", true)
-                    .put("xpack.notification.email.account.gmail.smtp.starttls.enable", true)
-                    .put("xpack.notification.email.account.gmail.smtp.host", "smtp.gmail.com")
-                    .put("xpack.notification.email.account.gmail.smtp.port", 587)
-                    .put("xpack.notification.email.account.gmail.smtp.user", terminal.readText("username: "))
-                    .put("xpack.notification.email.account.gmail.smtp.password", new String(terminal.readSecret("password: ")))
-                    .put("xpack.notification.email.account.gmail.email_defaults.to", terminal.readText("to: "))
-            );
-        }
-    }
-
-    public static class OutlookDotCom {
-
-        public static void main(String[] args) throws Exception {
-            test("outlook", Profile.STANDARD, Settings.builder()
-                    .put("xpack.notification.email.account.outlook.smtp.auth", true)
-                    .put("xpack.notification.email.account.outlook.smtp.starttls.enable", true)
-                    .put("xpack.notification.email.account.outlook.smtp.host", "smtp-mail.outlook.com")
-                    .put("xpack.notification.email.account.outlook.smtp.port", 587)
-                    .put("xpack.notification.email.account.outlook.smtp.user", "elastic.user@outlook.com")
-                    .put("xpack.notification.email.account.outlook.smtp.password", "fantastic42")
-                    .put("xpack.notification.email.account.outlook.email_defaults.to", "elastic.user@outlook.com")
-                    .put()
-            );
-        }
-    }
-
-    public static class YahooMail {
-
-        public static void main(String[] args) throws Exception {
-            test("yahoo", Profile.STANDARD, Settings.builder()
-                    .put("xpack.notification.email.account.yahoo.smtp.starttls.enable", true)
-                    .put("xpack.notification.email.account.yahoo.smtp.auth", true)
-                    .put("xpack.notification.email.account.yahoo.smtp.host", "smtp.mail.yahoo.com")
-                    .put("xpack.notification.email.account.yahoo.smtp.port", 587)
-                    .put("xpack.notification.email.account.yahoo.smtp.user", "elastic.user@yahoo.com")
-                    .put("xpack.notification.email.account.yahoo.smtp.password", "fantastic42")
-                    // note: from must be set to the same authenticated user account
-                    .put("xpack.notification.email.account.yahoo.email_defaults.from", "elastic.user@yahoo.com")
-                    .put("xpack.notification.email.account.yahoo.email_defaults.to", "elastic.user@yahoo.com")
-            );
-        }
-    }
-
-    // Amazon Simple Email Service
-    public static class SES {
-
-        public static void main(String[] args) throws Exception {
-            test("ses", Profile.STANDARD, Settings.builder()
-                    .put("xpack.notification.email.account.ses.smtp.auth", true)
-                    .put("xpack.notification.email.account.ses.smtp.starttls.enable", true)
-                    .put("xpack.notification.email.account.ses.smtp.starttls.required", true)
-                    .put("xpack.notification.email.account.ses.smtp.host", "email-smtp.us-east-1.amazonaws.com")
-                    .put("xpack.notification.email.account.ses.smtp.port", 587)
-                    .put("xpack.notification.email.account.ses.smtp.user", terminal.readText("user: "))
-                    .put("xpack.notification.email.account.ses.email_defaults.from", "dummy.user@elasticsearch.com")
-                    .put("xpack.notification.email.account.ses.email_defaults.to", terminal.readText("to: "))
-                    .put("xpack.notification.email.account.ses.smtp.password",
-                            new String(terminal.readSecret("password: ")))
-            );
-        }
-    }
-
-    static void test(String accountName, Profile profile, Settings.Builder settingsBuilder) throws Exception {
-        String path = "/org/elasticsearch/xpack/watcher/actions/email/service/logo.png";
-        if (EmailServiceTests.class.getResourceAsStream(path) == null) {
-            throw new ElasticsearchException("Could not find logo at path {}", path);
-        }
-
-        EmailService service = startEmailService(settingsBuilder);
-        ToXContent content = (xContentBuilder, params) -> xContentBuilder.startObject()
-                .field("key1", "value1")
-                .field("key2", "value2")
-                .field("key3", "value3")
-                .endObject();
-
-        Email email = Email.builder()
-                .id("_id")
-                .subject("_subject")
-                .textBody("_text_body")
-                .htmlBody("<b>html body</b><p/><p/><img src=\"cid:logo.png\"/>")
-                .attach(new Attachment.XContent.Yaml("test.yml", content))
-                .attach(new Attachment.Stream("logo.png", "logo.png", true,
-                        () -> EmailServiceTests.class.getResourceAsStream(path)))
-                .build();
-
-        EmailService.EmailSent sent = service.send(email, null, profile, accountName);
-
-        terminal.println(String.format(Locale.ROOT, "email sent via account [%s]", sent.account()));
-    }
-
-    static EmailService startEmailService(Settings.Builder builder) {
-        Settings settings = builder.build();
-        return new EmailService(settings, null,
-                new ClusterSettings(settings, Collections.singleton(EmailService.EMAIL_ACCOUNT_SETTING)));
-    }
-}
@ -1,81 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.security;
|
||||
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.node.MockNode;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.xpack.security.authc.esnative.NativeRealm;
|
||||
import org.elasticsearch.xpack.security.authc.file.FileRealm;
|
||||
import org.elasticsearch.xpack.security.test.SecurityTestUtils;
|
||||
import org.elasticsearch.test.SecuritySettingsSource;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile;
|
||||
|
||||
/**
|
||||
* Main class to easily run X-Pack Security from a IDE.
|
||||
*
|
||||
* During startup an error will be printed that the config directory can't be found, to fix this:
|
||||
* set `-Des.path.home=` to a location where there is a config directory on your machine.
|
||||
*/
|
||||
public class SecurityF {

    public static void main(String[] args) throws Throwable {
        Settings.Builder settings = Settings.builder();
        settings.put("http.cors.enabled", "true");
        settings.put("http.cors.allow-origin", "*");
        settings.put("xpack.security.enabled", "true");
        // Disable Monitoring to prevent cluster activity
        settings.put("xpack.monitoring.enabled", "false");
        settings.put("cluster.name", SecurityF.class.getSimpleName());

        String homeDir = System.getProperty("es.path.home");
        if (homeDir == null || Files.exists(PathUtils.get(homeDir)) == false) {
            throw new IllegalStateException("es.path.home must be set and exist");
        }
        final Path config = PathUtils.get(homeDir).resolve("config");
        SecurityTestUtils.createFolder(config);
        final Path folder = config.resolve("x-pack");
        SecurityTestUtils.createFolder(folder);
        writeFile(folder, "users", SecuritySettingsSource.CONFIG_STANDARD_USER);
        writeFile(folder, "users_roles", SecuritySettingsSource.CONFIG_STANDARD_USER_ROLES);
        writeFile(folder, "roles.yml", SecuritySettingsSource.CONFIG_ROLE_ALLOW_ALL);

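        // The file realm is consulted first (order 0), falling back to the native realm (order 1).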
settings.put("xpack.security.authc.realms.file.type", FileRealm.TYPE);
|
||||
settings.put("xpack.security.authc.realms.file.order", "0");
|
||||
settings.put("xpack.security.authc.realms.esnative.type", NativeRealm.TYPE);
|
||||
settings.put("xpack.security.authc.realms.esnative.order", "1");
|
||||
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
final Node node = new MockNode(settings.build(), Arrays.asList(XPackPlugin.class), config);
|
||||
Runtime.getRuntime().addShutdownHook(new Thread() {
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
IOUtils.close(node);
|
||||
} catch (IOException ex) {
|
||||
throw new ElasticsearchException(ex);
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
node.start();
|
||||
latch.await();
|
||||
}
|
||||
}
|
|
@ -1,182 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.watcher;

import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
import org.elasticsearch.AbstractOldXPackIndicesBackwardsCompatibilityTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.condition.AlwaysCondition;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.stats.WatcherStatsResponse;
import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule.Interval;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;

/**
 * Tests for watcher indexes created before 5.0.
 */
// This will only work when the upgrade API is in place!
@AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1303")
public class OldWatcherIndicesBackwardsCompatibilityTests extends AbstractOldXPackIndicesBackwardsCompatibilityTestCase {

    @Override
    public Settings nodeSettings(int ord) {
        return Settings.builder()
                .put(super.nodeSettings(ord))
                .put(XPackSettings.WATCHER_ENABLED.getKey(), true)
                .build();
    }

    @Override
    protected void checkVersion(Version version) throws Exception {
        // Wait for watcher to actually start....
        WatcherClient watcherClient = new WatcherClient(client());
        watcherClient.prepareWatchService().start();
        assertBusy(() -> {
            List<WatcherState> states = watcherClient.prepareWatcherStats().get().getNodes()
                    .stream().map(WatcherStatsResponse.Node::getWatcherState).collect(Collectors.toList());
            assertThat(states, everyItem(is(WatcherState.STARTED)));
        });

        try {
            assertOldTemplatesAreDeleted();
            assertWatchIndexContentsWork(version);
            assertBasicWatchInteractions();
        } finally {
            /* Shut down watcher after every test because watcher can be a bit finicky about shutting down when the node shuts down. This
             * makes super sure it shuts down *and* causes the test to fail in a sensible spot if it doesn't shut down. */
            watcherClient.prepareWatchService().stop().get();
            assertBusy(() -> internalCluster().getInstances(WatcherService.class)
                    .forEach(watcherService -> assertThat(watcherService.state(), is(WatcherState.STOPPED))));
        }
    }

    private void assertOldTemplatesAreDeleted() {
        GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get();
        List<String> templateNames = response.getIndexTemplates().stream().map(IndexTemplateMetaData::getName).collect(Collectors.toList());
        assertThat(templateNames, not(hasItems(is("watches"), startsWith("watch-history"), is("triggered_watches"))));
    }

    void assertWatchIndexContentsWork(Version version) throws Exception {
        WatcherClient watcherClient = new WatcherClient(client());

        // Fetch a basic watch
        GetWatchResponse bwcWatch = watcherClient.prepareGetWatch("bwc_watch").get();
        assertTrue(bwcWatch.isFound());
        assertNotNull(bwcWatch.getSource());
        Map<String, Object> source = bwcWatch.getSource().getAsMap();
        assertEquals(1000, source.get("throttle_period_in_millis"));
        Map<?, ?> input = (Map<?, ?>) source.get("input");
        Map<?, ?> search = (Map<?, ?>) input.get("search");
        // We asked for 100s but 2.x converted that to 1.6m which is actually 96s...
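        // (100s is about 1.67m; 2.x presumably kept one decimal place, giving 1.6m, and 1.6m * 60s == 96s.)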
        int timeout = (int) (version.onOrAfter(Version.V_5_0_0_alpha1) ? timeValueSeconds(100).millis() : timeValueSeconds(96).millis());
        assertEquals(timeout, search.get("timeout_in_millis"));
        Map<?, ?> actions = (Map<?, ?>) source.get("actions");
        Map<?, ?> indexPayload = (Map<?, ?>) actions.get("index_payload");
        Map<?, ?> transform = (Map<?, ?>) indexPayload.get("transform");
        search = (Map<?, ?>) transform.get("search");
        assertEquals(timeout, search.get("timeout_in_millis"));
        Map<?, ?> index = (Map<?, ?>) indexPayload.get("index");
        assertEquals("bwc_watch_index", index.get("index"));
        assertEquals("bwc_watch_type", index.get("doc_type"));
        assertEquals(timeout, index.get("timeout_in_millis"));

        // Fetch a watch with "fun" throttle periods
        bwcWatch = watcherClient.prepareGetWatch("bwc_throttle_period").get();
        assertTrue(bwcWatch.isFound());
        assertNotNull(bwcWatch.getSource());
        source = bwcWatch.getSource().getAsMap();
        assertEquals(timeout, source.get("throttle_period_in_millis"));
        actions = (Map<?, ?>) source.get("actions");
        indexPayload = (Map<?, ?>) actions.get("index_payload");
        assertEquals(timeout, indexPayload.get("throttle_period_in_millis"));

        /*
         * Fetch a watch with a funny timeout to verify loading fractional time
         * values. This watch is only built in >= 2.3 because email attachments
         * aren't supported before that.
         */
        bwcWatch = watcherClient.prepareGetWatch("bwc_funny_timeout").get();
        assertTrue(bwcWatch.isFound());
        assertNotNull(bwcWatch.getSource());
        source = bwcWatch.getSource().getAsMap();
        actions = (Map<?, ?>) source.get("actions");
        Map<?, ?> work = (Map<?, ?>) actions.get("work");
        Map<?, ?> email = (Map<?, ?>) work.get("email");
        Map<?, ?> attachments = (Map<?, ?>) email.get("attachments");
        Map<?, ?> attachment = (Map<?, ?>) attachments.get("test_report.pdf");
        Map<?, ?> http = (Map<?, ?>) attachment.get("http");
        Map<?, ?> request = (Map<?, ?>) http.get("request");
        assertEquals(timeout, request.get("read_timeout_millis"));
        assertEquals("https", request.get("scheme"));
        assertEquals("example.com", request.get("host"));
        assertEquals("{{ctx.metadata.report_url}}", request.get("path"));
        assertEquals(8443, request.get("port"));
        Map<?, ?> auth = (Map<?, ?>) request.get("auth");
        Map<?, ?> basic = (Map<?, ?>) auth.get("basic");
        assertThat(basic, hasEntry("username", "Aladdin"));
        // password doesn't come back because it is hidden
        assertThat(basic, not(hasKey("password")));

        String watchHistoryPattern = version.onOrAfter(Version.V_5_0_0_alpha1) ? ".watcher-history*" : ".watch_history*";
        SearchResponse history = client().prepareSearch(watchHistoryPattern).get();
        assertThat(history.getHits().getTotalHits(), greaterThanOrEqualTo(10L));
    }

    void assertBasicWatchInteractions() throws Exception {
        WatcherClient watcherClient = new WatcherClient(client());

        PutWatchResponse put = watcherClient.preparePutWatch("new_watch").setSource(new WatchSourceBuilder()
                .condition(AlwaysCondition.INSTANCE)
                .trigger(ScheduleTrigger.builder(new IntervalSchedule(Interval.seconds(1))))
                .addAction("awesome", LoggingAction.builder(new TextTemplate("test")))).get();
        assertTrue(put.isCreated());
        assertEquals(1, put.getVersion());

        put = watcherClient.preparePutWatch("new_watch").setSource(new WatchSourceBuilder()
                .condition(AlwaysCondition.INSTANCE)
                .trigger(ScheduleTrigger.builder(new IntervalSchedule(Interval.seconds(1))))
                .addAction("awesome", LoggingAction.builder(new TextTemplate("test")))).get();
        assertFalse(put.isCreated());
        assertEquals(2, put.getVersion());

        GetWatchResponse get = watcherClient.prepareGetWatch(put.getId()).get();
        assertTrue(get.isFound());
        {
            Map<?, ?> source = get.getSource().getAsMap();
            Map<?, ?> actions = (Map<?, ?>) source.get("actions");
            Map<?, ?> awesome = (Map<?, ?>) actions.get("awesome");
            Map<?, ?> logging = (Map<?, ?>) awesome.get("logging");
            assertEquals("info", logging.get("level"));
            assertEquals("test", logging.get("text"));
        }
    }

}
@ -1,77 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.watcher;

import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.elasticsearch.xpack.XPackPlugin;

import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;

/**
 * Main class to easily run Watcher from an IDE.
 * It sets all the options to run the Watcher plugin and access it from Sense, but doesn't run with security.
 *
 * In order to run this class, configure the following:
 * 1) Set `-Des.path.home=` to a directory containing an ES config directory
 */
public class WatcherF {

    @SuppressForbidden(reason = "not really code or a test")
    public static void main(String[] args) throws Throwable {
        Settings.Builder settings = Settings.builder();
        settings.put("http.cors.enabled", "true");
        settings.put("http.cors.allow-origin", "*");
        settings.put("xpack.security.enabled", "false");
        settings.put("security.manager.enabled", "false");
        settings.put("cluster.name", WatcherF.class.getSimpleName());

        // this is for the `test-watcher-integration` group level integration in HipChat
        settings.put("xpack.notification.hipchat.account.integration.profile", "integration");
        settings.put("xpack.notification.hipchat.account.integration.auth_token", "huuS9v7ccuOy3ZBWWWr1vt8Lqu3sQnLUE81nrLZU");
        settings.put("xpack.notification.hipchat.account.integration.room", "test-watcher");

        // this is for the Watcher Test account in HipChat
        settings.put("xpack.notification.hipchat.account.user.profile", "user");
        settings.put("xpack.notification.hipchat.account.user.auth_token", "12rNQUuQ0wObfRVeoVD8OeoAnosCT8tSTV5UjsII");

        // this is for the `test-watcher-v1` notification token (hipchat)
        settings.put("xpack.notification.hipchat.account.v1.profile", "v1");
        settings.put("xpack.notification.hipchat.account.v1.auth_token", "a734baf62df618b96dda55b323fc30");

        // this is for our test slack incoming webhook (under elasticsearch team)
        System.setProperty("es.xpack.notification.slack.account.a1.url",
                "https://hooks.slack.com/services/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");

        System.setProperty("es.xpack.notification.pagerduty.account.service1.service_api_key",
                "fc082467005d4072a914e0bb041882d0");

        final CountDownLatch latch = new CountDownLatch(1);
        final Node node = new MockNode(settings.build(), Arrays.asList(XPackPlugin.class));
        Runtime.getRuntime().addShutdownHook(new Thread() {

            @Override
            public void run() {
                try {
                    IOUtils.close(node);
                } catch (IOException e) {
                    throw new ElasticsearchException(e);
                } finally {
                    latch.countDown();
                }
            }
        });
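        // Block the main thread until the shutdown hook has closed the node;
        // without the latch the JVM would exit as soon as start() returns.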
        node.start();
        latch.await();
    }

}
@ -17,7 +17,9 @@ import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;

import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction;
@ -26,7 +28,7 @@ import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput;
import static org.elasticsearch.xpack.watcher.transform.TransformBuilders.scriptTransform;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;

/**
@ -69,20 +71,7 @@ public class HistoryTemplateTransformMappingsTests extends AbstractWatcherIntegr
        return true; // just to have better control over the triggers
    }

    @Override
    protected boolean enableSecurity() {
        return false; // remove security noise from this test
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1517")
    public void testTransformFields() throws Exception {
        String index = "the-index";
        String type = "the-type";
        createIndex(index);
        index(index, type, "{}");
        flush();
        refresh();

        PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id1").setSource(watchBuilder()
                .trigger(schedule(interval("5s")))
                .input(simpleInput())
@ -121,12 +110,15 @@ public class HistoryTemplateTransformMappingsTests extends AbstractWatcherIntegr
                .includeDefaults(true)
                .get();

        for (Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetaData>> map : response.mappings().values()) {
            Map<String, GetFieldMappingsResponse.FieldMappingMetaData> watchRecord = map.get("doc");
            assertThat(watchRecord, hasKey("result.actions.transform.payload"));
            GetFieldMappingsResponse.FieldMappingMetaData fieldMappingMetaData = watchRecord.get("result.actions.transform.payload");
            assertThat(fieldMappingMetaData.isNull(), is(true));
        }
        // time might have rolled over to a new day, thus we need to check that this field exists only in one of the history indices
        List<Boolean> payloadNulls = response.mappings().values().stream()
                .map(map -> map.get("doc"))
                .map(map -> map.get("result.actions.transform.payload"))
                .filter(Objects::nonNull)
                .map(GetFieldMappingsResponse.FieldMappingMetaData::isNull)
                .collect(Collectors.toList());

        assertThat(payloadNulls, hasItem(true));
        });
    }
}
@ -183,7 +183,6 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
        });
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/pull/1544")
    public void testMixedTriggeredWatchLoading() throws Exception {
        createIndex("output");
        client().prepareIndex("my-index", "foo", "bar")
@ -1,19 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.watcher.trigger.schedule.tool;

import org.elasticsearch.cli.Terminal;

/**
 * A small executable tool that can evaluate cron expressions
 */
public class EvalCron {

    public static void main(String[] args) throws Exception {
        String expression = Terminal.DEFAULT.readText("cron: ");
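        // e.g. entering "0 0 12 * * ?" (an illustrative input) would have the tool
        // print the upcoming times at which that schedule fires.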
        CronEvalTool.main(new String[] { expression });
    }
}
@ -0,0 +1,245 @@
setup:
  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-foo-1
        body: >
          {
            "groups": ["foo-group", "ones"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-foo-2
        body: >
          {
            "groups": ["foo-group", "twos"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-bar-1
        body: >
          {
            "groups": ["bar-group", "ones"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-bar-2
        body: >
          {
            "groups": ["bar-group", "twos"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-nogroup
        body: >
          {
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with id that matches an existing group":

  - do:
      catch: /resource_already_exists_exception/
      xpack.ml.put_job:
        job_id: foo-group
        body: >
          {
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with group that matches a job id":

  - do:
      catch: /resource_already_exists_exception/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-group-matching-existing-job-id
        body: >
          {
            "groups": ["test-job-groups-nogroup"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with group that matches its id":

  - do:
      catch: /resource_already_exists_exception/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-group-matching-its-id
        body: >
          {
            "groups": ["test-job-groups-job-with-group-matching-its-id"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with empty group":

  - do:
      catch: /Invalid group id ''; must be non-empty string and may contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-empty-group
        body: >
          {
            "groups": ["foo-group", ""],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with invalid group":

  - do:
      catch: /Invalid group id '___'; must be non-empty string and may contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-invalid-group
        body: >
          {
            "groups": ["foo", "___"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test get job API":

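  # The job_id parameter is expanded like any other identifier: "_all", a concrete
  # job id, a group name, or a wildcard/comma-separated mix all resolve to matching jobs.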
  - do:
      xpack.ml.get_jobs:
        job_id: "_all"
  - match: { count: 5 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}
  - match: { jobs.2.job_id: "test-job-groups-foo-1"}
  - match: { jobs.3.job_id: "test-job-groups-foo-2"}
  - match: { jobs.4.job_id: "test-job-groups-nogroup"}

  - do:
      xpack.ml.get_jobs:
        job_id: "test-job-groups-bar-1"
  - match: { count: 1 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}

  - do:
      xpack.ml.get_jobs:
        job_id: "foo-group"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.job_id: "test-job-groups-foo-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "bar-group"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "ones"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-foo-1"}

  - do:
      xpack.ml.get_jobs:
        job_id: "twos"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-2"}
  - match: { jobs.1.job_id: "test-job-groups-foo-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "*-group"
  - match: { count: 4 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}
  - match: { jobs.2.job_id: "test-job-groups-foo-1"}
  - match: { jobs.3.job_id: "test-job-groups-foo-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "bar-group,test-job-groups-nogroup"
  - match: { count: 3 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}
  - match: { jobs.2.job_id: "test-job-groups-nogroup"}

---
"Test get job stats API":

  - do:
      xpack.ml.get_job_stats:
        job_id: "foo-group"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.job_id: "test-job-groups-foo-2"}

---
"Test close job API":

  - do:
      xpack.ml.open_job:
        job_id: "test-job-groups-foo-1"

  - do:
      xpack.ml.open_job:
        job_id: "test-job-groups-bar-1"

  - do:
      xpack.ml.get_job_stats:
        job_id: "ones"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.0.state: opened}
  - match: { jobs.1.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.state: opened}

  - do:
      xpack.ml.close_job:
        job_id: "ones"

  - do:
      xpack.ml.get_job_stats:
        job_id: "ones"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.0.state: closed}
  - match: { jobs.1.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.state: closed}
@ -296,6 +296,7 @@
        job_id: jobs-crud-update-job
        body: >
          {
            "groups": ["group-1", "group-2"],
            "description":"Post update description",
            "detectors": [{"detector_index": 0, "rules": {"target_field_name": "airline",
                           "rule_conditions": [ { "condition_type": "numerical_actual",
@ -318,6 +319,7 @@
            }
          }
  - match: { job_id: "jobs-crud-update-job" }
  - match: { groups: ["group-1", "group-2"] }
  - match: { description: "Post update description" }
  - match: { model_plot_config.enabled: false }
  - match: { model_plot_config.terms: "foobar" }