[ML] Add ability to assign groups to jobs (elastic/x-pack-elasticsearch#2155)
In particular:
- adds a `groups` field to a job
- group names can be used in multi-job APIs

relates elastic/x-pack-elasticsearch#2097

Original commit: elastic/x-pack-elasticsearch@c8517221ae
This commit is contained in:
parent 4b531c4dbb
commit 2d9d4c41d8
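For readers skimming the diff, here is a small illustrative sketch (not part of this commit) of how the new pieces fit together. It follows the same approach as GroupOrJobLookupTests below: jobs are mocked with Mockito so only the new `groups` field matters, and a group name is then expanded through the new GroupOrJobLookup. The job IDs ("metrics-1", "metrics-2", "staging-1") and the group name "production" are made-up examples, not anything defined by this change.

import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.groups.GroupOrJobLookup;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class GroupExpansionSketch {

    public static void main(String[] args) {
        // Two jobs share the hypothetical "production" group; one job has no groups.
        List<Job> jobs = Arrays.asList(
                mockJob("metrics-1", Arrays.asList("production")),
                mockJob("metrics-2", Arrays.asList("production")),
                mockJob("staging-1", Collections.emptyList()));

        // The lookup added by this commit indexes both job IDs and group names.
        GroupOrJobLookup lookup = new GroupOrJobLookup(jobs);

        // A group name used in a multi-job expression expands to its member job IDs.
        System.out.println(lookup.expandJobIds("production", false));           // metrics-1, metrics-2
        System.out.println(lookup.expandJobIds("staging-1,production", false)); // all three job IDs
        System.out.println(lookup.expandJobIds("_all", false));                 // all three job IDs
    }

    // Same trick as GroupOrJobLookupTests: mock a Job so only id and groups are needed.
    private static Job mockJob(String jobId, List<String> groups) {
        Job job = mock(Job.class);
        when(job.getId()).thenReturn(jobId);
        when(job.getGroups()).thenReturn(groups);
        return job;
    }
}

Any multi-job API that goes through MlMetadata.expandJobIds (get jobs, job stats, close job, as exercised by the REST tests at the end of the diff) picks up the same expansion.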
@@ -29,6 +29,7 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedUpdate;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobState;
 import org.elasticsearch.xpack.ml.job.config.JobTaskStatus;
+import org.elasticsearch.xpack.ml.job.groups.GroupOrJobLookup;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.ml.utils.NameResolver;
@@ -66,10 +67,12 @@ public class MlMetadata implements MetaData.Custom {

     private final SortedMap<String, Job> jobs;
     private final SortedMap<String, DatafeedConfig> datafeeds;
+    private final GroupOrJobLookup groupOrJobLookup;

     private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, DatafeedConfig> datafeeds) {
         this.jobs = Collections.unmodifiableSortedMap(jobs);
         this.datafeeds = Collections.unmodifiableSortedMap(datafeeds);
+        this.groupOrJobLookup = new GroupOrJobLookup(jobs.values());
     }

     public Map<String, Job> getJobs() {
@@ -77,8 +80,7 @@ public class MlMetadata implements MetaData.Custom {
     }

     public Set<String> expandJobIds(String expression, boolean allowNoJobs) {
-        return NameResolver.newUnaliased(jobs.keySet(), jobId -> ExceptionsHelper.missingJobException(jobId))
-                .expand(expression, allowNoJobs);
+        return groupOrJobLookup.expandJobIds(expression, allowNoJobs);
     }

     public boolean isJobDeleted(String jobId) {
@@ -136,6 +138,8 @@ public class MlMetadata implements MetaData.Custom {
             datafeeds.put(in.readString(), new DatafeedConfig(in));
         }
         this.datafeeds = datafeeds;
+
+        this.groupOrJobLookup = new GroupOrJobLookup(jobs.values());
     }

     @Override
@@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -29,6 +28,7 @@ import org.elasticsearch.xpack.ml.utils.time.TimeUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashSet;
@@ -57,6 +57,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
     public static final ParseField ID = new ParseField("job_id");
     public static final ParseField JOB_TYPE = new ParseField("job_type");
     public static final ParseField JOB_VERSION = new ParseField("job_version");
+    public static final ParseField GROUPS = new ParseField("groups");
     public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
     public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits");
     public static final ParseField CREATE_TIME = new ParseField("create_time");
@@ -94,6 +95,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
         parser.declareString(Builder::setId, ID);
         parser.declareString(Builder::setJobType, JOB_TYPE);
         parser.declareString(Builder::setJobVersion, JOB_VERSION);
+        parser.declareStringArray(Builder::setGroups, GROUPS);
         parser.declareStringOrNull(Builder::setDescription, DESCRIPTION);
         parser.declareField(Builder::setCreateTime, p -> {
             if (p.currentToken() == Token.VALUE_NUMBER) {
@@ -148,6 +150,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
     @Nullable
     private final Version jobVersion;

+    private final List<String> groups;
     private final String description;
     // TODO: Use java.time for the Dates here: x-pack-elasticsearch#829
     private final Date createTime;
@@ -166,16 +169,17 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
     private final String resultsIndexName;
     private final boolean deleted;

-    private Job(String jobId, String jobType, Version jobVersion, String description, Date createTime,
-                Date finishedTime, Date lastDataTime,
-                AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
-                ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
-                Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map<String, Object> customSettings,
-                String modelSnapshotId, String resultsIndexName, boolean deleted) {
+    private Job(String jobId, String jobType, Version jobVersion, List<String> groups, String description, Date createTime,
+                Date finishedTime, Date lastDataTime,
+                AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
+                ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
+                Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map<String, Object> customSettings,
+                String modelSnapshotId, String resultsIndexName, boolean deleted) {

         this.jobId = jobId;
         this.jobType = jobType;
         this.jobVersion = jobVersion;
+        this.groups = groups;
         this.description = description;
         this.createTime = createTime;
         this.finishedTime = finishedTime;
@@ -202,6 +206,11 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
         } else {
             jobVersion = null;
         }
+        if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
+            groups = in.readList(StreamInput::readString);
+        } else {
+            groups = Collections.emptyList();
+        }
         description = in.readOptionalString();
         createTime = new Date(in.readVLong());
         finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
@@ -237,6 +246,10 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
         return jobVersion;
     }

+    public List<String> getGroups() {
+        return groups;
+    }
+
     /**
      * The name of the index storing the job's results and state.
      * This defaults to {@link #getId()} if a specific index name is not set.
@@ -414,6 +427,9 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
                 out.writeBoolean(false);
             }
         }
+        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
+            out.writeStringList(groups);
+        }
         out.writeOptionalString(description);
         out.writeVLong(createTime.getTime());
         if (finishedTime != null) {
@@ -458,6 +474,9 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
         if (jobVersion != null) {
             builder.field(JOB_VERSION.getPreferredName(), jobVersion);
         }
+        if (groups.isEmpty() == false) {
+            builder.field(GROUPS.getPreferredName(), groups);
+        }
         if (description != null) {
             builder.field(DESCRIPTION.getPreferredName(), description);
         }
@@ -519,6 +538,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
         return Objects.equals(this.jobId, that.jobId)
                 && Objects.equals(this.jobType, that.jobType)
                 && Objects.equals(this.jobVersion, that.jobVersion)
+                && Objects.equals(this.groups, that.groups)
                 && Objects.equals(this.description, that.description)
                 && Objects.equals(this.createTime, that.createTime)
                 && Objects.equals(this.finishedTime, that.finishedTime)
@@ -538,7 +558,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject

     @Override
     public int hashCode() {
-        return Objects.hash(jobId, jobType, jobVersion, description, createTime, finishedTime, lastDataTime, analysisConfig,
+        return Objects.hash(jobId, jobType, jobVersion, groups, description, createTime, finishedTime, lastDataTime, analysisConfig,
                 analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
                 backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
                 modelSnapshotId, resultsIndexName, deleted);
@@ -574,6 +594,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
         private String id;
         private String jobType = ANOMALY_DETECTOR_JOB_TYPE;
         private Version jobVersion;
+        private List<String> groups = Collections.emptyList();
         private String description;
         private AnalysisConfig analysisConfig;
         private AnalysisLimits analysisLimits;
@@ -602,6 +623,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
             this.id = job.getId();
             this.jobType = job.getJobType();
             this.jobVersion = job.getJobVersion();
+            this.groups = job.getGroups();
             this.description = job.getDescription();
             this.analysisConfig = job.getAnalysisConfig();
             this.analysisLimits = job.getAnalysisLimits();
@@ -626,6 +648,11 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
             if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
                 jobVersion = in.readBoolean() ? Version.readVersion(in) : null;
             }
+            if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
+                groups = in.readList(StreamInput::readString);
+            } else {
+                groups = Collections.emptyList();
+            }
             description = in.readOptionalString();
             createTime = in.readBoolean() ? new Date(in.readVLong()) : null;
             finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
@@ -665,6 +692,15 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
             this.jobType = jobType;
         }

+        public void setGroups(List<String> groups) {
+            this.groups = groups == null ? Collections.emptyList() : groups;
+            for (String group : this.groups) {
+                if (MlStrings.isValidId(group) == false) {
+                    throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_GROUP, group));
+                }
+            }
+        }
+
         public Date getCreateTime() {
             return createTime;
         }
@@ -797,6 +833,9 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
                     out.writeBoolean(false);
                 }
             }
+            if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
+                out.writeStringList(groups);
+            }
             out.writeOptionalString(description);
             if (createTime != null) {
                 out.writeBoolean(true);
@@ -1002,7 +1041,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject
             }

             return new Job(
-                    id, jobType, jobVersion, description, createTime, finishedTime, lastDataTime,
+                    id, jobType, jobVersion, groups, description, createTime, finishedTime, lastDataTime,
                     analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
                     backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
                     modelSnapshotId, resultsIndexName, deleted);
@@ -5,6 +5,7 @@
 */
package org.elasticsearch.xpack.ml.job.config;

+import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -19,6 +20,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -31,6 +34,7 @@ public class JobUpdate implements Writeable, ToXContentObject {

     static {
         PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID);
+        PARSER.declareStringArray(Builder::setGroups, Job.GROUPS);
         PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION);
         PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS);
         PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.CONFIG_PARSER, Job.MODEL_PLOT_CONFIG);
@@ -46,6 +50,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
     }

     private final String jobId;
+    private final List<String> groups;
     private final String description;
     private final List<DetectorUpdate> detectorUpdates;
     private final ModelPlotConfig modelPlotConfig;
@@ -58,13 +63,14 @@ public class JobUpdate implements Writeable, ToXContentObject {
     private final Map<String, Object> customSettings;
     private final String modelSnapshotId;

-    private JobUpdate(String jobId, @Nullable String description, @Nullable List<DetectorUpdate> detectorUpdates,
-                      @Nullable ModelPlotConfig modelPlotConfig, @Nullable AnalysisLimits analysisLimits,
-                      @Nullable TimeValue backgroundPersistInterval, @Nullable Long renormalizationWindowDays,
-                      @Nullable Long resultsRetentionDays, @Nullable Long modelSnapshotRetentionDays,
-                      @Nullable List<String> categorisationFilters, @Nullable Map<String, Object> customSettings,
-                      @Nullable String modelSnapshotId) {
+    private JobUpdate(String jobId, @Nullable List<String> groups, @Nullable String description,
+                      @Nullable List<DetectorUpdate> detectorUpdates, @Nullable ModelPlotConfig modelPlotConfig,
+                      @Nullable AnalysisLimits analysisLimits, @Nullable TimeValue backgroundPersistInterval,
+                      @Nullable Long renormalizationWindowDays, @Nullable Long resultsRetentionDays,
+                      @Nullable Long modelSnapshotRetentionDays, @Nullable List<String> categorisationFilters,
+                      @Nullable Map<String, Object> customSettings, @Nullable String modelSnapshotId) {
         this.jobId = jobId;
+        this.groups = groups;
         this.description = description;
         this.detectorUpdates = detectorUpdates;
         this.modelPlotConfig = modelPlotConfig;
@@ -80,6 +86,12 @@ public class JobUpdate implements Writeable, ToXContentObject {

     public JobUpdate(StreamInput in) throws IOException {
         jobId = in.readString();
+        if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
+            String[] groupsArray = in.readOptionalStringArray();
+            groups = groupsArray == null ? null : Arrays.asList(groupsArray);
+        } else {
+            groups = null;
+        }
         description = in.readOptionalString();
         if (in.readBoolean()) {
             detectorUpdates = in.readList(DetectorUpdate::new);
@@ -100,9 +112,14 @@ public class JobUpdate implements Writeable, ToXContentObject {
         customSettings = in.readMap();
         modelSnapshotId = in.readOptionalString();
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(jobId);
+        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
+            String[] groupsArray = groups == null ? null : groups.toArray(new String[groups.size()]);
+            out.writeOptionalStringArray(groupsArray);
+        }
         out.writeOptionalString(description);
         out.writeBoolean(detectorUpdates != null);
         if (detectorUpdates != null) {
@@ -126,6 +143,10 @@ public class JobUpdate implements Writeable, ToXContentObject {
         return jobId;
     }

+    public List<String> getGroups() {
+        return groups;
+    }
+
     public String getDescription() {
         return description;
     }
@@ -178,6 +199,9 @@ public class JobUpdate implements Writeable, ToXContentObject {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         builder.field(Job.ID.getPreferredName(), jobId);
+        if (groups != null) {
+            builder.field(Job.GROUPS.getPreferredName(), groups);
+        }
         if (description != null) {
             builder.field(Job.DESCRIPTION.getPreferredName(), description);
         }
@@ -223,6 +247,9 @@ public class JobUpdate implements Writeable, ToXContentObject {
      */
     public Job mergeWithJob(Job source) {
         Job.Builder builder = new Job.Builder(source);
+        if (groups != null) {
+            builder.setGroups(groups);
+        }
         if (description != null) {
             builder.setDescription(description);
         }
@@ -294,6 +321,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
         JobUpdate that = (JobUpdate) other;

         return Objects.equals(this.jobId, that.jobId)
+                && Objects.equals(this.groups, that.groups)
                 && Objects.equals(this.description, that.description)
                 && Objects.equals(this.detectorUpdates, that.detectorUpdates)
                 && Objects.equals(this.modelPlotConfig, that.modelPlotConfig)
@@ -309,7 +337,7 @@ public class JobUpdate implements Writeable, ToXContentObject {

     @Override
     public int hashCode() {
-        return Objects.hash(jobId, description, detectorUpdates, modelPlotConfig, analysisLimits, renormalizationWindowDays,
+        return Objects.hash(jobId, groups, description, detectorUpdates, modelPlotConfig, analysisLimits, renormalizationWindowDays,
                 backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, categorizationFilters, customSettings,
                 modelSnapshotId);
     }
@@ -410,6 +438,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
     public static class Builder {

         private String jobId;
+        private List<String> groups;
         private String description;
         private List<DetectorUpdate> detectorUpdates;
         private ModelPlotConfig modelPlotConfig;
@@ -431,6 +460,11 @@ public class JobUpdate implements Writeable, ToXContentObject {
             return this;
         }

+        public Builder setGroups(List<String> groups) {
+            this.groups = groups;
+            return this;
+        }
+
         public Builder setDescription(String description) {
             this.description = description;
             return this;
@@ -487,7 +521,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
         }

         public JobUpdate build() {
-            return new JobUpdate(jobId, description, detectorUpdates, modelPlotConfig, analysisLimits, backgroundPersistInterval,
+            return new JobUpdate(jobId, groups, description, detectorUpdates, modelPlotConfig, analysisLimits, backgroundPersistInterval,
                     renormalizationWindowDays, resultsRetentionDays, modelSnapshotRetentionDays, categorizationFilters, customSettings,
                     modelSnapshotId);
         }
@@ -0,0 +1,59 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.groups;

import org.elasticsearch.xpack.ml.job.config.Job;

import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * An interface to represent either a job or a group of jobs
 */
interface GroupOrJob {

    boolean isGroup();
    List<Job> jobs();

    final class Group implements GroupOrJob {

        private final List<Job> jobs;

        Group(List<Job> jobs) {
            this.jobs = Collections.unmodifiableList(jobs);
        }

        @Override
        public boolean isGroup() {
            return true;
        }

        @Override
        public List<Job> jobs() {
            return jobs;
        }
    }

    final class SingleJob implements GroupOrJob {

        private final Job job;

        SingleJob(Job job) {
            this.job = Objects.requireNonNull(job);
        }

        @Override
        public boolean isGroup() {
            return false;
        }

        @Override
        public List<Job> jobs() {
            return Collections.singletonList(job);
        }
    }
}
@@ -0,0 +1,87 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.groups;

import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.NameResolver;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.stream.Collectors;

/**
 * A lookup that allows expanding expressions that may consist of job
 * IDs, job group names, wildcard patterns or a comma separated combination
 * of the aforementioned to the matching job IDs.
 * The lookup is immutable.
 */
public class GroupOrJobLookup {

    private final SortedMap<String, GroupOrJob> groupOrJobLookup;

    public GroupOrJobLookup(Collection<Job> jobs) {
        groupOrJobLookup = new TreeMap<>();
        jobs.forEach(this::put);
    }

    private void put(Job job) {
        if (groupOrJobLookup.containsKey(job.getId())) {
            throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_AND_GROUP_NAMES_MUST_BE_UNIQUE, job.getId()));
        }
        groupOrJobLookup.put(job.getId(), new GroupOrJob.SingleJob(job));
        for (String groupName : job.getGroups()) {
            GroupOrJob oldGroup = groupOrJobLookup.get(groupName);
            if (oldGroup == null) {
                groupOrJobLookup.put(groupName, new GroupOrJob.Group(Collections.singletonList(job)));
            } else {
                if (oldGroup.isGroup() == false) {
                    throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_AND_GROUP_NAMES_MUST_BE_UNIQUE, groupName));
                }
                List<Job> groupJobs = new ArrayList<>(oldGroup.jobs());
                groupJobs.add(job);
                groupOrJobLookup.put(groupName, new GroupOrJob.Group(groupJobs));
            }
        }
    }

    public Set<String> expandJobIds(String expression, boolean allowNoJobs) {
        return new GroupOrJobResolver().expand(expression, allowNoJobs);
    }

    private class GroupOrJobResolver extends NameResolver {

        private GroupOrJobResolver() {
            super(ExceptionsHelper::missingJobException);
        }

        @Override
        protected Set<String> keys() {
            return groupOrJobLookup.keySet();
        }

        @Override
        protected Set<String> nameSet() {
            return groupOrJobLookup.values().stream()
                    .filter(groupOrJob -> groupOrJob.isGroup() == false)
                    .map(groupOrJob -> groupOrJob.jobs().get(0).getId())
                    .collect(Collectors.toSet());
        }

        @Override
        protected List<String> lookup(String key) {
            GroupOrJob groupOrJob = groupOrJobLookup.get(key);
            return groupOrJob == null ? Collections.emptyList() : groupOrJob.jobs().stream().map(Job::getId).collect(Collectors.toList());
        }
    }
}
@@ -42,6 +42,8 @@ public final class Messages {
             "Inconsistent {0}; ''{1}'' specified in the body differs from ''{2}'' specified as a URL argument";
     public static final String INVALID_ID = "Invalid {0}; ''{1}'' can contain lowercase alphanumeric (a-z and 0-9), hyphens or " +
             "underscores; must start and end with alphanumeric";
+    public static final String INVALID_GROUP = "Invalid group id ''{0}''; must be non-empty string and may contain lowercase alphanumeric" +
+            " (a-z and 0-9), hyphens or underscores; must start and end with alphanumeric";

     public static final String JOB_AUDIR_DATAFEED_DATA_SEEN_AGAIN = "Datafeed has started retrieving data again";
     public static final String JOB_AUDIT_CREATED = "Job created";
@@ -151,6 +153,9 @@ public final class Messages {
     public static final String JOB_CONFIG_TIME_FIELD_NOT_ALLOWED_IN_ANALYSIS_CONFIG =
             "data_description.time_field may not be used in the analysis_config";

+    public static final String JOB_AND_GROUP_NAMES_MUST_BE_UNIQUE =
+            "job and group names must be unique but job [{0}] and group [{0}] have the same name";
+
     public static final String JOB_UNKNOWN_ID = "No known job with id ''{0}''";

     public static final String REST_CANNOT_DELETE_HIGHEST_PRIORITY =
@@ -69,7 +69,7 @@ public abstract class NameResolver {
                 result.addAll(expanded);
             } else {
                 List<String> matchingNames = lookup(token);
-                if (matchingNames == null) {
+                if (matchingNames.isEmpty()) {
                     throw notFoundExceptionSupplier.apply(token);
                 }
                 result.addAll(matchingNames);
@@ -97,7 +97,6 @@ public abstract class NameResolver {
      * @param key the key to look up
      * @return a list of the matching names or {@code null} when no matching names exist
      */
-    @Nullable
     protected abstract List<String> lookup(String key);

     /**
@@ -120,7 +119,7 @@ public abstract class NameResolver {

             @Override
             protected List<String> lookup(String key) {
-                return nameSet.contains(key) ? Collections.singletonList(key) : null;
+                return nameSet.contains(key) ? Collections.singletonList(key) : Collections.emptyList();
             }
         };
     }
@@ -21,15 +21,18 @@ import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
@@ -465,6 +468,20 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         assertEquals(expected, new HashSet<>(builder.invalidCreateTimeSettings()));
     }

+    public void testEmptyGroup() {
+        Job.Builder builder = buildJobBuilder("foo");
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> builder.setGroups(Arrays.asList("foo-group", "")));
+        assertThat(e.getMessage(), containsString("Invalid group id ''"));
+    }
+
+    public void testInvalidGroup() {
+        Job.Builder builder = buildJobBuilder("foo");
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> builder.setGroups(Arrays.asList("foo-group", "$$$")));
+        assertThat(e.getMessage(), containsString("Invalid group id '$$$'"));
+    }
+
     public static Job.Builder buildJobBuilder(String id, Date date) {
         Job.Builder builder = new Job.Builder(id);
         builder.setCreateTime(date);
@@ -500,6 +517,14 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         if (randomBoolean()) {
             builder.setJobVersion(Version.CURRENT);
         }
+        if (randomBoolean()) {
+            int groupsNum = randomIntBetween(0, 10);
+            List<String> groups = new ArrayList<>(groupsNum);
+            for (int i = 0; i < groupsNum; i++) {
+                groups.add(randomValidJobId());
+            }
+            builder.setGroups(groups);
+        }
         builder.setCreateTime(new Date(randomNonNegativeLong()));
         if (randomBoolean()) {
             builder.setFinishedTime(new Date(randomNonNegativeLong()));
@@ -24,6 +24,14 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
     @Override
     protected JobUpdate createTestInstance() {
         JobUpdate.Builder update = new JobUpdate.Builder(randomAlphaOfLength(4));
+        if (randomBoolean()) {
+            int groupsNum = randomIntBetween(0, 10);
+            List<String> groups = new ArrayList<>(groupsNum);
+            for (int i = 0; i < groupsNum; i++) {
+                groups.add(JobTests.randomValidJobId());
+            }
+            update.setGroups(groups);
+        }
         if (randomBoolean()) {
             update.setDescription(randomAlphaOfLength(20));
         }
@@ -106,6 +114,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         Map<String, Object> customSettings = Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10));

         JobUpdate.Builder updateBuilder = new JobUpdate.Builder("foo");
+        updateBuilder.setGroups(Arrays.asList("group-1", "group-2"));
         updateBuilder.setDescription("updated_description");
         updateBuilder.setDetectorUpdates(detectorUpdates);
         updateBuilder.setModelPlotConfig(modelPlotConfig);
@@ -120,6 +129,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         JobUpdate update = updateBuilder.build();

         Job.Builder jobBuilder = new Job.Builder("foo");
+        jobBuilder.setGroups(Arrays.asList("group-1"));
         Detector.Builder d1 = new Detector.Builder("info_content", "domain");
         d1.setOverFieldName("mlcategory");
         Detector.Builder d2 = new Detector.Builder("min", "field");
@@ -132,6 +142,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {

         Job updatedJob = update.mergeWithJob(jobBuilder.build());

+        assertEquals(update.getGroups(), updatedJob.getGroups());
         assertEquals(update.getDescription(), updatedJob.getDescription());
         assertEquals(update.getModelPlotConfig(), updatedJob.getModelPlotConfig());
         assertEquals(update.getAnalysisLimits(), updatedJob.getAnalysisLimits());
@@ -0,0 +1,98 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.groups;

import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.job.config.Job;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.contains;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class GroupOrJobLookupTests extends ESTestCase {

    public void testEmptyLookup_GivenAllowNoJobs() {
        GroupOrJobLookup lookup = new GroupOrJobLookup(Collections.emptyList());

        assertThat(lookup.expandJobIds("_all", true).isEmpty(), is(true));
        assertThat(lookup.expandJobIds("*", true).isEmpty(), is(true));
        assertThat(lookup.expandJobIds("foo*", true).isEmpty(), is(true));
        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo", true));
    }

    public void testEmptyLookup_GivenNotAllowNoJobs() {
        GroupOrJobLookup lookup = new GroupOrJobLookup(Collections.emptyList());

        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("_all", false));
        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("*", false));
        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo*", false));
        expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo", true));
    }

    public void testAllIsNotExpandedInCommaSeparatedExpression() {
        GroupOrJobLookup lookup = new GroupOrJobLookup(Collections.emptyList());
        ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> lookup.expandJobIds("foo-*,_all", true));
        assertThat(e.getMessage(), equalTo("No known job with id '_all'"));
    }

    public void testConstructor_GivenJobWithSameIdAsPreviousGroupName() {
        List<Job> jobs = new ArrayList<>();
        jobs.add(mockJob("foo", Arrays.asList("foo-group")));
        jobs.add(mockJob("foo-group", Collections.emptyList()));
        ResourceAlreadyExistsException e = expectThrows(ResourceAlreadyExistsException.class, () -> new GroupOrJobLookup(jobs));
        assertThat(e.getMessage(),
                equalTo("job and group names must be unique but job [foo-group] and group [foo-group] have the same name"));
    }

    public void testConstructor_GivenGroupWithSameNameAsPreviousJobId() {
        List<Job> jobs = new ArrayList<>();
        jobs.add(mockJob("foo", Collections.emptyList()));
        jobs.add(mockJob("foo-2", Arrays.asList("foo")));
        ResourceAlreadyExistsException e = expectThrows(ResourceAlreadyExistsException.class, () -> new GroupOrJobLookup(jobs));
        assertThat(e.getMessage(),
                equalTo("job and group names must be unique but job [foo] and group [foo] have the same name"));
    }

    public void testLookup() {
        List<Job> jobs = new ArrayList<>();
        jobs.add(mockJob("foo-1", Arrays.asList("foo-group", "ones")));
        jobs.add(mockJob("foo-2", Arrays.asList("foo-group", "twos")));
        jobs.add(mockJob("bar-1", Arrays.asList("bar-group", "ones")));
        jobs.add(mockJob("bar-2", Arrays.asList("bar-group", "twos")));
        jobs.add(mockJob("nogroup", Collections.emptyList()));
        GroupOrJobLookup groupOrJobLookup = new GroupOrJobLookup(jobs);

        assertThat(groupOrJobLookup.expandJobIds("_all", false), contains("bar-1", "bar-2", "foo-1", "foo-2", "nogroup"));
        assertThat(groupOrJobLookup.expandJobIds("*", false), contains("bar-1", "bar-2", "foo-1", "foo-2", "nogroup"));
        assertThat(groupOrJobLookup.expandJobIds("bar-1", false), contains("bar-1"));
        assertThat(groupOrJobLookup.expandJobIds("foo-1", false), contains("foo-1"));
        assertThat(groupOrJobLookup.expandJobIds("foo-2, bar-1", false), contains("bar-1", "foo-2"));
        assertThat(groupOrJobLookup.expandJobIds("foo-group", false), contains("foo-1", "foo-2"));
        assertThat(groupOrJobLookup.expandJobIds("bar-group", false), contains("bar-1", "bar-2"));
        assertThat(groupOrJobLookup.expandJobIds("ones", false), contains("bar-1", "foo-1"));
        assertThat(groupOrJobLookup.expandJobIds("twos", false), contains("bar-2", "foo-2"));
        assertThat(groupOrJobLookup.expandJobIds("foo-group, nogroup", false), contains("foo-1", "foo-2", "nogroup"));
        assertThat(groupOrJobLookup.expandJobIds("*-group", false), contains("bar-1", "bar-2", "foo-1", "foo-2"));
        assertThat(groupOrJobLookup.expandJobIds("foo-group,foo-1,foo-2", false), contains("foo-1", "foo-2"));
        assertThat(groupOrJobLookup.expandJobIds("foo-group,*-2", false), contains("bar-2", "foo-1", "foo-2"));
    }

    private static Job mockJob(String jobId, List<String> groups) {
        Job job = mock(Job.class);
        when(job.getId()).thenReturn(jobId);
        when(job.getGroups()).thenReturn(groups);
        return job;
    }
}
@@ -0,0 +1,35 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.groups;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.job.config.Job;

import java.util.Arrays;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;

public class GroupOrJobTests extends ESTestCase {

    public void testSingleJob() {
        Job job = mock(Job.class);
        GroupOrJob groupOrJob = new GroupOrJob.SingleJob(job);
        assertThat(groupOrJob.isGroup(), is(false));
        assertThat(groupOrJob.jobs(), contains(job));
        expectThrows(UnsupportedOperationException.class, () -> groupOrJob.jobs().add(mock(Job.class)));
    }

    public void testGroup() {
        Job job1 = mock(Job.class);
        Job job2 = mock(Job.class);
        GroupOrJob groupOrJob = new GroupOrJob.Group(Arrays.asList(job1, job2));
        assertThat(groupOrJob.isGroup(), is(true));
        assertThat(groupOrJob.jobs(), contains(job1, job2));
        expectThrows(UnsupportedOperationException.class, () -> groupOrJob.jobs().add(mock(Job.class)));
    }
}
@@ -140,7 +140,7 @@ public class NameResolverTests extends ESTestCase {

        @Override
        protected List<String> lookup(String key) {
-            return lookup.get(key);
+            return lookup.containsKey(key) ? lookup.get(key) : Collections.emptyList();
        }
    }
}
@@ -0,0 +1,245 @@
setup:
  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-foo-1
        body: >
          {
            "groups": ["foo-group", "ones"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-foo-2
        body: >
          {
            "groups": ["foo-group", "twos"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-bar-1
        body: >
          {
            "groups": ["bar-group", "ones"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-bar-2
        body: >
          {
            "groups": ["bar-group", "twos"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

  - do:
      xpack.ml.put_job:
        job_id: test-job-groups-nogroup
        body: >
          {
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with id that matches an existing group":

  - do:
      catch: /resource_already_exists_exception/
      xpack.ml.put_job:
        job_id: foo-group
        body: >
          {
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with group that matches an job id":

  - do:
      catch: /resource_already_exists_exception/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-group-matching-existing-job-id
        body: >
          {
            "groups": ["test-job-groups-nogroup"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with group that matches its id":

  - do:
      catch: /resource_already_exists_exception/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-group-matching-its-id
        body: >
          {
            "groups": ["test-job-groups-job-with-group-matching-its-id"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with empty group":

  - do:
      catch: /Invalid group id ''; must be non-empty string and may contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-empty-group
        body: >
          {
            "groups": ["foo-group", ""],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test put job with invalid group":

  - do:
      catch: /Invalid group id '___'; must be non-empty string and may contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/
      xpack.ml.put_job:
        job_id: test-job-groups-job-with-invalid-group
        body: >
          {
            "groups": ["foo", "___"],
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {}
          }

---
"Test get job API":

  - do:
      xpack.ml.get_jobs:
        job_id: "_all"
  - match: { count: 5 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}
  - match: { jobs.2.job_id: "test-job-groups-foo-1"}
  - match: { jobs.3.job_id: "test-job-groups-foo-2"}
  - match: { jobs.4.job_id: "test-job-groups-nogroup"}

  - do:
      xpack.ml.get_jobs:
        job_id: "test-job-groups-bar-1"
  - match: { count: 1 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}

  - do:
      xpack.ml.get_jobs:
        job_id: "foo-group"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.job_id: "test-job-groups-foo-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "bar-group"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "ones"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-foo-1"}

  - do:
      xpack.ml.get_jobs:
        job_id: "twos"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-2"}
  - match: { jobs.1.job_id: "test-job-groups-foo-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "*-group"
  - match: { count: 4 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}
  - match: { jobs.2.job_id: "test-job-groups-foo-1"}
  - match: { jobs.3.job_id: "test-job-groups-foo-2"}

  - do:
      xpack.ml.get_jobs:
        job_id: "bar-group,test-job-groups-nogroup"
  - match: { count: 3 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.1.job_id: "test-job-groups-bar-2"}
  - match: { jobs.2.job_id: "test-job-groups-nogroup"}

---
"Test get job stats API":

  - do:
      xpack.ml.get_job_stats:
        job_id: "foo-group"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.job_id: "test-job-groups-foo-2"}

---
"Test close job API":

  - do:
      xpack.ml.open_job:
        job_id: "test-job-groups-foo-1"

  - do:
      xpack.ml.open_job:
        job_id: "test-job-groups-bar-1"

  - do:
      xpack.ml.get_job_stats:
        job_id: "ones"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.0.state: opened}
  - match: { jobs.1.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.state: opened}

  - do:
      xpack.ml.close_job:
        job_id: "ones"

  - do:
      xpack.ml.get_job_stats:
        job_id: "ones"
  - match: { count: 2 }
  - match: { jobs.0.job_id: "test-job-groups-bar-1"}
  - match: { jobs.0.state: closed}
  - match: { jobs.1.job_id: "test-job-groups-foo-1"}
  - match: { jobs.1.state: closed}
@@ -296,6 +296,7 @@
        job_id: jobs-crud-update-job
        body: >
          {
+           "groups": ["group-1", "group-2"],
            "description":"Post update description",
            "detectors": [{"detector_index": 0, "rules": {"target_field_name": "airline",
                           "rule_conditions": [ { "condition_type": "numerical_actual",
@@ -318,6 +319,7 @@
            }
          }
  - match: { job_id: "jobs-crud-update-job" }
+ - match: { groups: ["group-1", "group-2"] }
  - match: { description: "Post update description" }
  - match: { model_plot_config.enabled: false }
  - match: { model_plot_config.terms: "foobar" }