[ML] Add a detector_index field to detectors, and use it for updates (elastic/x-pack-elasticsearch#1613)

Detectors now have a field called detector_index.  This is also now the
field that must be supplied when updating a detector.  (Previously it
was simply called index, which was confusing.)

When detectors are added to an analysis_config, ascending
detector_index values are reassigned starting from 0.  The intention is
never to allow deletion of detectors from an analysis_config, but
possibly to allow disabling them in the future.  This ensures that
detector_index values in results will always match the detector_index
of the detector that created them.
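For illustration only (this sketch is not part of the commit), the reassignment can be seen through the Detector and AnalysisConfig builders touched below; the function names, field names and bucket span here are arbitrary:

```java
// Minimal sketch, assuming the x-pack-ml config classes shown in this diff.
import java.util.Arrays;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.config.Detector;

class DetectorIndexSketch {
    static AnalysisConfig build() {
        Detector d1 = new Detector.Builder("mean", "responsetime").build();
        Detector d2 = new Detector.Builder("count", null).build();

        // AnalysisConfig.Builder.setDetectors(), called from the constructor below,
        // rebuilds every detector with a sequential detector_index starting at 0.
        AnalysisConfig.Builder builder = new AnalysisConfig.Builder(Arrays.asList(d1, d2));
        builder.setBucketSpan(TimeValue.timeValueMinutes(5));
        AnalysisConfig config = builder.build();

        assert config.getDetectors().get(0).getDetectorIndex() == 0;
        assert config.getDetectors().get(1).getDetectorIndex() == 1;
        return config;
    }
}
```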

relates elastic/x-pack-elasticsearch#1275

Original commit: elastic/x-pack-elasticsearch@20a660b07b
Author: David Roberts, 2017-06-02 10:26:01 +01:00 (committed by GitHub)
Parent: 36b8fe9b29
Commit: babe27afe0
19 changed files with 144 additions and 159 deletions


@@ -67,7 +67,8 @@ The API returns the following results:
       "function": "mean",
       "field_name": "responsetime",
       "partition_field_name": "airline",
-      "detector_rules": []
+      "detector_rules": [],
+      "detector_index": 0
     }
   ],
   "influencers": [


@@ -207,6 +207,10 @@ LEAVE UNDOCUMENTED
   (array) TBD
 ////
+
+`detector_index`::
+  (integer) Unique ID for the detector, used when updating it.
+  Based on the order of detectors within the `analysis_config`, starting at zero.
 
 [float]
 [[ml-datadescription]]
 ===== Data Description Objects


@@ -93,7 +93,8 @@ When the job is created, you receive the following results:
       "detector_description": "low_sum(events_per_min)",
       "function": "low_sum",
       "field_name": "events_per_min",
-      "detector_rules": []
+      "detector_rules": [],
+      "detector_index": 0
     }
   ],
   "influencers": []


@@ -55,7 +55,7 @@ if the job is open when you make the update, you must stop the data feed, close
 the job, then restart the data feed and open the job for the changes to take
 effect.
 
-//|`analysis_config`: `detectors`: `index` | A unique identifier of the
+//|`analysis_config`: `detectors`: `detector_index` | A unique identifier of the
 //detector. Matches the order of detectors returned by
 //<<ml-get-job,GET job>>, starting from 0. | No
 //|`analysis_config`: `detectors`: `detector_description` |A description of the
@@ -126,7 +126,8 @@ information, including the updated property values. For example:
       "detector_description": "Unusual message counts",
       "function": "count",
       "by_field_name": "mlcategory",
-      "detector_rules": []
+      "detector_rules": [],
+      "detector_index": 0
     }
   ],
   "influencers": []


@@ -421,7 +421,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
         private boolean usePerPartitionNormalization = false;
 
         public Builder(List<Detector> detectors) {
-            this.detectors = detectors;
+            setDetectors(detectors);
         }
 
         public Builder(AnalysisConfig analysisConfig) {
@@ -440,7 +440,19 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
         }
 
         public void setDetectors(List<Detector> detectors) {
-            this.detectors = detectors;
+            if (detectors == null) {
+                this.detectors = null;
+                return;
+            }
+            // We always assign sequential IDs to the detectors that are correct for this analysis config
+            int detectorIndex = 0;
+            List<Detector> sequentialIndexDetectors = new ArrayList<>(detectors.size());
+            for (Detector origDetector : detectors) {
+                Detector.Builder builder = new Detector.Builder(origDetector);
+                builder.setDetectorIndex(detectorIndex++);
+                sequentialIndexDetectors.add(builder.build());
+            };
+            this.detectors = sequentialIndexDetectors;
         }
 
         public void setBucketSpan(TimeValue bucketSpan) {
@@ -515,6 +527,8 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
             checkFieldIsNotNegativeIfSpecified(RESULT_FINALIZATION_WINDOW.getPreferredName(), resultFinalizationWindow);
             verifyMultipleBucketSpans();
+            verifyNoMetricFunctionsWhenSummaryCountFieldNameIsSet();
+
             overlappingBuckets = verifyOverlappingBucketsConfig(overlappingBuckets, detectors);
 
             if (usePerPartitionNormalization) {
@@ -529,6 +543,14 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
                     resultFinalizationWindow, multivariateByFields, multipleBucketSpans, usePerPartitionNormalization);
         }
 
+        private void verifyNoMetricFunctionsWhenSummaryCountFieldNameIsSet() {
+            if (Strings.isNullOrEmpty(summaryCountFieldName) == false &&
+                    detectors.stream().anyMatch(d -> Detector.METRIC.equals(d.getFunction()))) {
+                throw ExceptionsHelper.badRequestException(
+                        Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED, Detector.METRIC));
+            }
+        }
+
         private static void checkFieldIsNotNegativeIfSpecified(String fieldName, Long value) {
             if (value != null && value < 0) {
                 String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, fieldName, 0, value);


@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.ml.job.config;
 
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.support.ToXContentToBytes;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
@@ -78,6 +79,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
     public static final ParseField USE_NULL_FIELD = new ParseField("use_null");
     public static final ParseField EXCLUDE_FREQUENT_FIELD = new ParseField("exclude_frequent");
     public static final ParseField DETECTOR_RULES_FIELD = new ParseField("detector_rules");
+    public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
 
     public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("detector", Builder::new);
 
@@ -97,6 +99,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
         }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING);
         PARSER.declareObjectArray(Builder::setDetectorRules,
                 (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, parseFieldMatcher).build(), DETECTOR_RULES_FIELD);
+        PARSER.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX);
     }
 
     public static final String COUNT = "count";
@@ -313,6 +316,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
     private final boolean useNull;
     private final ExcludeFrequent excludeFrequent;
     private final List<DetectionRule> detectorRules;
+    private final int detectorIndex;
 
     public Detector(StreamInput in) throws IOException {
         detectorDescription = in.readString();
@@ -324,6 +328,12 @@ public class Detector extends ToXContentToBytes implements Writeable {
         useNull = in.readBoolean();
         excludeFrequent = in.readBoolean() ? ExcludeFrequent.readFromStream(in) : null;
         detectorRules = in.readList(DetectionRule::new);
+        if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
+            detectorIndex = in.readInt();
+        } else {
+            // negative means unknown, and is expected for 5.4 jobs
+            detectorIndex = -1;
+        }
     }
 
     @Override
@@ -342,6 +352,9 @@ public class Detector extends ToXContentToBytes implements Writeable {
             out.writeBoolean(false);
         }
         out.writeList(detectorRules);
+        if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
+            out.writeInt(detectorIndex);
+        }
     }
 
     @Override
@@ -368,12 +381,17 @@ public class Detector extends ToXContentToBytes implements Writeable {
             builder.field(EXCLUDE_FREQUENT_FIELD.getPreferredName(), excludeFrequent);
         }
         builder.field(DETECTOR_RULES_FIELD.getPreferredName(), detectorRules);
+        // negative means "unknown", which should only happen for a 5.4 job
+        if (detectorIndex >= 0) {
+            builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex);
+        }
         builder.endObject();
         return builder;
     }
 
     private Detector(String detectorDescription, String function, String fieldName, String byFieldName, String overFieldName,
-                     String partitionFieldName, boolean useNull, ExcludeFrequent excludeFrequent, List<DetectionRule> detectorRules) {
+                     String partitionFieldName, boolean useNull, ExcludeFrequent excludeFrequent, List<DetectionRule> detectorRules,
+                     int detectorIndex) {
         this.function = function;
         this.fieldName = fieldName;
         this.byFieldName = byFieldName;
@@ -381,10 +399,9 @@ public class Detector extends ToXContentToBytes implements Writeable {
         this.partitionFieldName = partitionFieldName;
         this.useNull = useNull;
         this.excludeFrequent = excludeFrequent;
-        // REMOVE THIS LINE WHEN REMOVING JACKSON_DATABIND:
-        detectorRules = detectorRules != null ? detectorRules : Collections.emptyList();
         this.detectorRules = Collections.unmodifiableList(detectorRules);
         this.detectorDescription = detectorDescription != null ? detectorDescription : DefaultDetectorDescription.of(this);
+        this.detectorIndex = detectorIndex;
     }
 
     public String getDetectorDescription() {
@@ -462,6 +479,13 @@ public class Detector extends ToXContentToBytes implements Writeable {
         return detectorRules;
     }
 
+    /**
+     * @return the detector index or a negative number if unknown
+     */
+    public int getDetectorIndex() {
+        return detectorIndex;
+    }
+
     /**
      * Returns a list with the byFieldName, overFieldName and partitionFieldName that are not null
      *
@@ -516,14 +540,15 @@ public class Detector extends ToXContentToBytes implements Writeable {
                 Objects.equals(this.partitionFieldName, that.partitionFieldName) &&
                 Objects.equals(this.useNull, that.useNull) &&
                 Objects.equals(this.excludeFrequent, that.excludeFrequent) &&
-                Objects.equals(this.detectorRules, that.detectorRules);
+                Objects.equals(this.detectorRules, that.detectorRules) &&
+                this.detectorIndex == that.detectorIndex;
     }
 
     @Override
     public int hashCode() {
         return Objects.hash(detectorDescription, function, fieldName, byFieldName,
                 overFieldName, partitionFieldName, useNull, excludeFrequent,
-                detectorRules);
+                detectorRules, detectorIndex);
     }
 
     public static class Builder {
@@ -547,6 +572,8 @@ public class Detector extends ToXContentToBytes implements Writeable {
         private boolean useNull = false;
         private ExcludeFrequent excludeFrequent;
         private List<DetectionRule> detectorRules = Collections.emptyList();
+        // negative means unknown, and is expected for v5.4 jobs
+        private int detectorIndex = -1;
 
         public Builder() {
         }
@@ -562,6 +589,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
             excludeFrequent = detector.excludeFrequent;
             detectorRules = new ArrayList<>(detector.detectorRules.size());
             detectorRules.addAll(detector.getDetectorRules());
+            detectorIndex = detector.detectorIndex;
         }
 
         public Builder(String function, String fieldName) {
@@ -605,15 +633,11 @@ public class Detector extends ToXContentToBytes implements Writeable {
             this.detectorRules = detectorRules;
         }
 
-        public List<DetectionRule> getDetectorRules() {
-            return detectorRules;
+        public void setDetectorIndex(int detectorIndex) {
+            this.detectorIndex = detectorIndex;
         }
 
         public Detector build() {
-            return build(false);
-        }
-
-        public Detector build(boolean isSummarised) {
             boolean emptyField = Strings.isEmpty(fieldName);
             boolean emptyByField = Strings.isEmpty(byFieldName);
             boolean emptyOverField = Strings.isEmpty(overFieldName);
@@ -628,11 +652,6 @@ public class Detector extends ToXContentToBytes implements Writeable {
                 }
             }
 
-            if (isSummarised && Detector.METRIC.equals(function)) {
-                throw ExceptionsHelper.badRequestException(
-                        Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED, Detector.METRIC));
-            }
-
             // check functions have required fields
             if (emptyField && Detector.FIELD_NAME_FUNCTIONS.contains(function)) {
@@ -729,7 +748,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
             }
 
             return new Detector(detectorDescription, function, fieldName, byFieldName, overFieldName, partitionFieldName,
-                    useNull, excludeFrequent, detectorRules);
+                    useNull, excludeFrequent, detectorRules, detectorIndex);
         }
 
         public List<String> extractAnalysisFields() {


@@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
 
 import java.io.IOException;
 import java.util.List;
@@ -39,7 +40,7 @@ public class JobUpdate implements Writeable, ToXContent {
         PARSER.declareLong(Builder::setResultsRetentionDays, Job.RESULTS_RETENTION_DAYS);
         PARSER.declareLong(Builder::setModelSnapshotRetentionDays, Job.MODEL_SNAPSHOT_RETENTION_DAYS);
         PARSER.declareStringArray(Builder::setCategorizationFilters, AnalysisConfig.CATEGORIZATION_FILTERS);
         PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), Job.CUSTOM_SETTINGS, ObjectParser.ValueType.OBJECT);
         PARSER.declareString(Builder::setModelSnapshotId, Job.MODEL_SNAPSHOT_ID);
     }
@@ -228,18 +229,19 @@ public class JobUpdate implements Writeable, ToXContent {
             AnalysisConfig ac = source.getAnalysisConfig();
             int numDetectors = ac.getDetectors().size();
             for (DetectorUpdate dd : detectorUpdates) {
-                if (dd.getIndex() >= numDetectors) {
-                    throw new IllegalArgumentException("Detector index is >= the number of detectors");
+                if (dd.getDetectorIndex() >= numDetectors) {
+                    throw ExceptionsHelper.badRequestException("Supplied detector_index [{}] is >= the number of detectors [{}]",
+                            dd.getDetectorIndex(), numDetectors);
                 }
 
-                Detector.Builder detectorbuilder = new Detector.Builder(ac.getDetectors().get(dd.getIndex()));
+                Detector.Builder detectorbuilder = new Detector.Builder(ac.getDetectors().get(dd.getDetectorIndex()));
                 if (dd.getDescription() != null) {
                     detectorbuilder.setDetectorDescription(dd.getDescription());
                 }
                 if (dd.getRules() != null) {
                     detectorbuilder.setDetectorRules(dd.getRules());
                 }
-                ac.getDetectors().set(dd.getIndex(), detectorbuilder.build());
+                ac.getDetectors().set(dd.getDetectorIndex(), detectorbuilder.build());
             }
 
             AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(ac);
@@ -317,28 +319,27 @@ public class JobUpdate implements Writeable, ToXContent {
                 new ConstructingObjectParser<>("detector_update", a -> new DetectorUpdate((int) a[0], (String) a[1],
                         (List<DetectionRule>) a[2]));
 
-        public static final ParseField INDEX = new ParseField("index");
         public static final ParseField RULES = new ParseField("rules");
 
        static {
-            PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), INDEX);
+            PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX);
            PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION);
            PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(),
                    (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, parseFieldMatcher).build(), RULES);
        }
 
-        private int index;
+        private int detectorIndex;
         private String description;
         private List<DetectionRule> rules;
 
-        public DetectorUpdate(int index, String description, List<DetectionRule> rules) {
-            this.index = index;
+        public DetectorUpdate(int detectorIndex, String description, List<DetectionRule> rules) {
+            this.detectorIndex = detectorIndex;
             this.description = description;
             this.rules = rules;
        }
 
        public DetectorUpdate(StreamInput in) throws IOException {
-            index = in.readInt();
+            detectorIndex = in.readInt();
            description = in.readOptionalString();
            if (in.readBoolean()) {
                rules = in.readList(DetectionRule::new);
@@ -347,8 +348,8 @@ public class JobUpdate implements Writeable, ToXContent {
            }
        }
 
-        public int getIndex() {
-            return index;
+        public int getDetectorIndex() {
+            return detectorIndex;
        }
 
        public String getDescription() {
@@ -361,7 +362,7 @@ public class JobUpdate implements Writeable, ToXContent {
 
        @Override
        public void writeTo(StreamOutput out) throws IOException {
-            out.writeInt(index);
+            out.writeInt(detectorIndex);
            out.writeOptionalString(description);
            out.writeBoolean(rules != null);
            if (rules != null) {
@@ -373,7 +374,7 @@ public class JobUpdate implements Writeable, ToXContent {
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
-            builder.field(INDEX.getPreferredName(), index);
+            builder.field(Detector.DETECTOR_INDEX.getPreferredName(), detectorIndex);
            if (description != null) {
                builder.field(Job.DESCRIPTION.getPreferredName(), description);
            }
@@ -387,7 +388,7 @@ public class JobUpdate implements Writeable, ToXContent {
 
        @Override
        public int hashCode() {
-            return Objects.hash(index, description, rules);
+            return Objects.hash(detectorIndex, description, rules);
        }
 
        @Override
@@ -400,7 +401,7 @@ public class JobUpdate implements Writeable, ToXContent {
            }
 
            DetectorUpdate that = (DetectorUpdate) other;
-            return this.index == that.index && Objects.equals(this.description, that.description)
+            return this.detectorIndex == that.detectorIndex && Objects.equals(this.description, that.description)
                    && Objects.equals(this.rules, that.rules);
        }
    }


@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.ml.job.persistence;
 
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.ml.job.config.Detector;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
 import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
@@ -306,7 +307,7 @@ public class ElasticsearchMappings {
      * @throws IOException On write error
      */
     private static void addAnomalyRecordFieldsToMapping(XContentBuilder builder) throws IOException {
-        builder.startObject(AnomalyRecord.DETECTOR_INDEX.getPreferredName())
+        builder.startObject(Detector.DETECTOR_INDEX.getPreferredName())
                 .field(TYPE, INTEGER)
                 .endObject()
                 .startObject(AnomalyRecord.ACTUAL.getPreferredName())


@@ -161,7 +161,7 @@ public class AutodetectCommunicator implements Closeable {
         if (updates != null) {
             for (JobUpdate.DetectorUpdate update : updates) {
                 if (update.getRules() != null) {
-                    autodetectProcess.writeUpdateDetectorRulesMessage(update.getIndex(), update.getRules());
+                    autodetectProcess.writeUpdateDetectorRulesMessage(update.getDetectorIndex(), update.getRules());
                 }
             }
         }


@@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.xpack.ml.job.config.Detector;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.ml.utils.time.TimeUtils;
@@ -43,7 +44,6 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
     /**
      * Result fields (all detector types)
      */
-    public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
     public static final ParseField SEQUENCE_NUM = new ParseField("sequence_num");
     public static final ParseField PROBABILITY = new ParseField("probability");
     public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
@@ -99,7 +99,7 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
         PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
         PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
         PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
-        PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX);
+        PARSER.declareInt(AnomalyRecord::setDetectorIndex, Detector.DETECTOR_INDEX);
         PARSER.declareBoolean(AnomalyRecord::setInterim, Result.IS_INTERIM);
         PARSER.declareString(AnomalyRecord::setByFieldName, BY_FIELD_NAME);
         PARSER.declareString(AnomalyRecord::setByFieldValue, BY_FIELD_VALUE);
@@ -253,7 +253,7 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
         builder.field(RECORD_SCORE.getPreferredName(), recordScore);
         builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
         builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
-        builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex);
+        builder.field(Detector.DETECTOR_INDEX.getPreferredName(), detectorIndex);
         builder.field(Result.IS_INTERIM.getPreferredName(), isInterim);
         builder.dateField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
         if (byFieldName != null) {


@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.ml.job.results;
 
+import org.elasticsearch.xpack.ml.job.config.Detector;
 import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
@@ -54,7 +55,6 @@ public final class ReservedFieldNames {
             AnomalyCause.INFLUENCERS.getPreferredName(),
             AnomalyCause.FIELD_NAME.getPreferredName(),
 
-            AnomalyRecord.DETECTOR_INDEX.getPreferredName(),
             AnomalyRecord.PROBABILITY.getPreferredName(),
             AnomalyRecord.BY_FIELD_NAME.getPreferredName(),
             AnomalyRecord.BY_FIELD_VALUE.getPreferredName(),
@@ -109,6 +109,8 @@ public final class ReservedFieldNames {
             DataCounts.LATEST_EMPTY_BUCKET_TIME.getPreferredName(),
             DataCounts.LATEST_SPARSE_BUCKET_TIME.getPreferredName(),
 
+            Detector.DETECTOR_INDEX.getPreferredName(),
+
             Influence.INFLUENCER_FIELD_NAME.getPreferredName(),
             Influence.INFLUENCER_FIELD_VALUES.getPreferredName(),


@@ -91,7 +91,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
     public void testFieldConfiguration_singleDetector_notPreSummarised() {
         // Single detector, not pre-summarised
-        Detector.Builder det = new Detector.Builder("metric", "responsetime");
+        Detector.Builder det = new Detector.Builder("max", "responsetime");
         det.setByFieldName("airline");
         det.setPartitionFieldName("sourcetype");
         AnalysisConfig ac = createConfigWithDetectors(Collections.singletonList(det.build()));
@@ -144,6 +144,8 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
         }
 
         assertEquals("summaryCount", ac.getSummaryCountFieldName());
+        assertEquals(1, ac.getDetectors().size());
+        assertEquals(0, ac.getDetectors().get(0).getDetectorIndex());
     }
 
     public void testFieldConfiguration_multipleDetectors_NotPreSummarised() {
@@ -169,7 +171,6 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
         builder.setInfluencers(Collections.singletonList("Influencer_Field"));
         AnalysisConfig ac = builder.build();
-
         Set<String> termFields = new TreeSet<>(Arrays.asList(
                 "by_one", "by_two", "over_field",
                 "partition_one", "partition_two", "Influencer_Field"));
@@ -212,9 +213,15 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
         assertTrue(ac.partitionFields().contains("partition_two"));
         assertNull(ac.getSummaryCountFieldName());
+
+        assertEquals(3, ac.getDetectors().size());
+        int expectedDetectorIndex = 0;
+        for (Detector detector : ac.getDetectors()) {
+            assertEquals(expectedDetectorIndex++, detector.getDetectorIndex());
+        }
     }
 
-    public void testFieldConfiguration_multipleDetectors_PreSummarised() {
+    public void testFieldConfiguration_singleDetector_PreSummarised() {
         // Multiple detectors, pre-summarised
         AnalysisConfig.Builder builder = createConfigBuilder();
         builder.setSummaryCountFieldName("summaryCount");
@@ -231,6 +238,9 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
         assertTrue(ac.getMultipleBucketSpans().contains(TimeValue.timeValueSeconds(5000)));
         assertTrue(ac.getMultipleBucketSpans().contains(TimeValue.timeValueSeconds(10000)));
         assertTrue(ac.getMultipleBucketSpans().contains(TimeValue.timeValueSeconds(24000)));
+
+        assertEquals(1, ac.getDetectors().size());
+        assertEquals(0, ac.getDetectors().get(0).getDetectorIndex());
     }
 
     public void testBuild_GivenMlCategoryUsedAsByFieldButNoCategorizationFieldName() {
@@ -544,7 +554,6 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
         assertEquals(Messages.getMessage(Messages.JOB_CONFIG_NO_DETECTORS), e.getMessage());
     }
 
-
     public void testVerify_GivenValidConfig() {
         AnalysisConfig.Builder analysisConfig = createValidConfig();
         analysisConfig.build();
@@ -645,6 +654,14 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
         assertFalse(ac.getOverlappingBuckets());
     }
 
+    public void testVerify_GivenMetricAndSummaryCountField() {
+        Detector d = new Detector.Builder("metric", "my_metric").build();
+        AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(d));
+        ac.setSummaryCountFieldName("my_summary_count");
+        ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, ac::build);
+        assertEquals(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED, Detector.METRIC), e.getMessage());
+    }
+
     public void testMultipleBucketsConfig() {
         AnalysisConfig.Builder ac = createValidConfig();
         ac.setMultipleBucketSpans(Arrays.asList(


@@ -207,12 +207,12 @@
             String character = (String) args[0];
             boolean valid = (boolean) args[1];
             Detector.Builder detector = createDetectorWithValidFieldNames();
-            verifyFieldNameGivenPresummarised(detector, character, valid);
+            verifyFieldName(detector, character, valid);
             detector = createDetectorWithValidFieldNames();
-            verifyByFieldNameGivenPresummarised(new Detector.Builder(detector.build()), character, valid);
-            verifyOverFieldNameGivenPresummarised(new Detector.Builder(detector.build()), character, valid);
-            verifyByFieldNameGivenPresummarised(new Detector.Builder(detector.build()), character, valid);
-            verifyPartitionFieldNameGivenPresummarised(new Detector.Builder(detector.build()), character, valid);
+            verifyByFieldName(new Detector.Builder(detector.build()), character, valid);
+            verifyOverFieldName(new Detector.Builder(detector.build()), character, valid);
+            verifyByFieldName(new Detector.Builder(detector.build()), character, valid);
+            verifyPartitionFieldName(new Detector.Builder(detector.build()), character, valid);
         }
     }
@@ -244,26 +244,6 @@
         }
     }
 
-    private static void verifyFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
-        Detector.Builder updated = createDetectorWithSpecificFieldName(detector.build().getFieldName() + character);
-        expectThrows(ElasticsearchException.class , () -> updated.build(true));
-    }
-
-    private static void verifyByFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
-        detector.setByFieldName(detector.build().getByFieldName() + character);
-        expectThrows(ElasticsearchException.class , () -> detector.build(true));
-    }
-
-    private static void verifyOverFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
-        detector.setOverFieldName(detector.build().getOverFieldName() + character);
-        expectThrows(ElasticsearchException.class , () -> detector.build(true));
-    }
-
-    private static void verifyPartitionFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
-        detector.setPartitionFieldName(detector.build().getPartitionFieldName() + character);
-        expectThrows(ElasticsearchException.class , () -> detector.build(true));
-    }
-
     private static Detector.Builder createDetectorWithValidFieldNames() {
         Detector.Builder d = new Detector.Builder("metric", "field");
         d.setByFieldName("by_field");
@@ -302,7 +282,6 @@
         // if nothing else is set the count functions (excluding distinct count)
         // are the only allowable functions
         new Detector.Builder(Detector.COUNT, null).build();
-        new Detector.Builder(Detector.COUNT, null).build(true);
 
         Set<String> difference = new HashSet<String>(Detector.ANALYSIS_FUNCTIONS);
         difference.remove(Detector.COUNT);
@@ -322,11 +301,6 @@
                 Assert.fail("ElasticsearchException not thrown when expected");
             } catch (ElasticsearchException e) {
             }
-            try {
-                new Detector.Builder(f, null).build(true);
-                Assert.fail("ElasticsearchException not thrown when expected");
-            } catch (ElasticsearchException e) {
-            }
         }
 
         // certain fields aren't allowed with certain functions
@@ -341,11 +315,6 @@
                 Assert.fail("ElasticsearchException not thrown when expected");
             } catch (ElasticsearchException e) {
             }
-            try {
-                builder.build(true);
-                Assert.fail("ElasticsearchException not thrown when expected");
-            } catch (ElasticsearchException e) {
-            }
         }
 
         // these functions cannot have just an over field
@@ -363,11 +332,6 @@
                 Assert.fail("ElasticsearchException not thrown when expected");
             } catch (ElasticsearchException e) {
             }
-            try {
-                builder.build(true);
-                Assert.fail("ElasticsearchException not thrown when expected");
-            } catch (ElasticsearchException e) {
-            }
         }
 
         // these functions can have just an over field
@@ -376,7 +340,6 @@
             Detector.Builder builder = new Detector.Builder(f, null);
             builder.setOverFieldName("over_field");
             builder.build();
-            builder.build(true);
         }
 
         for (String f : new String[]{Detector.RARE, Detector.FREQ_RARE}) {
@@ -384,10 +347,8 @@
             builder.setOverFieldName("over_field");
             builder.setByFieldName("by_field");
             builder.build();
-            builder.build(true);
         }
 
-
         // some functions require a fieldname
         for (String f : new String[]{Detector.DISTINCT_COUNT, Detector.DC,
                 Detector.HIGH_DISTINCT_COUNT, Detector.HIGH_DC, Detector.LOW_DISTINCT_COUNT, Detector.LOW_DC,
@@ -400,13 +361,6 @@
             Detector.Builder builder = new Detector.Builder(f, "f");
             builder.setOverFieldName("over_field");
             builder.build();
-            try {
-                builder.build(true);
-                Assert.assertFalse(Detector.METRIC.equals(f));
-            } catch (ElasticsearchException e) {
-                // "metric" is not allowed as the function for pre-summarised input
-                Assert.assertEquals(Detector.METRIC, f);
-            }
         }
 
         // these functions cannot have a field name
@@ -450,11 +404,6 @@
                 Assert.fail("ElasticsearchException not thrown when expected");
             } catch (ElasticsearchException e) {
             }
-            try {
-                builder.build(true);
-                Assert.fail("ElasticsearchException not thrown when expected");
-            } catch (ElasticsearchException e) {
-            }
         }
 
         // these can have a by field
@@ -464,14 +413,12 @@
             Detector.Builder builder = new Detector.Builder(f, null);
             builder.setByFieldName("b");
             builder.build();
-            builder.build(true);
         }
 
         Detector.Builder builder = new Detector.Builder(Detector.FREQ_RARE, null);
         builder.setOverFieldName("over_field");
         builder.setByFieldName("b");
         builder.build();
-        builder.build(true);
         builder = new Detector.Builder(Detector.FREQ_RARE, null);
         builder.setOverFieldName("over_field");
         builder.setByFieldName("b");
@@ -484,13 +431,6 @@
             builder = new Detector.Builder(f, "f");
             builder.setByFieldName("b");
             builder.build();
-            try {
-                builder.build(true);
-                Assert.assertFalse(Detector.METRIC.equals(f));
-            } catch (ElasticsearchException e) {
-                // "metric" is not allowed as the function for pre-summarised input
-                Assert.assertEquals(Detector.METRIC, f);
-            }
         }
 
         Assert.assertEquals(Detector.FIELD_NAME_FUNCTIONS.size(), testedFunctionsCount);
@@ -505,13 +445,6 @@
                 Assert.fail("ElasticsearchException not thrown when expected");
             } catch (ElasticsearchException e) {
             }
-            try {
-                builder = new Detector.Builder(f, "field");
-                builder.setByFieldName("b");
-                builder.build(true);
-                Assert.fail("ElasticsearchException not thrown when expected");
-            } catch (ElasticsearchException e) {
-            }
         }
 
         Assert.assertEquals(Detector.COUNT_WITHOUT_FIELD_FUNCTIONS.size(), testedFunctionsCount);
@@ -523,19 +456,12 @@
             Assert.fail("ElasticsearchException not thrown when expected");
         } catch (ElasticsearchException e) {
         }
 
-        try {
-            builder.build(true);
-            Assert.fail("ElasticsearchException not thrown when expected");
-        } catch (ElasticsearchException e) {
-        }
-
         for (String f : new String[]{Detector.HIGH_COUNT,
                 Detector.LOW_COUNT, Detector.NON_ZERO_COUNT, Detector.NZC}) {
             builder = new Detector.Builder(f, null);
             builder.setByFieldName("by_field");
             builder.build();
-            builder.build(true);
         }
 
         for (String f : new String[]{Detector.COUNT, Detector.HIGH_COUNT,
@@ -543,7 +469,6 @@
             builder = new Detector.Builder(f, null);
             builder.setOverFieldName("over_field");
             builder.build();
-            builder.build(true);
         }
 
         for (String f : new String[]{Detector.HIGH_COUNT,
@@ -552,7 +477,6 @@
             builder.setByFieldName("by_field");
             builder.setOverFieldName("over_field");
             builder.build();
-            builder.build(true);
         }
 
         for (String f : new String[]{Detector.NON_ZERO_COUNT, Detector.NZC}) {
@@ -564,14 +488,6 @@
             Assert.fail("ElasticsearchException not thrown when expected");
         } catch (ElasticsearchException e) {
         }
-        try {
-            builder = new Detector.Builder(f, "field");
-            builder.setByFieldName("by_field");
-            builder.setOverFieldName("over_field");
-            builder.build(true);
-            Assert.fail("ElasticsearchException not thrown when expected");
-        } catch (ElasticsearchException e) {
-        }
     }
 }


@@ -179,7 +179,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         Detector.Builder d1 = new Detector.Builder("max", "field");
         d1.setByFieldName("by_field");
-        Detector.Builder d2 = new Detector.Builder("metric", "field2");
+        Detector.Builder d2 = new Detector.Builder("median", "field2");
         d2.setOverFieldName("over_field");
         AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build()));
@@ -195,6 +195,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         assertTrue(analysisFields.contains("over_field"));
 
         assertFalse(analysisFields.contains("max"));
+        assertFalse(analysisFields.contains("median"));
         assertFalse(analysisFields.contains(""));
         assertFalse(analysisFields.contains(null));
@@ -216,6 +217,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         assertFalse(analysisFields.contains("count"));
         assertFalse(analysisFields.contains("max"));
+        assertFalse(analysisFields.contains("median"));
         assertFalse(analysisFields.contains(""));
         assertFalse(analysisFields.contains(null));
     }
@@ -233,7 +235,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
     public void testCheckValidId_IdTooLong() {
         Job.Builder builder = buildJobBuilder("foo");
         builder.setId("averyveryveryaveryveryveryaveryveryveryaveryveryveryaveryveryveryaveryveryverylongid");
-        expectThrows(IllegalArgumentException.class, () -> builder.build());
+        expectThrows(IllegalArgumentException.class, builder::build);
     }
 
     public void testCheckValidId_GivenAllValidChars() {
@@ -356,7 +358,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
     public void testBuilder_withInvalidIndexNameThrows() {
         Job.Builder builder = buildJobBuilder("foo");
         builder.setResultsIndexName("_bad^name");
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.build());
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build);
         assertEquals(Messages.getMessage(Messages.INVALID_ID, Job.RESULTS_INDEX_NAME.getPreferredName(), "_bad^name"), e.getMessage());
     }
@@ -426,8 +428,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         Detector.Builder d1 = new Detector.Builder("info_content", "domain");
         d1.setOverFieldName("client");
         Detector.Builder d2 = new Detector.Builder("min", "field");
-        AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build()));
-        return ac;
+        return new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build()));
     }
 
     public static Job createRandomizedJob() {


@@ -28,9 +28,8 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
             update.setDescription(randomAlphaOfLength(20));
         }
         if (randomBoolean()) {
-            List<JobUpdate.DetectorUpdate> detectorUpdates = null;
             int size = randomInt(10);
-            detectorUpdates = new ArrayList<>(size);
+            List<JobUpdate.DetectorUpdate> detectorUpdates = new ArrayList<>(size);
             for (int i = 0; i < size; i++) {
                 String detectorDescription = null;
                 if (randomBoolean()) {
@@ -144,12 +143,12 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         assertEquals(update.getCustomSettings(), updatedJob.getCustomSettings());
         assertEquals(update.getModelSnapshotId(), updatedJob.getModelSnapshotId());
         for (JobUpdate.DetectorUpdate detectorUpdate : update.getDetectorUpdates()) {
-            assertNotNull(updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getIndex()).getDetectorDescription());
+            assertNotNull(updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getDetectorDescription());
             assertEquals(detectorUpdate.getDescription(),
-                    updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getIndex()).getDetectorDescription());
-            assertNotNull(updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getIndex()).getDetectorDescription());
+                    updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getDetectorDescription());
+            assertNotNull(updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getDetectorDescription());
             assertEquals(detectorUpdate.getRules(),
-                    updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getIndex()).getDetectorRules());
+                    updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getDetectorRules());
         }
     }
@@ -158,7 +157,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         assertFalse(update.isAutodetectProcessUpdate());
         update = new JobUpdate.Builder("foo").setModelPlotConfig(new ModelPlotConfig(true, "ff")).build();
         assertTrue(update.isAutodetectProcessUpdate());
-        update = new JobUpdate.Builder("foo").setDetectorUpdates(Arrays.asList(mock(JobUpdate.DetectorUpdate.class))).build();
+        update = new JobUpdate.Builder("foo").setDetectorUpdates(Collections.singletonList(mock(JobUpdate.DetectorUpdate.class))).build();
         assertTrue(update.isAutodetectProcessUpdate());
     }
 }


@@ -24,7 +24,6 @@ import org.elasticsearch.xpack.ml.job.results.Result;
 import java.io.BufferedInputStream;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashSet;


@@ -33,7 +33,7 @@ public class ProcessCtrlTests extends ESTestCase {
         Environment env = new Environment(settings);
         Job.Builder job = buildJobBuilder("unit-test-job");
 
-        Detector.Builder detectorBuilder = new Detector.Builder("metric", "value");
+        Detector.Builder detectorBuilder = new Detector.Builder("mean", "value");
         detectorBuilder.setPartitionFieldName("foo");
         AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build()));
         acBuilder.setBucketSpan(TimeValue.timeValueSeconds(120));


@@ -71,9 +71,8 @@ public class ScoresUpdaterTests extends ESTestCase {
         Job.Builder jobBuilder = new Job.Builder(JOB_ID);
         jobBuilder.setRenormalizationWindowDays(1L);
 
-        List<Detector> detectors = new ArrayList<>();
-        detectors.add(mock(Detector.class));
-        AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(detectors);
+        Detector.Builder d = new Detector.Builder("mean", "responsetime");
+        AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
         configBuilder.setBucketSpan(TimeValue.timeValueSeconds(DEFAULT_BUCKET_SPAN));
         jobBuilder.setAnalysisConfig(configBuilder);
         jobBuilder.setDataDescription(new DataDescription.Builder());


@@ -218,10 +218,10 @@
         body: >
           {
             "description":"Post update description",
-            "detectors": [{"index": 0, "rules": {"target_field_name": "airline",
+            "detectors": [{"detector_index": 0, "rules": {"target_field_name": "airline",
                            "rule_conditions": [ { "condition_type": "numerical_actual",
                            "condition": {"operator": "gt", "value": "10" } } ] } },
-                          {"index": 1, "description": "updated description"}],
+                          {"detector_index": 1, "description": "updated description"}],
             "model_plot_config": {
               "enabled": false,
               "terms": "foobar"
@@ -245,7 +245,9 @@
   - match: { analysis_limits.model_memory_limit: 20 }
   - match: { analysis_config.categorization_filters: ["cat3.*"] }
   - match: { analysis_config.detectors.0.detector_rules.0.target_field_name: "airline" }
+  - match: { analysis_config.detectors.0.detector_index: 0 }
   - match: { analysis_config.detectors.1.detector_description: "updated description" }
+  - match: { analysis_config.detectors.1.detector_index: 1 }
   - match: { renormalization_window_days: 10 }
   - match: { background_persist_interval: "3h" }
   - match: { model_snapshot_retention_days: 30 }