[7.x][ML] Add categorizer stats ML result type (#58001)
This type of result will store stats about how well categorization is performing. When per-partition categorization is in use, separate documents will be written for every partition so that it is possible to see if categorization is working well for some partitions but not others. This PR is a minimal implementation to allow the C++ side changes to be made. More Java side changes related to per-partition categorization will be in followup PRs. However, even in the long term I do not see a major benefit in introducing dedicated APIs for querying categorizer stats. Like forecast request stats the categorizer stats can be read directly from the job's results alias. Backport of #57978
This commit is contained in:
parent
2da8e57f59
commit
93b693527a
|
@ -0,0 +1,40 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.core.ml.job.process.autodetect.state;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Locale;

/**
 * The status of categorization for a job. OK is the default; WARN
 * means that inappropriate numbers of categories are being found.
 */
public enum CategorizationStatus implements Writeable {
    OK, WARN;

    /**
     * Parses a status name, tolerating surrounding whitespace and any case.
     *
     * @param statusName the textual status, e.g. {@code "ok"} or {@code " WARN "}
     * @return the matching status
     * @throws IllegalArgumentException if the trimmed, upper-cased name is not a valid constant
     */
    public static CategorizationStatus fromString(String statusName) {
        // Locale.ROOT keeps the conversion locale-independent.
        String normalized = statusName.trim().toUpperCase(Locale.ROOT);
        return CategorizationStatus.valueOf(normalized);
    }

    /** Reads a status previously serialized with {@link #writeTo}. */
    public static CategorizationStatus readFromStream(StreamInput in) throws IOException {
        return in.readEnum(CategorizationStatus.class);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeEnum(this);
    }

    /** Returns the lower-case form of the constant name. */
    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
|
|
@ -0,0 +1,359 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.core.ml.job.process.autodetect.state;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.common.time.TimeUtils;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.results.ReservedFieldNames;
import org.elasticsearch.xpack.core.ml.job.results.Result;

import java.io.IOException;
import java.time.Instant;
import java.util.Objects;

/**
 * ML result type that stores statistics about how well categorization is
 * performing for a job. When per-partition categorization is in use a
 * separate document is written per partition ({@code partition_field_name}/
 * {@code partition_field_value}); otherwise those fields are absent.
 * Instances are immutable; use {@link Builder} to construct them.
 */
public class CategorizerStats implements ToXContentObject, Writeable {

    /**
     * Result type
     */
    public static final String RESULT_TYPE_VALUE = "categorizer_stats";
    public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE);

    /**
     * Field Names
     */
    public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name");
    public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value");
    public static final ParseField CATEGORIZED_DOC_COUNT_FIELD = new ParseField("categorized_doc_count");
    public static final ParseField TOTAL_CATEGORY_COUNT_FIELD = new ParseField("total_category_count");
    public static final ParseField FREQUENT_CATEGORY_COUNT_FIELD = new ParseField("frequent_category_count");
    public static final ParseField RARE_CATEGORY_COUNT_FIELD = new ParseField("rare_category_count");
    public static final ParseField DEAD_CATEGORY_COUNT_FIELD = new ParseField("dead_category_count");
    public static final ParseField FAILED_CATEGORY_COUNT_FIELD = new ParseField("failed_category_count");
    public static final ParseField CATEGORIZATION_STATUS_FIELD = new ParseField("categorization_status");
    public static final ParseField LOG_TIME_FIELD = new ParseField("log_time");
    public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp");

    public static final ConstructingObjectParser<Builder, Void> STRICT_PARSER = createParser(false);
    public static final ConstructingObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);

    private static ConstructingObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
        ConstructingObjectParser<Builder, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_FIELD.getPreferredName(),
            ignoreUnknownFields, a -> new Builder((String) a[0]));

        parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        // The result_type field is accepted but carries no state, so it is deliberately discarded.
        parser.declareString((builder, s) -> {}, Result.RESULT_TYPE);
        parser.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME);
        parser.declareString(Builder::setPartitionFieldValue, PARTITION_FIELD_VALUE);
        parser.declareLong(Builder::setCategorizedDocCount, CATEGORIZED_DOC_COUNT_FIELD);
        parser.declareLong(Builder::setTotalCategoryCount, TOTAL_CATEGORY_COUNT_FIELD);
        parser.declareLong(Builder::setFrequentCategoryCount, FREQUENT_CATEGORY_COUNT_FIELD);
        parser.declareLong(Builder::setRareCategoryCount, RARE_CATEGORY_COUNT_FIELD);
        parser.declareLong(Builder::setDeadCategoryCount, DEAD_CATEGORY_COUNT_FIELD);
        parser.declareLong(Builder::setFailedCategoryCount, FAILED_CATEGORY_COUNT_FIELD);
        parser.declareField(Builder::setCategorizationStatus,
            p -> CategorizationStatus.fromString(p.text()), CATEGORIZATION_STATUS_FIELD, ValueType.STRING);
        parser.declareField(Builder::setLogTime,
            p -> TimeUtils.parseTimeFieldToInstant(p, LOG_TIME_FIELD.getPreferredName()), LOG_TIME_FIELD, ValueType.VALUE);
        parser.declareField(Builder::setTimestamp,
            p -> TimeUtils.parseTimeFieldToInstant(p, TIMESTAMP_FIELD.getPreferredName()), TIMESTAMP_FIELD, ValueType.VALUE);

        return parser;
    }

    private final String jobId;
    private final String partitionFieldName;
    private final String partitionFieldValue;
    private final long categorizedDocCount;
    private final long totalCategoryCount;
    private final long frequentCategoryCount;
    private final long rareCategoryCount;
    private final long deadCategoryCount;
    private final long failedCategoryCount;
    private final CategorizationStatus categorizationStatus;
    private final Instant timestamp;
    private final Instant logTime;

    private CategorizerStats(String jobId, @Nullable String partitionFieldName, @Nullable String partitionFieldValue,
                             long categorizedDocCount, long totalCategoryCount, long frequentCategoryCount, long rareCategoryCount,
                             long deadCategoryCount, long failedCategoryCount, CategorizationStatus categorizationStatus, Instant timestamp,
                             Instant logTime) {
        this.jobId = Objects.requireNonNull(jobId);
        this.partitionFieldName = partitionFieldName;
        this.partitionFieldValue = partitionFieldValue;
        this.categorizedDocCount = categorizedDocCount;
        this.totalCategoryCount = totalCategoryCount;
        this.frequentCategoryCount = frequentCategoryCount;
        this.rareCategoryCount = rareCategoryCount;
        this.deadCategoryCount = deadCategoryCount;
        this.failedCategoryCount = failedCategoryCount;
        this.categorizationStatus = Objects.requireNonNull(categorizationStatus);
        // Truncate to millisecond precision so an instance equals its XContent
        // round-trip, which serializes these times as epoch millis.
        this.timestamp = Instant.ofEpochMilli(timestamp.toEpochMilli());
        this.logTime = Instant.ofEpochMilli(logTime.toEpochMilli());
    }

    public CategorizerStats(StreamInput in) throws IOException {
        jobId = in.readString();
        partitionFieldName = in.readOptionalString();
        partitionFieldValue = in.readOptionalString();
        categorizedDocCount = in.readVLong();
        totalCategoryCount = in.readVLong();
        frequentCategoryCount = in.readVLong();
        rareCategoryCount = in.readVLong();
        deadCategoryCount = in.readVLong();
        failedCategoryCount = in.readVLong();
        categorizationStatus = CategorizationStatus.readFromStream(in);
        // Order must mirror writeTo: logTime before timestamp.
        logTime = in.readInstant();
        timestamp = in.readInstant();
    }

    /** Document ID: the job-scoped prefix plus the log time in epoch millis. */
    public String getId() {
        return documentIdPrefix(jobId) + logTime.toEpochMilli();
    }

    /** Prefix shared by all categorizer stats documents of the given job. */
    public static String documentIdPrefix(String jobId) {
        return jobId + "_categorizer_stats_";
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(jobId);
        out.writeOptionalString(partitionFieldName);
        out.writeOptionalString(partitionFieldValue);
        out.writeVLong(categorizedDocCount);
        out.writeVLong(totalCategoryCount);
        out.writeVLong(frequentCategoryCount);
        out.writeVLong(rareCategoryCount);
        out.writeVLong(deadCategoryCount);
        out.writeVLong(failedCategoryCount);
        categorizationStatus.writeTo(out);
        out.writeInstant(logTime);
        out.writeInstant(timestamp);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
        if (partitionFieldName != null) {
            builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName);
            builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue);
            // Also emit the partition as a top-level field, unless its name
            // would clash with one of the reserved result field names.
            if (ReservedFieldNames.isValidFieldName(partitionFieldName)) {
                builder.field(partitionFieldName, partitionFieldValue);
            }
        }
        builder.field(CATEGORIZED_DOC_COUNT_FIELD.getPreferredName(), categorizedDocCount);
        builder.field(TOTAL_CATEGORY_COUNT_FIELD.getPreferredName(), totalCategoryCount);
        builder.field(FREQUENT_CATEGORY_COUNT_FIELD.getPreferredName(), frequentCategoryCount);
        builder.field(RARE_CATEGORY_COUNT_FIELD.getPreferredName(), rareCategoryCount);
        builder.field(DEAD_CATEGORY_COUNT_FIELD.getPreferredName(), deadCategoryCount);
        builder.field(FAILED_CATEGORY_COUNT_FIELD.getPreferredName(), failedCategoryCount);
        builder.field(CATEGORIZATION_STATUS_FIELD.getPreferredName(), categorizationStatus);
        builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.toEpochMilli());
        builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.toEpochMilli());
        builder.endObject();
        return builder;
    }

    public String getJobId() {
        return jobId;
    }

    @Nullable
    public String getPartitionFieldName() {
        return partitionFieldName;
    }

    @Nullable
    public String getPartitionFieldValue() {
        return partitionFieldValue;
    }

    public long getCategorizedDocCount() {
        return categorizedDocCount;
    }

    public long getTotalCategoryCount() {
        return totalCategoryCount;
    }

    public long getFrequentCategoryCount() {
        return frequentCategoryCount;
    }

    public long getRareCategoryCount() {
        return rareCategoryCount;
    }

    public long getDeadCategoryCount() {
        return deadCategoryCount;
    }

    public long getFailedCategoryCount() {
        // BUG FIX: previously returned deadCategoryCount (copy-paste error).
        return failedCategoryCount;
    }

    public CategorizationStatus getCategorizationStatus() {
        return categorizationStatus;
    }

    /**
     * The model timestamp when these stats were created.
     * @return The model time
     */
    public Instant getTimestamp() {
        return timestamp;
    }

    /**
     * The wall clock time at the point when this instance was created.
     * @return The wall clock time
     */
    public Instant getLogTime() {
        return logTime;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, partitionFieldName, partitionFieldValue, categorizedDocCount, totalCategoryCount,
            frequentCategoryCount, rareCategoryCount, deadCategoryCount, failedCategoryCount, categorizationStatus, timestamp, logTime);
    }

    /**
     * Compare all the fields.
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof CategorizerStats == false) {
            return false;
        }

        CategorizerStats that = (CategorizerStats) other;

        return Objects.equals(this.jobId, that.jobId)
            && Objects.equals(this.partitionFieldName, that.partitionFieldName)
            && Objects.equals(this.partitionFieldValue, that.partitionFieldValue)
            && this.categorizedDocCount == that.categorizedDocCount
            && this.totalCategoryCount == that.totalCategoryCount
            && this.frequentCategoryCount == that.frequentCategoryCount
            && this.rareCategoryCount == that.rareCategoryCount
            && this.deadCategoryCount == that.deadCategoryCount
            && this.failedCategoryCount == that.failedCategoryCount
            && Objects.equals(this.categorizationStatus, that.categorizationStatus)
            && Objects.equals(this.timestamp, that.timestamp)
            && Objects.equals(this.logTime, that.logTime);
    }

    /** Mutable builder; all counts default to 0, status to OK, times to the epoch. */
    public static class Builder {

        private final String jobId;
        private String partitionFieldName;
        private String partitionFieldValue;
        private long categorizedDocCount;
        private long totalCategoryCount;
        private long frequentCategoryCount;
        private long rareCategoryCount;
        private long deadCategoryCount;
        private long failedCategoryCount;
        private CategorizationStatus categorizationStatus = CategorizationStatus.OK;
        private Instant timestamp = Instant.EPOCH;
        private Instant logTime = Instant.EPOCH;

        public Builder(String jobId) {
            this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID.getPreferredName() + "] must not be null");
        }

        public Builder(CategorizerStats categorizerStats) {
            this.jobId = categorizerStats.jobId;
            this.partitionFieldName = categorizerStats.partitionFieldName;
            this.partitionFieldValue = categorizerStats.partitionFieldValue;
            this.categorizedDocCount = categorizerStats.categorizedDocCount;
            this.totalCategoryCount = categorizerStats.totalCategoryCount;
            this.frequentCategoryCount = categorizerStats.frequentCategoryCount;
            this.rareCategoryCount = categorizerStats.rareCategoryCount;
            this.deadCategoryCount = categorizerStats.deadCategoryCount;
            this.failedCategoryCount = categorizerStats.failedCategoryCount;
            this.categorizationStatus = categorizerStats.categorizationStatus;
            this.timestamp = categorizerStats.timestamp;
            this.logTime = categorizerStats.logTime;
        }

        public Builder setPartitionFieldName(String partitionFieldName) {
            this.partitionFieldName = partitionFieldName;
            return this;
        }

        public Builder setPartitionFieldValue(String partitionFieldValue) {
            this.partitionFieldValue = partitionFieldValue;
            return this;
        }

        public Builder setCategorizedDocCount(long categorizedDocCount) {
            this.categorizedDocCount = categorizedDocCount;
            return this;
        }

        public Builder setTotalCategoryCount(long totalCategoryCount) {
            this.totalCategoryCount = totalCategoryCount;
            return this;
        }

        public Builder setFrequentCategoryCount(long frequentCategoryCount) {
            this.frequentCategoryCount = frequentCategoryCount;
            return this;
        }

        public Builder setRareCategoryCount(long rareCategoryCount) {
            this.rareCategoryCount = rareCategoryCount;
            return this;
        }

        public Builder setDeadCategoryCount(long deadCategoryCount) {
            this.deadCategoryCount = deadCategoryCount;
            return this;
        }

        public Builder setFailedCategoryCount(long failedCategoryCount) {
            this.failedCategoryCount = failedCategoryCount;
            return this;
        }

        public Builder setCategorizationStatus(CategorizationStatus categorizationStatus) {
            this.categorizationStatus = Objects.requireNonNull(categorizationStatus,
                "[" + CATEGORIZATION_STATUS_FIELD.getPreferredName() + "] must not be null");
            return this;
        }

        public Builder setTimestamp(Instant timestamp) {
            this.timestamp = Objects.requireNonNull(timestamp, "[" + TIMESTAMP_FIELD.getPreferredName() + "] must not be null");
            return this;
        }

        public Builder setLogTime(Instant logTime) {
            this.logTime = Objects.requireNonNull(logTime, "[" + LOG_TIME_FIELD.getPreferredName() + "] must not be null");
            return this;
        }

        public CategorizerStats build() {
            return new CategorizerStats(jobId, partitionFieldName, partitionFieldValue, categorizedDocCount, totalCategoryCount,
                frequentCategoryCount, rareCategoryCount, deadCategoryCount, failedCategoryCount, categorizationStatus, timestamp,
                logTime);
        }
    }
}
|
|
@ -116,32 +116,6 @@ public class ModelSizeStats implements ToXContentObject, Writeable {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The status of categorization for a job. OK is default, WARN
|
||||
* means that inappropriate numbers of categories are being found
|
||||
*/
|
||||
public enum CategorizationStatus implements Writeable {
|
||||
OK, WARN;
|
||||
|
||||
public static CategorizationStatus fromString(String statusName) {
|
||||
return valueOf(statusName.trim().toUpperCase(Locale.ROOT));
|
||||
}
|
||||
|
||||
public static CategorizationStatus readFromStream(StreamInput in) throws IOException {
|
||||
return in.readEnum(CategorizationStatus.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeEnum(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name().toLowerCase(Locale.ROOT);
|
||||
}
|
||||
}
|
||||
|
||||
private final String jobId;
|
||||
private final long modelBytes;
|
||||
private final Long modelBytesExceeded;
|
||||
|
|
|
@ -0,0 +1,79 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.core.ml.job.process.autodetect.state;
|
||||
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractSerializingTestCase;
|
||||
|
||||
public class CategorizerStatsTests extends AbstractSerializingTestCase<CategorizerStats> {
|
||||
|
||||
public void testDefaultConstructor() {
|
||||
CategorizerStats stats = new CategorizerStats.Builder("foo").build();
|
||||
assertNull(stats.getPartitionFieldName());
|
||||
assertNull(stats.getPartitionFieldValue());
|
||||
assertEquals(0, stats.getCategorizedDocCount());
|
||||
assertEquals(0, stats.getTotalCategoryCount());
|
||||
assertEquals(0, stats.getFrequentCategoryCount());
|
||||
assertEquals(0, stats.getRareCategoryCount());
|
||||
assertEquals(0, stats.getDeadCategoryCount());
|
||||
assertEquals(0, stats.getFailedCategoryCount());
|
||||
assertEquals(CategorizationStatus.OK, stats.getCategorizationStatus());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CategorizerStats createTestInstance() {
|
||||
return createRandomized("foo");
|
||||
}
|
||||
|
||||
public static CategorizerStats createRandomized(String jobId) {
|
||||
CategorizerStats.Builder stats = new CategorizerStats.Builder(jobId);
|
||||
if (randomBoolean()) {
|
||||
stats.setPartitionFieldName(randomAlphaOfLength(10));
|
||||
stats.setPartitionFieldValue(randomAlphaOfLength(20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stats.setCategorizedDocCount(randomNonNegativeLong());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stats.setTotalCategoryCount(randomNonNegativeLong());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stats.setFrequentCategoryCount(randomNonNegativeLong());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stats.setRareCategoryCount(randomNonNegativeLong());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stats.setDeadCategoryCount(randomNonNegativeLong());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stats.setFailedCategoryCount(randomNonNegativeLong());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stats.setCategorizationStatus(randomFrom(CategorizationStatus.values()));
|
||||
}
|
||||
return stats.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Writeable.Reader<CategorizerStats> instanceReader() {
|
||||
return CategorizerStats::new;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CategorizerStats doParseInstance(XContentParser parser) {
|
||||
// Lenient because the partitionFieldName/Value pair is added as a separate field
|
||||
return CategorizerStats.LENIENT_PARSER.apply(parser, null).build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
// Because the partitionFieldName/Value pair is added as a separate field
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -10,7 +10,6 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.AbstractSerializingTestCase;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats.CategorizationStatus;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats.MemoryStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
|
@ -43,6 +43,8 @@ import org.elasticsearch.xpack.core.ml.job.config.Detector;
|
|||
import org.elasticsearch.xpack.core.ml.job.config.Job;
|
||||
import org.elasticsearch.xpack.core.ml.job.config.JobTests;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerStatsTests;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
|
||||
|
@ -189,6 +191,8 @@ public class AutodetectResultProcessorIT extends MlSingleNodeTestCase {
|
|||
resultsBuilder.addInfluencers(influencers);
|
||||
CategoryDefinition categoryDefinition = createCategoryDefinition();
|
||||
resultsBuilder.addCategoryDefinition(categoryDefinition);
|
||||
CategorizerStats categorizerStats = createCategorizerStats();
|
||||
resultsBuilder.addCategorizerStats(categorizerStats);
|
||||
ModelPlot modelPlot = createModelPlot();
|
||||
resultsBuilder.addModelPlot(modelPlot);
|
||||
Annotation annotation = createAnnotation();
|
||||
|
@ -224,6 +228,10 @@ public class AutodetectResultProcessorIT extends MlSingleNodeTestCase {
|
|||
assertEquals(1, persistedDefinition.count());
|
||||
assertEquals(categoryDefinition, persistedDefinition.results().get(0));
|
||||
|
||||
QueryPage<CategorizerStats> persistedCategorizerStats = jobResultsProvider.categorizerStats(JOB_ID, 0, 100);
|
||||
assertEquals(1, persistedCategorizerStats.count());
|
||||
assertEquals(categorizerStats, persistedCategorizerStats.results().get(0));
|
||||
|
||||
QueryPage<ModelPlot> persistedModelPlot = jobResultsProvider.modelPlot(JOB_ID, 0, 100);
|
||||
assertEquals(1, persistedModelPlot.count());
|
||||
assertEquals(modelPlot, persistedModelPlot.results().get(0));
|
||||
|
@ -474,7 +482,11 @@ public class AutodetectResultProcessorIT extends MlSingleNodeTestCase {
|
|||
}
|
||||
|
||||
private static CategoryDefinition createCategoryDefinition() {
|
||||
return new CategoryDefinitionTests().createTestInstance(JOB_ID);
|
||||
return CategoryDefinitionTests.createTestInstance(JOB_ID);
|
||||
}
|
||||
|
||||
// Builds a randomized CategorizerStats fixture for JOB_ID.
private static CategorizerStats createCategorizerStats() {
|
||||
return CategorizerStatsTests.createRandomized(JOB_ID);
|
||||
}
|
||||
|
||||
private static ModelPlot createModelPlot() {
|
||||
|
@ -519,53 +531,59 @@ public class AutodetectResultProcessorIT extends MlSingleNodeTestCase {
|
|||
private final List<AutodetectResult> results = new ArrayList<>();
|
||||
|
||||
ResultsBuilder addBucket(Bucket bucket) {
|
||||
Objects.requireNonNull(bucket);
|
||||
results.add(
|
||||
new AutodetectResult(Objects.requireNonNull(bucket), null, null, null, null, null, null, null, null, null, null, null));
|
||||
new AutodetectResult(bucket, null, null, null, null, null, null, null, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addRecords(List<AnomalyRecord> records) {
|
||||
results.add(new AutodetectResult(null, records, null, null, null, null, null, null, null, null, null, null));
|
||||
results.add(new AutodetectResult(null, records, null, null, null, null, null, null, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addInfluencers(List<Influencer> influencers) {
|
||||
results.add(new AutodetectResult(null, null, influencers, null, null, null, null, null, null, null, null, null));
|
||||
results.add(new AutodetectResult(null, null, influencers, null, null, null, null, null, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addCategoryDefinition(CategoryDefinition categoryDefinition) {
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, null, null, null, null, categoryDefinition, null));
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, null, null, null, null, categoryDefinition, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addCategorizerStats(CategorizerStats categorizerStats) {
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, null, null, null, null, null, categorizerStats, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addModelPlot(ModelPlot modelPlot) {
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, modelPlot, null, null, null, null, null));
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, modelPlot, null, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addAnnotation(Annotation annotation) {
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, null, annotation, null, null, null, null));
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, null, annotation, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addModelSizeStats(ModelSizeStats modelSizeStats) {
|
||||
results.add(new AutodetectResult(null, null, null, null, null, modelSizeStats, null, null, null, null, null, null));
|
||||
results.add(new AutodetectResult(null, null, null, null, null, modelSizeStats, null, null, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addModelSnapshot(ModelSnapshot modelSnapshot) {
|
||||
results.add(new AutodetectResult(null, null, null, null, modelSnapshot, null, null, null, null, null, null, null));
|
||||
results.add(new AutodetectResult(null, null, null, null, modelSnapshot, null, null, null, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addQuantiles(Quantiles quantiles) {
|
||||
results.add(new AutodetectResult(null, null, null, quantiles, null, null, null, null, null, null, null, null));
|
||||
results.add(new AutodetectResult(null, null, null, quantiles, null, null, null, null, null, null, null, null, null));
|
||||
return this;
|
||||
}
|
||||
|
||||
ResultsBuilder addFlushAcknowledgement(FlushAcknowledgement flushAcknowledgement) {
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, null, null, null, null, null, flushAcknowledgement));
|
||||
results.add(new AutodetectResult(null, null, null, null, null, null, null, null, null, null, null, null, flushAcknowledgement));
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -574,7 +592,6 @@ public class AutodetectResultProcessorIT extends MlSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
private <T extends ToXContent & Writeable> void assertResultsAreSame(List<T> expected, QueryPage<T> actual) {
|
||||
assertEquals(expected.size(), actual.count());
|
||||
assertEquals(actual.results().size(), actual.count());
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex;
|
|||
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
|
||||
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
|
||||
|
@ -197,6 +198,13 @@ public class JobResultsPersister {
|
|||
return this;
|
||||
}
|
||||
|
||||
// Queues an index request for the given categorizer stats document into this bulk builder.
// The document ID comes from CategorizerStats.getId(), so re-persisting the same stats overwrites.
public Builder persistCategorizerStats(CategorizerStats categorizerStats) {
|
||||
logger.trace("[{}] ES BULK ACTION: index categorizer stats to index [{}] with ID [{}]",
|
||||
jobId, indexName, categorizerStats.getId());
|
||||
indexResult(categorizerStats.getId(), categorizerStats, "categorizer stats");
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder persistForecast(Forecast forecast) {
|
||||
logger.trace("[{}] ES BULK ACTION: index forecast to index [{}] with ID [{}]", jobId, indexName, forecast.getId());
|
||||
indexResult(forecast.getId(), forecast, Forecast.RESULT_TYPE_VALUE);
|
||||
|
|
|
@ -96,6 +96,7 @@ import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
|
|||
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
|
||||
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
|
||||
|
@ -1121,6 +1122,37 @@ public class JobResultsProvider {
|
|||
return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD);
|
||||
}
|
||||
|
||||
public QueryPage<CategorizerStats> categorizerStats(String jobId, int from, int size) {
|
||||
SearchResponse searchResponse;
|
||||
String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);
|
||||
LOGGER.trace("ES API CALL: search categorizer stats from index {} from {} size {}", indexName, from, size);
|
||||
|
||||
try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(ML_ORIGIN)) {
|
||||
searchResponse = client.prepareSearch(indexName)
|
||||
.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS))
|
||||
.setQuery(new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), CategorizerStats.RESULT_TYPE_VALUE))
|
||||
.setFrom(from).setSize(size)
|
||||
.setTrackTotalHits(true)
|
||||
.get();
|
||||
}
|
||||
|
||||
List<CategorizerStats> results = new ArrayList<>();
|
||||
|
||||
for (SearchHit hit : searchResponse.getHits().getHits()) {
|
||||
BytesReference source = hit.getSourceRef();
|
||||
try (InputStream stream = source.streamInput();
|
||||
XContentParser parser = XContentFactory.xContent(XContentType.JSON)
|
||||
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
|
||||
CategorizerStats categorizerStats = CategorizerStats.LENIENT_PARSER.apply(parser, null).build();
|
||||
results.add(categorizerStats);
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchParseException("failed to parse categorizerStats", e);
|
||||
}
|
||||
}
|
||||
|
||||
return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the job's model size stats.
|
||||
*/
|
||||
|
|
|
@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapsho
|
|||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
|
||||
import org.elasticsearch.xpack.ml.job.results.AutodetectResult;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
|
@ -72,17 +71,17 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess {
|
|||
if (Arrays.asList(record).contains(MAGIC_FAILURE_VALUE)) {
|
||||
open = false;
|
||||
onProcessCrash.accept("simulated failure");
|
||||
AutodetectResult result = new AutodetectResult(null, null, null, null, null, null, null, null, null, null, null, null);
|
||||
AutodetectResult result = new AutodetectResult(null, null, null, null, null, null, null, null, null, null, null, null, null);
|
||||
results.add(result);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeResetBucketsControlMessage(DataLoadParams params) throws IOException {
|
||||
public void writeResetBucketsControlMessage(DataLoadParams params) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeUpdateModelPlotMessage(ModelPlotConfig modelPlotConfig) throws IOException {
|
||||
public void writeUpdateModelPlotMessage(ModelPlotConfig modelPlotConfig) {
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -90,15 +89,15 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void writeUpdateDetectorRulesMessage(int detectorIndex, List<DetectionRule> rules) throws IOException {
|
||||
public void writeUpdateDetectorRulesMessage(int detectorIndex, List<DetectionRule> rules) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeUpdateFiltersMessage(List<MlFilter> filters) throws IOException {
|
||||
public void writeUpdateFiltersMessage(List<MlFilter> filters) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeUpdateScheduledEventsMessage(List<ScheduledEvent> events, TimeValue bucketSpan) throws IOException {
|
||||
public void writeUpdateScheduledEventsMessage(List<ScheduledEvent> events, TimeValue bucketSpan) {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -107,10 +106,10 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess {
|
|||
* @return {@link #FLUSH_ID}
|
||||
*/
|
||||
@Override
|
||||
public String flushJob(FlushJobParams params) throws IOException {
|
||||
public String flushJob(FlushJobParams params) {
|
||||
FlushAcknowledgement flushAcknowledgement = new FlushAcknowledgement(FLUSH_ID, null);
|
||||
AutodetectResult result =
|
||||
new AutodetectResult(null, null, null, null, null, null, null, null, null, null, null,flushAcknowledgement);
|
||||
new AutodetectResult(null, null, null, null, null, null, null, null, null, null, null, null, flushAcknowledgement);
|
||||
results.add(result);
|
||||
return FLUSH_ID;
|
||||
}
|
||||
|
@ -124,17 +123,18 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
public void close() {
|
||||
if (open) {
|
||||
Quantiles quantiles = new Quantiles(jobId, new Date(), "black hole quantiles");
|
||||
AutodetectResult result = new AutodetectResult(null, null, null, quantiles, null, null, null, null, null, null, null, null);
|
||||
AutodetectResult result =
|
||||
new AutodetectResult(null, null, null, quantiles, null, null, null, null, null, null, null, null, null);
|
||||
results.add(result);
|
||||
open = false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void kill() throws IOException {
|
||||
public void kill() {
|
||||
open = false;
|
||||
}
|
||||
|
||||
|
@ -199,6 +199,6 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void forecastJob(ForecastParams params) throws IOException {
|
||||
public void forecastJob(ForecastParams params) {
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,6 +21,8 @@ import org.elasticsearch.xpack.core.ml.MachineLearningField;
|
|||
import org.elasticsearch.xpack.core.ml.action.PutJobAction;
|
||||
import org.elasticsearch.xpack.core.ml.action.UpdateJobAction;
|
||||
import org.elasticsearch.xpack.core.ml.annotations.Annotation;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizationStatus;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerStats;
|
||||
import org.elasticsearch.xpack.ml.annotations.AnnotationPersister;
|
||||
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
|
||||
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
|
||||
|
@ -269,6 +271,10 @@ public class AutodetectResultProcessor {
|
|||
if (categoryDefinition != null) {
|
||||
persister.persistCategoryDefinition(categoryDefinition, this::isAlive);
|
||||
}
|
||||
CategorizerStats categorizerStats = result.getCategorizerStats();
|
||||
if (categorizerStats != null) {
|
||||
bulkResultsPersister.persistCategorizerStats(categorizerStats);
|
||||
}
|
||||
ModelPlot modelPlot = result.getModelPlot();
|
||||
if (modelPlot != null) {
|
||||
bulkResultsPersister.persistModelPlot(modelPlot);
|
||||
|
@ -414,9 +420,9 @@ public class AutodetectResultProcessor {
|
|||
}
|
||||
|
||||
private void notifyCategorizationStatusChange(ModelSizeStats modelSizeStats) {
|
||||
ModelSizeStats.CategorizationStatus categorizationStatus = modelSizeStats.getCategorizationStatus();
|
||||
CategorizationStatus categorizationStatus = modelSizeStats.getCategorizationStatus();
|
||||
if (categorizationStatus != latestModelSizeStats.getCategorizationStatus()) {
|
||||
if (categorizationStatus == ModelSizeStats.CategorizationStatus.WARN) {
|
||||
if (categorizationStatus == CategorizationStatus.WARN) {
|
||||
auditor.warning(jobId, Messages.getMessage(Messages.JOB_AUDIT_CATEGORIZATION_STATUS_WARN, categorizationStatus,
|
||||
priorRunsBucketCount + currentRunBucketCount));
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.xpack.core.ml.annotations.Annotation;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
|
||||
|
@ -40,7 +41,8 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
TYPE.getPreferredName(), a -> new AutodetectResult((Bucket) a[0], (List<AnomalyRecord>) a[1], (List<Influencer>) a[2],
|
||||
(Quantiles) a[3], a[4] == null ? null : ((ModelSnapshot.Builder) a[4]).build(),
|
||||
a[5] == null ? null : ((ModelSizeStats.Builder) a[5]).build(), (ModelPlot) a[6], (Annotation) a[7],
|
||||
(Forecast) a[8], (ForecastRequestStats) a[9], (CategoryDefinition) a[10], (FlushAcknowledgement) a[11]));
|
||||
(Forecast) a[8], (ForecastRequestStats) a[9], (CategoryDefinition) a[10],
|
||||
a[11] == null ? null : ((CategorizerStats.Builder) a[11]).build(), (FlushAcknowledgement) a[12]));
|
||||
|
||||
static {
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Bucket.STRICT_PARSER, Bucket.RESULT_TYPE_FIELD);
|
||||
|
@ -57,6 +59,8 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastRequestStats.STRICT_PARSER,
|
||||
ForecastRequestStats.RESULTS_FIELD);
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), CategoryDefinition.STRICT_PARSER, CategoryDefinition.TYPE);
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), CategorizerStats.STRICT_PARSER,
|
||||
CategorizerStats.RESULT_TYPE_FIELD);
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), FlushAcknowledgement.PARSER, FlushAcknowledgement.TYPE);
|
||||
}
|
||||
|
||||
|
@ -71,12 +75,13 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
private final Forecast forecast;
|
||||
private final ForecastRequestStats forecastRequestStats;
|
||||
private final CategoryDefinition categoryDefinition;
|
||||
private final CategorizerStats categorizerStats;
|
||||
private final FlushAcknowledgement flushAcknowledgement;
|
||||
|
||||
public AutodetectResult(Bucket bucket, List<AnomalyRecord> records, List<Influencer> influencers, Quantiles quantiles,
|
||||
ModelSnapshot modelSnapshot, ModelSizeStats modelSizeStats, ModelPlot modelPlot, Annotation annotation,
|
||||
Forecast forecast, ForecastRequestStats forecastRequestStats, CategoryDefinition categoryDefinition,
|
||||
FlushAcknowledgement flushAcknowledgement) {
|
||||
ModelSnapshot modelSnapshot, ModelSizeStats modelSizeStats, ModelPlot modelPlot, Annotation annotation,
|
||||
Forecast forecast, ForecastRequestStats forecastRequestStats, CategoryDefinition categoryDefinition,
|
||||
CategorizerStats categorizerStats, FlushAcknowledgement flushAcknowledgement) {
|
||||
this.bucket = bucket;
|
||||
this.records = records;
|
||||
this.influencers = influencers;
|
||||
|
@ -88,6 +93,7 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
this.forecast = forecast;
|
||||
this.forecastRequestStats = forecastRequestStats;
|
||||
this.categoryDefinition = categoryDefinition;
|
||||
this.categorizerStats = categorizerStats;
|
||||
this.flushAcknowledgement = flushAcknowledgement;
|
||||
}
|
||||
|
||||
|
@ -141,25 +147,24 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
} else {
|
||||
this.categoryDefinition = null;
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_7_9_0)) {
|
||||
this.categorizerStats = in.readOptionalWriteable(CategorizerStats::new);
|
||||
} else {
|
||||
this.categorizerStats = null;
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
this.flushAcknowledgement = new FlushAcknowledgement(in);
|
||||
} else {
|
||||
this.flushAcknowledgement = null;
|
||||
}
|
||||
|
||||
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
|
||||
if (in.readBoolean()) {
|
||||
this.forecast = new Forecast(in);
|
||||
} else {
|
||||
this.forecast = null;
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
this.forecastRequestStats = new ForecastRequestStats(in);
|
||||
} else {
|
||||
this.forecastRequestStats = null;
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
this.forecast = new Forecast(in);
|
||||
} else {
|
||||
this.forecast = null;
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
this.forecastRequestStats = new ForecastRequestStats(in);
|
||||
} else {
|
||||
this.forecastRequestStats = null;
|
||||
}
|
||||
}
|
||||
|
@ -177,12 +182,12 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
writeNullable(annotation, out);
|
||||
}
|
||||
writeNullable(categoryDefinition, out);
|
||||
writeNullable(flushAcknowledgement, out);
|
||||
|
||||
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
|
||||
writeNullable(forecast, out);
|
||||
writeNullable(forecastRequestStats, out);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_9_0)) {
|
||||
out.writeOptionalWriteable(categorizerStats);
|
||||
}
|
||||
writeNullable(flushAcknowledgement, out);
|
||||
writeNullable(forecast, out);
|
||||
writeNullable(forecastRequestStats, out);
|
||||
}
|
||||
|
||||
private static void writeNullable(Writeable writeable, StreamOutput out) throws IOException {
|
||||
|
@ -215,6 +220,7 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
addNullableField(Forecast.RESULTS_FIELD, forecast, builder);
|
||||
addNullableField(ForecastRequestStats.RESULTS_FIELD, forecastRequestStats, builder);
|
||||
addNullableField(CategoryDefinition.TYPE, categoryDefinition, builder);
|
||||
addNullableField(CategorizerStats.RESULT_TYPE_FIELD, categorizerStats, builder);
|
||||
addNullableField(FlushAcknowledgement.TYPE, flushAcknowledgement, builder);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
|
@ -276,14 +282,18 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
return categoryDefinition;
|
||||
}
|
||||
|
||||
public CategorizerStats getCategorizerStats() {
|
||||
return categorizerStats;
|
||||
}
|
||||
|
||||
public FlushAcknowledgement getFlushAcknowledgement() {
|
||||
return flushAcknowledgement;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(bucket, records, influencers, categoryDefinition, flushAcknowledgement, modelPlot, annotation, forecast,
|
||||
forecastRequestStats, modelSizeStats, modelSnapshot, quantiles);
|
||||
return Objects.hash(bucket, records, influencers, categoryDefinition, categorizerStats, flushAcknowledgement, modelPlot, annotation,
|
||||
forecast, forecastRequestStats, modelSizeStats, modelSnapshot, quantiles);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -299,6 +309,7 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
Objects.equals(records, other.records) &&
|
||||
Objects.equals(influencers, other.influencers) &&
|
||||
Objects.equals(categoryDefinition, other.categoryDefinition) &&
|
||||
Objects.equals(categorizerStats, other.categorizerStats) &&
|
||||
Objects.equals(flushAcknowledgement, other.flushAcknowledgement) &&
|
||||
Objects.equals(modelPlot, other.modelPlot) &&
|
||||
Objects.equals(annotation, other.annotation) &&
|
||||
|
@ -308,5 +319,4 @@ public class AutodetectResult implements ToXContentObject, Writeable {
|
|||
Objects.equals(modelSnapshot, other.modelSnapshot) &&
|
||||
Objects.equals(quantiles, other.quantiles);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.ml.annotations.AnnotationTests;
|
|||
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
|
||||
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizationStatus;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
|
||||
|
@ -348,17 +349,17 @@ public class AutodetectResultProcessorTests extends ESTestCase {
|
|||
|
||||
// First one with ok
|
||||
ModelSizeStats modelSizeStats =
|
||||
new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(ModelSizeStats.CategorizationStatus.OK).build();
|
||||
new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(CategorizationStatus.OK).build();
|
||||
when(result.getModelSizeStats()).thenReturn(modelSizeStats);
|
||||
processorUnderTest.processResult(result);
|
||||
|
||||
// Now one with warn
|
||||
modelSizeStats = new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(ModelSizeStats.CategorizationStatus.WARN).build();
|
||||
modelSizeStats = new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(CategorizationStatus.WARN).build();
|
||||
when(result.getModelSizeStats()).thenReturn(modelSizeStats);
|
||||
processorUnderTest.processResult(result);
|
||||
|
||||
// Another with warn
|
||||
modelSizeStats = new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(ModelSizeStats.CategorizationStatus.WARN).build();
|
||||
modelSizeStats = new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(CategorizationStatus.WARN).build();
|
||||
when(result.getModelSizeStats()).thenReturn(modelSizeStats);
|
||||
processorUnderTest.processResult(result);
|
||||
|
||||
|
@ -374,7 +375,7 @@ public class AutodetectResultProcessorTests extends ESTestCase {
|
|||
|
||||
// First one with warn - this works because a default constructed ModelSizeStats has CategorizationStatus.OK
|
||||
ModelSizeStats modelSizeStats =
|
||||
new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(ModelSizeStats.CategorizationStatus.WARN).build();
|
||||
new ModelSizeStats.Builder(JOB_ID).setCategorizationStatus(CategorizationStatus.WARN).build();
|
||||
when(result.getModelSizeStats()).thenReturn(modelSizeStats);
|
||||
processorUnderTest.processResult(result);
|
||||
|
||||
|
|
|
@ -11,6 +11,7 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
|
|||
import org.elasticsearch.xpack.core.ml.annotations.Annotation;
|
||||
import org.elasticsearch.xpack.core.ml.annotations.AnnotationTests;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotTests;
|
||||
|
@ -47,6 +48,7 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<Autodetec
|
|||
Forecast forecast;
|
||||
ForecastRequestStats forecastRequestStats;
|
||||
CategoryDefinition categoryDefinition;
|
||||
CategorizerStats.Builder categorizerStats;
|
||||
FlushAcknowledgement flushAcknowledgement;
|
||||
String jobId = "foo";
|
||||
if (randomBoolean()) {
|
||||
|
@ -85,8 +87,7 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<Autodetec
|
|||
modelSnapshot = null;
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
modelSizeStats = new ModelSizeStats.Builder(jobId);
|
||||
modelSizeStats.setModelBytes(randomNonNegativeLong());
|
||||
modelSizeStats = new ModelSizeStats.Builder(jobId).setModelBytes(randomNonNegativeLong());
|
||||
} else {
|
||||
modelSizeStats = null;
|
||||
}
|
||||
|
@ -118,19 +119,22 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<Autodetec
|
|||
categoryDefinition = null;
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
flushAcknowledgement = new FlushAcknowledgement(randomAlphaOfLengthBetween(1, 20),
|
||||
randomDate());
|
||||
categorizerStats = new CategorizerStats.Builder(jobId).setCategorizedDocCount(randomNonNegativeLong());
|
||||
} else {
|
||||
categorizerStats = null;
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
flushAcknowledgement = new FlushAcknowledgement(randomAlphaOfLengthBetween(1, 20), randomDate());
|
||||
} else {
|
||||
flushAcknowledgement = null;
|
||||
}
|
||||
return new AutodetectResult(bucket, records, influencers, quantiles, modelSnapshot,
|
||||
modelSizeStats == null ? null : modelSizeStats.build(), modelPlot, annotation, forecast, forecastRequestStats,
|
||||
categoryDefinition, flushAcknowledgement);
|
||||
categoryDefinition, categorizerStats == null ? null : categorizerStats.build(), flushAcknowledgement);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Reader<AutodetectResult> instanceReader() {
|
||||
return AutodetectResult::new;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ import static org.hamcrest.Matchers.containsString;
|
|||
|
||||
public class CategoryDefinitionTests extends AbstractBWCSerializationTestCase<CategoryDefinition> {
|
||||
|
||||
public CategoryDefinition createTestInstance(String jobId) {
|
||||
public static CategoryDefinition createTestInstance(String jobId) {
|
||||
CategoryDefinition categoryDefinition = new CategoryDefinition(jobId);
|
||||
categoryDefinition.setCategoryId(randomLong());
|
||||
if (randomBoolean()) {
|
||||
|
|
|
@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction.Response.JobStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.config.JobState;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizationStatus;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
|
||||
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.TimingStats;
|
||||
|
@ -104,7 +105,7 @@ public class JobStatsMonitoringDocTests extends BaseMonitoringDocTestCase<JobSta
|
|||
.setRareCategoryCount(2)
|
||||
.setDeadCategoryCount(1)
|
||||
.setFailedCategoryCount(3)
|
||||
.setCategorizationStatus(ModelSizeStats.CategorizationStatus.WARN)
|
||||
.setCategorizationStatus(CategorizationStatus.WARN)
|
||||
.setTimestamp(date1)
|
||||
.setLogTime(date2)
|
||||
.build();
|
||||
|
|
Loading…
Reference in New Issue