[ML] Add ML result classes to protocol library (#32587)
This commit adds the ML results classes to the X-Pack protocol library used by the high level REST client. (Other commits will add the config classes and stats classes.) These classes: - Are publicly immutable - Are privately mutable - this is perhaps not as nice as the config classes, but to do otherwise would require adding builders and the corresponding server-side classes that the old transport client used don't have builders - Have little/no validation of field values beyond null checks - Are convertible to and from X-Content, but NOT wire transportable - Have lenient parsers to maximize compatibility across versions - Have the same class names and getter names as the corresponding classes in X-Pack core to ease migration for transport client users - Don't reproduce all the methods that do calculations or transformations that the corresponding classes in X-Pack core have
This commit is contained in:
parent
ada80d7fc8
commit
b99aa81fe4
|
@ -0,0 +1,292 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Anomaly Cause POJO.
|
||||
* Used as a nested level inside population anomaly records.
|
||||
*/
|
||||
public class AnomalyCause implements ToXContentObject {
|
||||
|
||||
public static final ParseField ANOMALY_CAUSE = new ParseField("anomaly_cause");
|
||||
|
||||
/**
|
||||
* Result fields
|
||||
*/
|
||||
public static final ParseField PROBABILITY = new ParseField("probability");
|
||||
public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name");
|
||||
public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value");
|
||||
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
|
||||
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
|
||||
public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value");
|
||||
public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name");
|
||||
public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value");
|
||||
public static final ParseField FUNCTION = new ParseField("function");
|
||||
public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description");
|
||||
public static final ParseField TYPICAL = new ParseField("typical");
|
||||
public static final ParseField ACTUAL = new ParseField("actual");
|
||||
public static final ParseField INFLUENCERS = new ParseField("influencers");
|
||||
|
||||
/**
|
||||
* Metric Results
|
||||
*/
|
||||
public static final ParseField FIELD_NAME = new ParseField("field_name");
|
||||
|
||||
public static final ObjectParser<AnomalyCause, Void> PARSER =
|
||||
new ObjectParser<>(ANOMALY_CAUSE.getPreferredName(), true, AnomalyCause::new);
|
||||
|
||||
static {
|
||||
PARSER.declareDouble(AnomalyCause::setProbability, PROBABILITY);
|
||||
PARSER.declareString(AnomalyCause::setByFieldName, BY_FIELD_NAME);
|
||||
PARSER.declareString(AnomalyCause::setByFieldValue, BY_FIELD_VALUE);
|
||||
PARSER.declareString(AnomalyCause::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE);
|
||||
PARSER.declareString(AnomalyCause::setPartitionFieldName, PARTITION_FIELD_NAME);
|
||||
PARSER.declareString(AnomalyCause::setPartitionFieldValue, PARTITION_FIELD_VALUE);
|
||||
PARSER.declareString(AnomalyCause::setFunction, FUNCTION);
|
||||
PARSER.declareString(AnomalyCause::setFunctionDescription, FUNCTION_DESCRIPTION);
|
||||
PARSER.declareDoubleArray(AnomalyCause::setTypical, TYPICAL);
|
||||
PARSER.declareDoubleArray(AnomalyCause::setActual, ACTUAL);
|
||||
PARSER.declareString(AnomalyCause::setFieldName, FIELD_NAME);
|
||||
PARSER.declareString(AnomalyCause::setOverFieldName, OVER_FIELD_NAME);
|
||||
PARSER.declareString(AnomalyCause::setOverFieldValue, OVER_FIELD_VALUE);
|
||||
PARSER.declareObjectArray(AnomalyCause::setInfluencers, Influence.PARSER, INFLUENCERS);
|
||||
}
|
||||
|
||||
private double probability;
|
||||
private String byFieldName;
|
||||
private String byFieldValue;
|
||||
private String correlatedByFieldValue;
|
||||
private String partitionFieldName;
|
||||
private String partitionFieldValue;
|
||||
private String function;
|
||||
private String functionDescription;
|
||||
private List<Double> typical;
|
||||
private List<Double> actual;
|
||||
private String fieldName;
|
||||
private String overFieldName;
|
||||
private String overFieldValue;
|
||||
|
||||
private List<Influence> influencers;
|
||||
|
||||
AnomalyCause() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(PROBABILITY.getPreferredName(), probability);
|
||||
if (byFieldName != null) {
|
||||
builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName);
|
||||
}
|
||||
if (byFieldValue != null) {
|
||||
builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue);
|
||||
}
|
||||
if (correlatedByFieldValue != null) {
|
||||
builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue);
|
||||
}
|
||||
if (partitionFieldName != null) {
|
||||
builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName);
|
||||
}
|
||||
if (partitionFieldValue != null) {
|
||||
builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue);
|
||||
}
|
||||
if (function != null) {
|
||||
builder.field(FUNCTION.getPreferredName(), function);
|
||||
}
|
||||
if (functionDescription != null) {
|
||||
builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription);
|
||||
}
|
||||
if (typical != null) {
|
||||
builder.field(TYPICAL.getPreferredName(), typical);
|
||||
}
|
||||
if (actual != null) {
|
||||
builder.field(ACTUAL.getPreferredName(), actual);
|
||||
}
|
||||
if (fieldName != null) {
|
||||
builder.field(FIELD_NAME.getPreferredName(), fieldName);
|
||||
}
|
||||
if (overFieldName != null) {
|
||||
builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName);
|
||||
}
|
||||
if (overFieldValue != null) {
|
||||
builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue);
|
||||
}
|
||||
if (influencers != null) {
|
||||
builder.field(INFLUENCERS.getPreferredName(), influencers);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public double getProbability() {
|
||||
return probability;
|
||||
}
|
||||
|
||||
void setProbability(double value) {
|
||||
probability = value;
|
||||
}
|
||||
|
||||
public String getByFieldName() {
|
||||
return byFieldName;
|
||||
}
|
||||
|
||||
void setByFieldName(String value) {
|
||||
byFieldName = value;
|
||||
}
|
||||
|
||||
public String getByFieldValue() {
|
||||
return byFieldValue;
|
||||
}
|
||||
|
||||
void setByFieldValue(String value) {
|
||||
byFieldValue = value;
|
||||
}
|
||||
|
||||
public String getCorrelatedByFieldValue() {
|
||||
return correlatedByFieldValue;
|
||||
}
|
||||
|
||||
void setCorrelatedByFieldValue(String value) {
|
||||
correlatedByFieldValue = value;
|
||||
}
|
||||
|
||||
public String getPartitionFieldName() {
|
||||
return partitionFieldName;
|
||||
}
|
||||
|
||||
void setPartitionFieldName(String field) {
|
||||
partitionFieldName = field;
|
||||
}
|
||||
|
||||
public String getPartitionFieldValue() {
|
||||
return partitionFieldValue;
|
||||
}
|
||||
|
||||
void setPartitionFieldValue(String value) {
|
||||
partitionFieldValue = value;
|
||||
}
|
||||
|
||||
public String getFunction() {
|
||||
return function;
|
||||
}
|
||||
|
||||
void setFunction(String name) {
|
||||
function = name;
|
||||
}
|
||||
|
||||
public String getFunctionDescription() {
|
||||
return functionDescription;
|
||||
}
|
||||
|
||||
void setFunctionDescription(String functionDescription) {
|
||||
this.functionDescription = functionDescription;
|
||||
}
|
||||
|
||||
public List<Double> getTypical() {
|
||||
return typical;
|
||||
}
|
||||
|
||||
void setTypical(List<Double> typical) {
|
||||
this.typical = Collections.unmodifiableList(typical);
|
||||
}
|
||||
|
||||
public List<Double> getActual() {
|
||||
return actual;
|
||||
}
|
||||
|
||||
void setActual(List<Double> actual) {
|
||||
this.actual = Collections.unmodifiableList(actual);
|
||||
}
|
||||
|
||||
public String getFieldName() {
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
void setFieldName(String field) {
|
||||
fieldName = field;
|
||||
}
|
||||
|
||||
public String getOverFieldName() {
|
||||
return overFieldName;
|
||||
}
|
||||
|
||||
void setOverFieldName(String name) {
|
||||
overFieldName = name;
|
||||
}
|
||||
|
||||
public String getOverFieldValue() {
|
||||
return overFieldValue;
|
||||
}
|
||||
|
||||
void setOverFieldValue(String value) {
|
||||
overFieldValue = value;
|
||||
}
|
||||
|
||||
public List<Influence> getInfluencers() {
|
||||
return influencers;
|
||||
}
|
||||
|
||||
void setInfluencers(List<Influence> influencers) {
|
||||
this.influencers = Collections.unmodifiableList(influencers);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(probability, actual, typical, byFieldName, byFieldValue, correlatedByFieldValue, fieldName, function,
|
||||
functionDescription, overFieldName, overFieldValue, partitionFieldName, partitionFieldValue, influencers);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AnomalyCause that = (AnomalyCause)other;
|
||||
|
||||
return this.probability == that.probability &&
|
||||
Objects.equals(this.typical, that.typical) &&
|
||||
Objects.equals(this.actual, that.actual) &&
|
||||
Objects.equals(this.function, that.function) &&
|
||||
Objects.equals(this.functionDescription, that.functionDescription) &&
|
||||
Objects.equals(this.fieldName, that.fieldName) &&
|
||||
Objects.equals(this.byFieldName, that.byFieldName) &&
|
||||
Objects.equals(this.byFieldValue, that.byFieldValue) &&
|
||||
Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue) &&
|
||||
Objects.equals(this.partitionFieldName, that.partitionFieldName) &&
|
||||
Objects.equals(this.partitionFieldValue, that.partitionFieldValue) &&
|
||||
Objects.equals(this.overFieldName, that.overFieldName) &&
|
||||
Objects.equals(this.overFieldValue, that.overFieldValue) &&
|
||||
Objects.equals(this.influencers, that.influencers);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,428 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Anomaly Record POJO.
|
||||
* Uses the object wrappers Boolean and Double so <code>null</code> values
|
||||
* can be returned if the members have not been set.
|
||||
*/
|
||||
public class AnomalyRecord implements ToXContentObject {
|
||||
|
||||
/**
|
||||
* Result type
|
||||
*/
|
||||
public static final String RESULT_TYPE_VALUE = "record";
|
||||
|
||||
/**
|
||||
* Result fields (all detector types)
|
||||
*/
|
||||
public static final ParseField PROBABILITY = new ParseField("probability");
|
||||
public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
|
||||
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
|
||||
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
|
||||
public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value");
|
||||
public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name");
|
||||
public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value");
|
||||
public static final ParseField FUNCTION = new ParseField("function");
|
||||
public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description");
|
||||
public static final ParseField TYPICAL = new ParseField("typical");
|
||||
public static final ParseField ACTUAL = new ParseField("actual");
|
||||
public static final ParseField INFLUENCERS = new ParseField("influencers");
|
||||
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
|
||||
|
||||
// Used for QueryPage
|
||||
public static final ParseField RESULTS_FIELD = new ParseField("records");
|
||||
|
||||
/**
|
||||
* Metric Results (including population metrics)
|
||||
*/
|
||||
public static final ParseField FIELD_NAME = new ParseField("field_name");
|
||||
|
||||
/**
|
||||
* Population results
|
||||
*/
|
||||
public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name");
|
||||
public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value");
|
||||
public static final ParseField CAUSES = new ParseField("causes");
|
||||
|
||||
/**
|
||||
* Normalization
|
||||
*/
|
||||
public static final ParseField RECORD_SCORE = new ParseField("record_score");
|
||||
public static final ParseField INITIAL_RECORD_SCORE = new ParseField("initial_record_score");
|
||||
|
||||
public static final ConstructingObjectParser<AnomalyRecord, Void> PARSER =
|
||||
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new AnomalyRecord((String) a[0], (Date) a[1], (long) a[2]));
|
||||
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
|
||||
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
|
||||
if (p.currentToken() == Token.VALUE_NUMBER) {
|
||||
return new Date(p.longValue());
|
||||
} else if (p.currentToken() == Token.VALUE_STRING) {
|
||||
return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
|
||||
}
|
||||
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
|
||||
+ Result.TIMESTAMP.getPreferredName() + "]");
|
||||
}, Result.TIMESTAMP, ValueType.VALUE);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
|
||||
PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
|
||||
PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
|
||||
PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
|
||||
PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
|
||||
PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX);
|
||||
PARSER.declareBoolean(AnomalyRecord::setInterim, Result.IS_INTERIM);
|
||||
PARSER.declareString(AnomalyRecord::setByFieldName, BY_FIELD_NAME);
|
||||
PARSER.declareString(AnomalyRecord::setByFieldValue, BY_FIELD_VALUE);
|
||||
PARSER.declareString(AnomalyRecord::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE);
|
||||
PARSER.declareString(AnomalyRecord::setPartitionFieldName, PARTITION_FIELD_NAME);
|
||||
PARSER.declareString(AnomalyRecord::setPartitionFieldValue, PARTITION_FIELD_VALUE);
|
||||
PARSER.declareString(AnomalyRecord::setFunction, FUNCTION);
|
||||
PARSER.declareString(AnomalyRecord::setFunctionDescription, FUNCTION_DESCRIPTION);
|
||||
PARSER.declareDoubleArray(AnomalyRecord::setTypical, TYPICAL);
|
||||
PARSER.declareDoubleArray(AnomalyRecord::setActual, ACTUAL);
|
||||
PARSER.declareString(AnomalyRecord::setFieldName, FIELD_NAME);
|
||||
PARSER.declareString(AnomalyRecord::setOverFieldName, OVER_FIELD_NAME);
|
||||
PARSER.declareString(AnomalyRecord::setOverFieldValue, OVER_FIELD_VALUE);
|
||||
PARSER.declareObjectArray(AnomalyRecord::setCauses, AnomalyCause.PARSER, CAUSES);
|
||||
PARSER.declareObjectArray(AnomalyRecord::setInfluencers, Influence.PARSER, INFLUENCERS);
|
||||
}
|
||||
|
||||
private final String jobId;
|
||||
private int detectorIndex;
|
||||
private double probability;
|
||||
private String byFieldName;
|
||||
private String byFieldValue;
|
||||
private String correlatedByFieldValue;
|
||||
private String partitionFieldName;
|
||||
private String partitionFieldValue;
|
||||
private String function;
|
||||
private String functionDescription;
|
||||
private List<Double> typical;
|
||||
private List<Double> actual;
|
||||
private boolean isInterim;
|
||||
|
||||
private String fieldName;
|
||||
|
||||
private String overFieldName;
|
||||
private String overFieldValue;
|
||||
private List<AnomalyCause> causes;
|
||||
|
||||
private double recordScore;
|
||||
|
||||
private double initialRecordScore;
|
||||
|
||||
private final Date timestamp;
|
||||
private final long bucketSpan;
|
||||
|
||||
private List<Influence> influences;
|
||||
|
||||
AnomalyRecord(String jobId, Date timestamp, long bucketSpan) {
|
||||
this.jobId = jobId;
|
||||
this.timestamp = Objects.requireNonNull(timestamp);
|
||||
this.bucketSpan = bucketSpan;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(Result.JOB_ID.getPreferredName(), jobId);
|
||||
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
|
||||
builder.field(PROBABILITY.getPreferredName(), probability);
|
||||
builder.field(RECORD_SCORE.getPreferredName(), recordScore);
|
||||
builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
|
||||
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
|
||||
builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex);
|
||||
builder.field(Result.IS_INTERIM.getPreferredName(), isInterim);
|
||||
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
|
||||
if (byFieldName != null) {
|
||||
builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName);
|
||||
}
|
||||
if (byFieldValue != null) {
|
||||
builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue);
|
||||
}
|
||||
if (correlatedByFieldValue != null) {
|
||||
builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue);
|
||||
}
|
||||
if (partitionFieldName != null) {
|
||||
builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName);
|
||||
}
|
||||
if (partitionFieldValue != null) {
|
||||
builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue);
|
||||
}
|
||||
if (function != null) {
|
||||
builder.field(FUNCTION.getPreferredName(), function);
|
||||
}
|
||||
if (functionDescription != null) {
|
||||
builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription);
|
||||
}
|
||||
if (typical != null) {
|
||||
builder.field(TYPICAL.getPreferredName(), typical);
|
||||
}
|
||||
if (actual != null) {
|
||||
builder.field(ACTUAL.getPreferredName(), actual);
|
||||
}
|
||||
if (fieldName != null) {
|
||||
builder.field(FIELD_NAME.getPreferredName(), fieldName);
|
||||
}
|
||||
if (overFieldName != null) {
|
||||
builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName);
|
||||
}
|
||||
if (overFieldValue != null) {
|
||||
builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue);
|
||||
}
|
||||
if (causes != null) {
|
||||
builder.field(CAUSES.getPreferredName(), causes);
|
||||
}
|
||||
if (influences != null) {
|
||||
builder.field(INFLUENCERS.getPreferredName(), influences);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return this.jobId;
|
||||
}
|
||||
|
||||
public int getDetectorIndex() {
|
||||
return detectorIndex;
|
||||
}
|
||||
|
||||
void setDetectorIndex(int detectorIndex) {
|
||||
this.detectorIndex = detectorIndex;
|
||||
}
|
||||
|
||||
public double getRecordScore() {
|
||||
return recordScore;
|
||||
}
|
||||
|
||||
void setRecordScore(double recordScore) {
|
||||
this.recordScore = recordScore;
|
||||
}
|
||||
|
||||
public double getInitialRecordScore() {
|
||||
return initialRecordScore;
|
||||
}
|
||||
|
||||
void setInitialRecordScore(double initialRecordScore) {
|
||||
this.initialRecordScore = initialRecordScore;
|
||||
}
|
||||
|
||||
public Date getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bucketspan expressed in seconds
|
||||
*/
|
||||
public long getBucketSpan() {
|
||||
return bucketSpan;
|
||||
}
|
||||
|
||||
public double getProbability() {
|
||||
return probability;
|
||||
}
|
||||
|
||||
void setProbability(double value) {
|
||||
probability = value;
|
||||
}
|
||||
|
||||
public String getByFieldName() {
|
||||
return byFieldName;
|
||||
}
|
||||
|
||||
void setByFieldName(String value) {
|
||||
byFieldName = value;
|
||||
}
|
||||
|
||||
public String getByFieldValue() {
|
||||
return byFieldValue;
|
||||
}
|
||||
|
||||
void setByFieldValue(String value) {
|
||||
byFieldValue = value;
|
||||
}
|
||||
|
||||
public String getCorrelatedByFieldValue() {
|
||||
return correlatedByFieldValue;
|
||||
}
|
||||
|
||||
void setCorrelatedByFieldValue(String value) {
|
||||
correlatedByFieldValue = value;
|
||||
}
|
||||
|
||||
public String getPartitionFieldName() {
|
||||
return partitionFieldName;
|
||||
}
|
||||
|
||||
void setPartitionFieldName(String field) {
|
||||
partitionFieldName = field;
|
||||
}
|
||||
|
||||
public String getPartitionFieldValue() {
|
||||
return partitionFieldValue;
|
||||
}
|
||||
|
||||
void setPartitionFieldValue(String value) {
|
||||
partitionFieldValue = value;
|
||||
}
|
||||
|
||||
public String getFunction() {
|
||||
return function;
|
||||
}
|
||||
|
||||
void setFunction(String name) {
|
||||
function = name;
|
||||
}
|
||||
|
||||
public String getFunctionDescription() {
|
||||
return functionDescription;
|
||||
}
|
||||
|
||||
void setFunctionDescription(String functionDescription) {
|
||||
this.functionDescription = functionDescription;
|
||||
}
|
||||
|
||||
public List<Double> getTypical() {
|
||||
return typical;
|
||||
}
|
||||
|
||||
void setTypical(List<Double> typical) {
|
||||
this.typical = Collections.unmodifiableList(typical);
|
||||
}
|
||||
|
||||
public List<Double> getActual() {
|
||||
return actual;
|
||||
}
|
||||
|
||||
void setActual(List<Double> actual) {
|
||||
this.actual = Collections.unmodifiableList(actual);
|
||||
}
|
||||
|
||||
public boolean isInterim() {
|
||||
return isInterim;
|
||||
}
|
||||
|
||||
void setInterim(boolean isInterim) {
|
||||
this.isInterim = isInterim;
|
||||
}
|
||||
|
||||
public String getFieldName() {
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
void setFieldName(String field) {
|
||||
fieldName = field;
|
||||
}
|
||||
|
||||
public String getOverFieldName() {
|
||||
return overFieldName;
|
||||
}
|
||||
|
||||
void setOverFieldName(String name) {
|
||||
overFieldName = name;
|
||||
}
|
||||
|
||||
public String getOverFieldValue() {
|
||||
return overFieldValue;
|
||||
}
|
||||
|
||||
void setOverFieldValue(String value) {
|
||||
overFieldValue = value;
|
||||
}
|
||||
|
||||
public List<AnomalyCause> getCauses() {
|
||||
return causes;
|
||||
}
|
||||
|
||||
void setCauses(List<AnomalyCause> causes) {
|
||||
this.causes = Collections.unmodifiableList(causes);
|
||||
}
|
||||
|
||||
public List<Influence> getInfluencers() {
|
||||
return influences;
|
||||
}
|
||||
|
||||
void setInfluencers(List<Influence> influencers) {
|
||||
this.influences = Collections.unmodifiableList(influencers);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore,
|
||||
initialRecordScore, typical, actual,function, functionDescription, fieldName,
|
||||
byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName,
|
||||
partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim,
|
||||
causes, influences, jobId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AnomalyRecord that = (AnomalyRecord) other;
|
||||
|
||||
return Objects.equals(this.jobId, that.jobId)
|
||||
&& this.detectorIndex == that.detectorIndex
|
||||
&& this.bucketSpan == that.bucketSpan
|
||||
&& this.probability == that.probability
|
||||
&& this.recordScore == that.recordScore
|
||||
&& this.initialRecordScore == that.initialRecordScore
|
||||
&& Objects.deepEquals(this.typical, that.typical)
|
||||
&& Objects.deepEquals(this.actual, that.actual)
|
||||
&& Objects.equals(this.function, that.function)
|
||||
&& Objects.equals(this.functionDescription, that.functionDescription)
|
||||
&& Objects.equals(this.fieldName, that.fieldName)
|
||||
&& Objects.equals(this.byFieldName, that.byFieldName)
|
||||
&& Objects.equals(this.byFieldValue, that.byFieldValue)
|
||||
&& Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue)
|
||||
&& Objects.equals(this.partitionFieldName, that.partitionFieldName)
|
||||
&& Objects.equals(this.partitionFieldValue, that.partitionFieldValue)
|
||||
&& Objects.equals(this.overFieldName, that.overFieldName)
|
||||
&& Objects.equals(this.overFieldValue, that.overFieldValue)
|
||||
&& Objects.equals(this.timestamp, that.timestamp)
|
||||
&& Objects.equals(this.isInterim, that.isInterim)
|
||||
&& Objects.equals(this.causes, that.causes)
|
||||
&& Objects.equals(this.influences, that.influences);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,246 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Bucket Result POJO
|
||||
*/
|
||||
public class Bucket implements ToXContentObject {
|
||||
|
||||
public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
|
||||
public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score");
|
||||
public static final ParseField EVENT_COUNT = new ParseField("event_count");
|
||||
public static final ParseField RECORDS = new ParseField("records");
|
||||
public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucket_influencers");
|
||||
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
|
||||
public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms");
|
||||
public static final ParseField SCHEDULED_EVENTS = new ParseField("scheduled_events");
|
||||
|
||||
// Used for QueryPage
|
||||
public static final ParseField RESULTS_FIELD = new ParseField("buckets");
|
||||
|
||||
/**
|
||||
* Result type
|
||||
*/
|
||||
public static final String RESULT_TYPE_VALUE = "bucket";
|
||||
public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE);
|
||||
|
||||
public static final ConstructingObjectParser<Bucket, Void> PARSER =
|
||||
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
|
||||
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
|
||||
if (p.currentToken() == Token.VALUE_NUMBER) {
|
||||
return new Date(p.longValue());
|
||||
} else if (p.currentToken() == Token.VALUE_STRING) {
|
||||
return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
|
||||
}
|
||||
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
|
||||
+ Result.TIMESTAMP.getPreferredName() + "]");
|
||||
}, Result.TIMESTAMP, ValueType.VALUE);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
|
||||
PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE);
|
||||
PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
|
||||
PARSER.declareBoolean(Bucket::setInterim, Result.IS_INTERIM);
|
||||
PARSER.declareLong(Bucket::setEventCount, EVENT_COUNT);
|
||||
PARSER.declareObjectArray(Bucket::setRecords, AnomalyRecord.PARSER, RECORDS);
|
||||
PARSER.declareObjectArray(Bucket::setBucketInfluencers, BucketInfluencer.PARSER, BUCKET_INFLUENCERS);
|
||||
PARSER.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS);
|
||||
PARSER.declareString((bucket, s) -> {}, Result.RESULT_TYPE);
|
||||
PARSER.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS);
|
||||
}
|
||||
|
||||
private final String jobId;
|
||||
private final Date timestamp;
|
||||
private final long bucketSpan;
|
||||
private double anomalyScore;
|
||||
private double initialAnomalyScore;
|
||||
private List<AnomalyRecord> records = new ArrayList<>();
|
||||
private long eventCount;
|
||||
private boolean isInterim;
|
||||
private List<BucketInfluencer> bucketInfluencers = new ArrayList<>(); // Can't use emptyList as might be appended to
|
||||
private long processingTimeMs;
|
||||
private List<String> scheduledEvents = Collections.emptyList();
|
||||
|
||||
Bucket(String jobId, Date timestamp, long bucketSpan) {
|
||||
this.jobId = jobId;
|
||||
this.timestamp = Objects.requireNonNull(timestamp);
|
||||
this.bucketSpan = bucketSpan;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(Result.JOB_ID.getPreferredName(), jobId);
|
||||
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
|
||||
builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
|
||||
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
|
||||
builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore);
|
||||
if (records.isEmpty() == false) {
|
||||
builder.field(RECORDS.getPreferredName(), records);
|
||||
}
|
||||
builder.field(EVENT_COUNT.getPreferredName(), eventCount);
|
||||
builder.field(Result.IS_INTERIM.getPreferredName(), isInterim);
|
||||
builder.field(BUCKET_INFLUENCERS.getPreferredName(), bucketInfluencers);
|
||||
builder.field(PROCESSING_TIME_MS.getPreferredName(), processingTimeMs);
|
||||
if (scheduledEvents.isEmpty() == false) {
|
||||
builder.field(SCHEDULED_EVENTS.getPreferredName(), scheduledEvents);
|
||||
}
|
||||
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
public Date getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bucketspan expressed in seconds
|
||||
*/
|
||||
public long getBucketSpan() {
|
||||
return bucketSpan;
|
||||
}
|
||||
|
||||
public double getAnomalyScore() {
|
||||
return anomalyScore;
|
||||
}
|
||||
|
||||
void setAnomalyScore(double anomalyScore) {
|
||||
this.anomalyScore = anomalyScore;
|
||||
}
|
||||
|
||||
public double getInitialAnomalyScore() {
|
||||
return initialAnomalyScore;
|
||||
}
|
||||
|
||||
void setInitialAnomalyScore(double initialAnomalyScore) {
|
||||
this.initialAnomalyScore = initialAnomalyScore;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all the anomaly records associated with this bucket.
|
||||
* The records are not part of the bucket document. They will
|
||||
* only be present when the bucket was retrieved and expanded
|
||||
* to contain the associated records.
|
||||
*
|
||||
* @return the anomaly records for the bucket IF the bucket was expanded.
|
||||
*/
|
||||
public List<AnomalyRecord> getRecords() {
|
||||
return records;
|
||||
}
|
||||
|
||||
void setRecords(List<AnomalyRecord> records) {
|
||||
this.records = Collections.unmodifiableList(records);
|
||||
}
|
||||
|
||||
/**
|
||||
* The number of records (events) actually processed in this bucket.
|
||||
*/
|
||||
public long getEventCount() {
|
||||
return eventCount;
|
||||
}
|
||||
|
||||
void setEventCount(long value) {
|
||||
eventCount = value;
|
||||
}
|
||||
|
||||
public boolean isInterim() {
|
||||
return isInterim;
|
||||
}
|
||||
|
||||
void setInterim(boolean isInterim) {
|
||||
this.isInterim = isInterim;
|
||||
}
|
||||
|
||||
public long getProcessingTimeMs() {
|
||||
return processingTimeMs;
|
||||
}
|
||||
|
||||
void setProcessingTimeMs(long timeMs) {
|
||||
processingTimeMs = timeMs;
|
||||
}
|
||||
|
||||
public List<BucketInfluencer> getBucketInfluencers() {
|
||||
return bucketInfluencers;
|
||||
}
|
||||
|
||||
void setBucketInfluencers(List<BucketInfluencer> bucketInfluencers) {
|
||||
this.bucketInfluencers = Collections.unmodifiableList(bucketInfluencers);
|
||||
}
|
||||
|
||||
public List<String> getScheduledEvents() {
|
||||
return scheduledEvents;
|
||||
}
|
||||
|
||||
void setScheduledEvents(List<String> scheduledEvents) {
|
||||
this.scheduledEvents = Collections.unmodifiableList(scheduledEvents);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId, timestamp, eventCount, initialAnomalyScore, anomalyScore, records,
|
||||
isInterim, bucketSpan, bucketInfluencers, processingTimeMs, scheduledEvents);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare all the fields and embedded anomaly records (if any)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Bucket that = (Bucket) other;
|
||||
|
||||
return Objects.equals(this.jobId, that.jobId) && Objects.equals(this.timestamp, that.timestamp)
|
||||
&& (this.eventCount == that.eventCount) && (this.bucketSpan == that.bucketSpan)
|
||||
&& (this.anomalyScore == that.anomalyScore) && (this.initialAnomalyScore == that.initialAnomalyScore)
|
||||
&& Objects.equals(this.records, that.records) && Objects.equals(this.isInterim, that.isInterim)
|
||||
&& Objects.equals(this.bucketInfluencers, that.bucketInfluencers)
|
||||
&& (this.processingTimeMs == that.processingTimeMs)
|
||||
&& Objects.equals(this.scheduledEvents, that.scheduledEvents);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,195 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.Date;
|
||||
import java.util.Objects;
|
||||
|
||||
public class BucketInfluencer implements ToXContentObject {
|
||||
|
||||
/**
|
||||
* Result type
|
||||
*/
|
||||
public static final String RESULT_TYPE_VALUE = "bucket_influencer";
|
||||
public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE);
|
||||
|
||||
/**
|
||||
* Field names
|
||||
*/
|
||||
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
|
||||
public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score");
|
||||
public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
|
||||
public static final ParseField RAW_ANOMALY_SCORE = new ParseField("raw_anomaly_score");
|
||||
public static final ParseField PROBABILITY = new ParseField("probability");
|
||||
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
|
||||
|
||||
public static final ConstructingObjectParser<BucketInfluencer, Void> PARSER =
|
||||
new ConstructingObjectParser<>(RESULT_TYPE_FIELD.getPreferredName(), true,
|
||||
a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
|
||||
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
|
||||
if (p.currentToken() == Token.VALUE_NUMBER) {
|
||||
return new Date(p.longValue());
|
||||
} else if (p.currentToken() == Token.VALUE_STRING) {
|
||||
return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
|
||||
}
|
||||
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
|
||||
+ Result.TIMESTAMP.getPreferredName() + "]");
|
||||
}, Result.TIMESTAMP, ValueType.VALUE);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
|
||||
PARSER.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE);
|
||||
PARSER.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME);
|
||||
PARSER.declareDouble(BucketInfluencer::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
|
||||
PARSER.declareDouble(BucketInfluencer::setAnomalyScore, ANOMALY_SCORE);
|
||||
PARSER.declareDouble(BucketInfluencer::setRawAnomalyScore, RAW_ANOMALY_SCORE);
|
||||
PARSER.declareDouble(BucketInfluencer::setProbability, PROBABILITY);
|
||||
PARSER.declareBoolean(BucketInfluencer::setIsInterim, Result.IS_INTERIM);
|
||||
}
|
||||
|
||||
private final String jobId;
|
||||
private String influenceField;
|
||||
private double initialAnomalyScore;
|
||||
private double anomalyScore;
|
||||
private double rawAnomalyScore;
|
||||
private double probability;
|
||||
private boolean isInterim;
|
||||
private final Date timestamp;
|
||||
private final long bucketSpan;
|
||||
|
||||
BucketInfluencer(String jobId, Date timestamp, long bucketSpan) {
|
||||
this.jobId = jobId;
|
||||
this.timestamp = Objects.requireNonNull(timestamp);
|
||||
this.bucketSpan = bucketSpan;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(Result.JOB_ID.getPreferredName(), jobId);
|
||||
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
|
||||
if (influenceField != null) {
|
||||
builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField);
|
||||
}
|
||||
builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore);
|
||||
builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
|
||||
builder.field(RAW_ANOMALY_SCORE.getPreferredName(), rawAnomalyScore);
|
||||
builder.field(PROBABILITY.getPreferredName(), probability);
|
||||
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
|
||||
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
|
||||
builder.field(Result.IS_INTERIM.getPreferredName(), isInterim);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
public double getProbability() {
|
||||
return probability;
|
||||
}
|
||||
|
||||
void setProbability(double probability) {
|
||||
this.probability = probability;
|
||||
}
|
||||
|
||||
public String getInfluencerFieldName() {
|
||||
return influenceField;
|
||||
}
|
||||
|
||||
void setInfluencerFieldName(String fieldName) {
|
||||
this.influenceField = fieldName;
|
||||
}
|
||||
|
||||
public double getInitialAnomalyScore() {
|
||||
return initialAnomalyScore;
|
||||
}
|
||||
|
||||
void setInitialAnomalyScore(double influenceScore) {
|
||||
this.initialAnomalyScore = influenceScore;
|
||||
}
|
||||
|
||||
public double getAnomalyScore() {
|
||||
return anomalyScore;
|
||||
}
|
||||
|
||||
void setAnomalyScore(double score) {
|
||||
anomalyScore = score;
|
||||
}
|
||||
|
||||
public double getRawAnomalyScore() {
|
||||
return rawAnomalyScore;
|
||||
}
|
||||
|
||||
void setRawAnomalyScore(double score) {
|
||||
rawAnomalyScore = score;
|
||||
}
|
||||
|
||||
void setIsInterim(boolean isInterim) {
|
||||
this.isInterim = isInterim;
|
||||
}
|
||||
|
||||
public boolean isInterim() {
|
||||
return isInterim;
|
||||
}
|
||||
|
||||
public Date getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(influenceField, initialAnomalyScore, anomalyScore, rawAnomalyScore, probability, isInterim, timestamp, jobId,
|
||||
bucketSpan);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BucketInfluencer other = (BucketInfluencer) obj;
|
||||
|
||||
return Objects.equals(influenceField, other.influenceField) && Double.compare(initialAnomalyScore, other.initialAnomalyScore) == 0
|
||||
&& Double.compare(anomalyScore, other.anomalyScore) == 0 && Double.compare(rawAnomalyScore, other.rawAnomalyScore) == 0
|
||||
&& Double.compare(probability, other.probability) == 0 && Objects.equals(isInterim, other.isInterim)
|
||||
&& Objects.equals(timestamp, other.timestamp) && Objects.equals(jobId, other.jobId) && bucketSpan == other.bucketSpan;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,168 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
|
||||
public class CategoryDefinition implements ToXContentObject {
|
||||
|
||||
public static final ParseField TYPE = new ParseField("category_definition");
|
||||
|
||||
public static final ParseField CATEGORY_ID = new ParseField("category_id");
|
||||
public static final ParseField TERMS = new ParseField("terms");
|
||||
public static final ParseField REGEX = new ParseField("regex");
|
||||
public static final ParseField MAX_MATCHING_LENGTH = new ParseField("max_matching_length");
|
||||
public static final ParseField EXAMPLES = new ParseField("examples");
|
||||
public static final ParseField GROK_PATTERN = new ParseField("grok_pattern");
|
||||
|
||||
// Used for QueryPage
|
||||
public static final ParseField RESULTS_FIELD = new ParseField("categories");
|
||||
|
||||
public static final ConstructingObjectParser<CategoryDefinition, Void> PARSER =
|
||||
new ConstructingObjectParser<>(TYPE.getPreferredName(), true, a -> new CategoryDefinition((String) a[0]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
|
||||
PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID);
|
||||
PARSER.declareString(CategoryDefinition::setTerms, TERMS);
|
||||
PARSER.declareString(CategoryDefinition::setRegex, REGEX);
|
||||
PARSER.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH);
|
||||
PARSER.declareStringArray(CategoryDefinition::setExamples, EXAMPLES);
|
||||
PARSER.declareString(CategoryDefinition::setGrokPattern, GROK_PATTERN);
|
||||
}
|
||||
|
||||
private final String jobId;
|
||||
private long categoryId = 0L;
|
||||
private String terms = "";
|
||||
private String regex = "";
|
||||
private long maxMatchingLength = 0L;
|
||||
private final Set<String> examples = new TreeSet<>();
|
||||
private String grokPattern;
|
||||
|
||||
CategoryDefinition(String jobId) {
|
||||
this.jobId = jobId;
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
public long getCategoryId() {
|
||||
return categoryId;
|
||||
}
|
||||
|
||||
void setCategoryId(long categoryId) {
|
||||
this.categoryId = categoryId;
|
||||
}
|
||||
|
||||
public String getTerms() {
|
||||
return terms;
|
||||
}
|
||||
|
||||
void setTerms(String terms) {
|
||||
this.terms = terms;
|
||||
}
|
||||
|
||||
public String getRegex() {
|
||||
return regex;
|
||||
}
|
||||
|
||||
void setRegex(String regex) {
|
||||
this.regex = regex;
|
||||
}
|
||||
|
||||
public long getMaxMatchingLength() {
|
||||
return maxMatchingLength;
|
||||
}
|
||||
|
||||
void setMaxMatchingLength(long maxMatchingLength) {
|
||||
this.maxMatchingLength = maxMatchingLength;
|
||||
}
|
||||
|
||||
public List<String> getExamples() {
|
||||
return new ArrayList<>(examples);
|
||||
}
|
||||
|
||||
void setExamples(Collection<String> examples) {
|
||||
this.examples.clear();
|
||||
this.examples.addAll(examples);
|
||||
}
|
||||
|
||||
void addExample(String example) {
|
||||
examples.add(example);
|
||||
}
|
||||
|
||||
public String getGrokPattern() {
|
||||
return grokPattern;
|
||||
}
|
||||
|
||||
void setGrokPattern(String grokPattern) {
|
||||
this.grokPattern = grokPattern;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(Result.JOB_ID.getPreferredName(), jobId);
|
||||
builder.field(CATEGORY_ID.getPreferredName(), categoryId);
|
||||
builder.field(TERMS.getPreferredName(), terms);
|
||||
builder.field(REGEX.getPreferredName(), regex);
|
||||
builder.field(MAX_MATCHING_LENGTH.getPreferredName(), maxMatchingLength);
|
||||
builder.field(EXAMPLES.getPreferredName(), examples);
|
||||
if (grokPattern != null) {
|
||||
builder.field(GROK_PATTERN.getPreferredName(), grokPattern);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
CategoryDefinition that = (CategoryDefinition) other;
|
||||
return Objects.equals(this.jobId, that.jobId)
|
||||
&& Objects.equals(this.categoryId, that.categoryId)
|
||||
&& Objects.equals(this.terms, that.terms)
|
||||
&& Objects.equals(this.regex, that.regex)
|
||||
&& Objects.equals(this.maxMatchingLength, that.maxMatchingLength)
|
||||
&& Objects.equals(this.examples, that.examples)
|
||||
&& Objects.equals(this.grokPattern, that.grokPattern);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId, categoryId, terms, regex, maxMatchingLength, examples, grokPattern);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,99 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Influence field name and list of influence field values/score pairs
|
||||
*/
|
||||
public class Influence implements ToXContentObject {
|
||||
|
||||
/**
|
||||
* Note all X-Content serialized field names are "influencer" not "influence"
|
||||
*/
|
||||
public static final ParseField INFLUENCER = new ParseField("influencer");
|
||||
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
|
||||
public static final ParseField INFLUENCER_FIELD_VALUES = new ParseField("influencer_field_values");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<Influence, Void> PARSER =
|
||||
new ConstructingObjectParser<>(INFLUENCER.getPreferredName(), true, a -> new Influence((String) a[0], (List<String>) a[1]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
|
||||
PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUES);
|
||||
}
|
||||
|
||||
private String field;
|
||||
private List<String> fieldValues;
|
||||
|
||||
Influence(String field, List<String> fieldValues) {
|
||||
this.field = field;
|
||||
this.fieldValues = Collections.unmodifiableList(fieldValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), field);
|
||||
builder.field(INFLUENCER_FIELD_VALUES.getPreferredName(), fieldValues);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public String getInfluencerFieldName() {
|
||||
return field;
|
||||
}
|
||||
|
||||
public List<String> getInfluencerFieldValues() {
|
||||
return fieldValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(field, fieldValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Influence other = (Influence) obj;
|
||||
return Objects.equals(field, other.field) && Objects.equals(fieldValues, other.fieldValues);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,191 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.Date;
|
||||
import java.util.Objects;
|
||||
|
||||
public class Influencer implements ToXContentObject {
|
||||
|
||||
/**
|
||||
* Result type
|
||||
*/
|
||||
public static final String RESULT_TYPE_VALUE = "influencer";
|
||||
public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE);
|
||||
|
||||
/*
|
||||
* Field names
|
||||
*/
|
||||
public static final ParseField PROBABILITY = new ParseField("probability");
|
||||
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
|
||||
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
|
||||
public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencer_field_value");
|
||||
public static final ParseField INITIAL_INFLUENCER_SCORE = new ParseField("initial_influencer_score");
|
||||
public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score");
|
||||
|
||||
// Used for QueryPage
|
||||
public static final ParseField RESULTS_FIELD = new ParseField("influencers");
|
||||
|
||||
public static final ConstructingObjectParser<Influencer, Void> PARSER = new ConstructingObjectParser<>(
|
||||
RESULT_TYPE_FIELD.getPreferredName(), true,
|
||||
a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE);
|
||||
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
|
||||
if (p.currentToken() == Token.VALUE_NUMBER) {
|
||||
return new Date(p.longValue());
|
||||
} else if (p.currentToken() == Token.VALUE_STRING) {
|
||||
return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
|
||||
}
|
||||
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
|
||||
+ Result.TIMESTAMP.getPreferredName() + "]");
|
||||
}, Result.TIMESTAMP, ValueType.VALUE);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
|
||||
PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE);
|
||||
PARSER.declareDouble(Influencer::setProbability, PROBABILITY);
|
||||
PARSER.declareDouble(Influencer::setInfluencerScore, INFLUENCER_SCORE);
|
||||
PARSER.declareDouble(Influencer::setInitialInfluencerScore, INITIAL_INFLUENCER_SCORE);
|
||||
PARSER.declareBoolean(Influencer::setInterim, Result.IS_INTERIM);
|
||||
}
|
||||
|
||||
private final String jobId;
|
||||
private final Date timestamp;
|
||||
private final long bucketSpan;
|
||||
private String influenceField;
|
||||
private String influenceValue;
|
||||
private double probability;
|
||||
private double initialInfluencerScore;
|
||||
private double influencerScore;
|
||||
private boolean isInterim;
|
||||
|
||||
Influencer(String jobId, String fieldName, String fieldValue, Date timestamp, long bucketSpan) {
|
||||
this.jobId = jobId;
|
||||
influenceField = fieldName;
|
||||
influenceValue = fieldValue;
|
||||
this.timestamp = Objects.requireNonNull(timestamp);
|
||||
this.bucketSpan = bucketSpan;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(Result.JOB_ID.getPreferredName(), jobId);
|
||||
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
|
||||
builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField);
|
||||
builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue);
|
||||
builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore);
|
||||
builder.field(INITIAL_INFLUENCER_SCORE.getPreferredName(), initialInfluencerScore);
|
||||
builder.field(PROBABILITY.getPreferredName(), probability);
|
||||
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
|
||||
builder.field(Result.IS_INTERIM.getPreferredName(), isInterim);
|
||||
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
public double getProbability() {
|
||||
return probability;
|
||||
}
|
||||
|
||||
void setProbability(double probability) {
|
||||
this.probability = probability;
|
||||
}
|
||||
|
||||
public Date getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
public String getInfluencerFieldName() {
|
||||
return influenceField;
|
||||
}
|
||||
|
||||
public String getInfluencerFieldValue() {
|
||||
return influenceValue;
|
||||
}
|
||||
|
||||
public double getInitialInfluencerScore() {
|
||||
return initialInfluencerScore;
|
||||
}
|
||||
|
||||
void setInitialInfluencerScore(double score) {
|
||||
initialInfluencerScore = score;
|
||||
}
|
||||
|
||||
// Current (possibly renormalised) influencer score.
public double getInfluencerScore() {
    return influencerScore;
}
|
||||
|
||||
// Package-private mutator used only by the parser.
void setInfluencerScore(double score) {
    influencerScore = score;
}
|
||||
|
||||
// True if this is an interim result, i.e. produced before the bucket was finalised.
public boolean isInterim() {
    return isInterim;
}
|
||||
|
||||
// Package-private mutator used only by the parser.
void setInterim(boolean value) {
    isInterim = value;
}
|
||||
|
||||
@Override
public int hashCode() {
    // Hashes exactly the fields compared in equals(), keeping the contract consistent.
    return Objects.hash(jobId, timestamp, influenceField, influenceValue, initialInfluencerScore,
            influencerScore, probability, isInterim, bucketSpan);
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Influencer other = (Influencer) obj;
|
||||
return Objects.equals(jobId, other.jobId) && Objects.equals(timestamp, other.timestamp)
|
||||
&& Objects.equals(influenceField, other.influenceField)
|
||||
&& Objects.equals(influenceValue, other.influenceValue)
|
||||
&& Double.compare(initialInfluencerScore, other.initialInfluencerScore) == 0
|
||||
&& Double.compare(influencerScore, other.influencerScore) == 0 && Double.compare(probability, other.probability) == 0
|
||||
&& (isInterim == other.isInterim) && (bucketSpan == other.bucketSpan);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,216 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
 * Overall Bucket Result POJO.
 *
 * Publicly immutable, privately mutable: only the jobs list has a setter and it is
 * package-private, invoked by the parser; the stored list is wrapped unmodifiable.
 * Convertible to and from X-Content but not wire transportable; the parser is
 * lenient to maximise compatibility across versions.
 */
public class OverallBucket implements ToXContentObject {

    public static final ParseField OVERALL_SCORE = new ParseField("overall_score");
    public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
    public static final ParseField JOBS = new ParseField("jobs");

    // Used for QueryPage
    public static final ParseField RESULTS_FIELD = new ParseField("overall_buckets");

    /**
     * Result type
     */
    public static final String RESULT_TYPE_VALUE = "overall_bucket";

    // The a[0]..a[3] indices map positionally onto the constructor-arg declarations
    // in the static block below: timestamp, bucket_span, overall_score, is_interim.
    public static final ConstructingObjectParser<OverallBucket, Void> PARSER =
            new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true,
                    a -> new OverallBucket((Date) a[0], (long) a[1], (double) a[2], (boolean) a[3]));

    static {
        // The timestamp may arrive either as epoch millis or as an ISO-8601 instant string.
        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
                return new Date(p.longValue());
            } else if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
            }
            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
                    + Result.TIMESTAMP.getPreferredName() + "]");
        }, Result.TIMESTAMP, ObjectParser.ValueType.VALUE);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), OVERALL_SCORE);
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), Result.IS_INTERIM);
        // jobs is optional and set post-construction, hence a setter rather than a constructor arg.
        PARSER.declareObjectArray(OverallBucket::setJobs, JobInfo.PARSER, JOBS);
    }

    private final Date timestamp;
    private final long bucketSpan;
    private final double overallScore;
    private final boolean isInterim;
    // Privately mutable: populated by the parser via setJobs; empty when absent.
    private List<JobInfo> jobs = Collections.emptyList();

    // Package-private: instances are built by the PARSER.
    OverallBucket(Date timestamp, long bucketSpan, double overallScore, boolean isInterim) {
        this.timestamp = Objects.requireNonNull(timestamp);
        this.bucketSpan = bucketSpan;
        this.overallScore = overallScore;
        this.isInterim = isInterim;
    }

    // Serialises this bucket as a single X-Content object.
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        // Writes both a millisecond "timestamp" field and a human-readable "timestamp_string" variant.
        builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
        builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
        builder.field(OVERALL_SCORE.getPreferredName(), overallScore);
        builder.field(JOBS.getPreferredName(), jobs);
        builder.field(Result.IS_INTERIM.getPreferredName(), isInterim);
        builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
        builder.endObject();
        return builder;
    }

    // Start time of the overall bucket; never null.
    public Date getTimestamp() {
        return timestamp;
    }

    /**
     * Bucketspan expressed in seconds
     */
    public long getBucketSpan() {
        return bucketSpan;
    }

    // Aggregated anomaly score across the contributing jobs.
    public double getOverallScore() {
        return overallScore;
    }

    // Per-job score contributions; unmodifiable (empty if the field was absent).
    public List<JobInfo> getJobs() {
        return jobs;
    }

    // Package-private mutator used only by the parser; stores an unmodifiable view.
    void setJobs(List<JobInfo> jobs) {
        this.jobs = Collections.unmodifiableList(jobs);
    }

    // True if this is an interim result, produced before the bucket was finalised.
    public boolean isInterim() {
        return isInterim;
    }

    @Override
    public int hashCode() {
        // Hashes the same fields compared in equals().
        return Objects.hash(timestamp, bucketSpan, overallScore, jobs, isInterim);
    }

    /**
     * Compare all the fields and embedded anomaly records (if any)
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        OverallBucket that = (OverallBucket) other;

        return Objects.equals(this.timestamp, that.timestamp)
                && this.bucketSpan == that.bucketSpan
                && this.overallScore == that.overallScore
                && Objects.equals(this.jobs, that.jobs)
                && this.isInterim == that.isInterim;
    }

    /**
     * A single job's contribution to an overall bucket: the job id and its
     * maximum anomaly score in the bucket. Immutable; ordered by job id, then score.
     */
    public static class JobInfo implements ToXContentObject, Comparable<JobInfo> {

        private static final ParseField MAX_ANOMALY_SCORE = new ParseField("max_anomaly_score");

        // a[0] = job_id, a[1] = max_anomaly_score, matching the declarations below.
        public static final ConstructingObjectParser<JobInfo, Void> PARSER =
                new ConstructingObjectParser<>("job_info", true, a -> new JobInfo((String) a[0], (double) a[1]));

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
            PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_ANOMALY_SCORE);
        }

        private final String jobId;
        private final double maxAnomalyScore;

        // Package-private: instances are built by the PARSER. jobId must be non-null.
        JobInfo(String jobId, double maxAnomalyScore) {
            this.jobId = Objects.requireNonNull(jobId);
            this.maxAnomalyScore = maxAnomalyScore;
        }

        public String getJobId() {
            return jobId;
        }

        public double getMaxAnomalyScore() {
            return maxAnomalyScore;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Result.JOB_ID.getPreferredName(), jobId);
            builder.field(MAX_ANOMALY_SCORE.getPreferredName(), maxAnomalyScore);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, maxAnomalyScore);
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other == null || getClass() != other.getClass()) {
                return false;
            }
            JobInfo that = (JobInfo) other;
            return Objects.equals(this.jobId, that.jobId) && this.maxAnomalyScore == that.maxAnomalyScore;
        }

        // Orders primarily by job id, breaking ties on the max anomaly score.
        @Override
        public int compareTo(JobInfo other) {
            int result = this.jobId.compareTo(other.jobId);
            if (result == 0) {
                result = Double.compare(this.maxAnomalyScore, other.maxAnomalyScore);
            }
            return result;
        }
    }
}
|
|
@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
|
||||
/**
 * Contains common attributes for results.
 *
 * Pure constants holder shared by the ML result classes (bucket, record,
 * influencer, overall bucket); never instantiated.
 */
public final class Result {

    /**
     * Serialisation fields
     */
    public static final ParseField JOB_ID = new ParseField("job_id");
    public static final ParseField TYPE = new ParseField("result");
    public static final ParseField RESULT_TYPE = new ParseField("result_type");
    public static final ParseField TIMESTAMP = new ParseField("timestamp");
    public static final ParseField IS_INTERIM = new ParseField("is_interim");

    // Utility class: suppress instantiation.
    private Result() {
    }
}
|
|
@ -0,0 +1,106 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class AnomalyCauseTests extends AbstractXContentTestCase<AnomalyCause> {
|
||||
|
||||
@Override
|
||||
protected AnomalyCause createTestInstance() {
|
||||
AnomalyCause anomalyCause = new AnomalyCause();
|
||||
if (randomBoolean()) {
|
||||
int size = randomInt(10);
|
||||
List<Double> actual = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
actual.add(randomDouble());
|
||||
}
|
||||
anomalyCause.setActual(actual);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
int size = randomInt(10);
|
||||
List<Double> typical = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
typical.add(randomDouble());
|
||||
}
|
||||
anomalyCause.setTypical(typical);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setByFieldName(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setByFieldValue(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setCorrelatedByFieldValue(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setOverFieldName(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setOverFieldValue(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setPartitionFieldName(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setPartitionFieldValue(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setFunction(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setFunctionDescription(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setFieldName(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyCause.setProbability(randomDouble());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
int size = randomInt(10);
|
||||
List<Influence> influencers = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
int fieldValuesSize = randomInt(10);
|
||||
List<String> fieldValues = new ArrayList<>(fieldValuesSize);
|
||||
for (int j = 0; j < fieldValuesSize; j++) {
|
||||
fieldValues.add(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
influencers.add(new Influence(randomAlphaOfLengthBetween(1, 20), fieldValues));
|
||||
}
|
||||
anomalyCause.setInfluencers(influencers);
|
||||
}
|
||||
return anomalyCause;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AnomalyCause doParseInstance(XContentParser parser) {
|
||||
return AnomalyCause.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,93 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
public class AnomalyRecordTests extends AbstractXContentTestCase<AnomalyRecord> {
|
||||
|
||||
@Override
|
||||
protected AnomalyRecord createTestInstance() {
|
||||
return createTestInstance("foo");
|
||||
}
|
||||
|
||||
public AnomalyRecord createTestInstance(String jobId) {
|
||||
AnomalyRecord anomalyRecord = new AnomalyRecord(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong());
|
||||
anomalyRecord.setActual(Collections.singletonList(randomDouble()));
|
||||
anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
|
||||
anomalyRecord.setProbability(randomDouble());
|
||||
anomalyRecord.setRecordScore(randomDouble());
|
||||
anomalyRecord.setInitialRecordScore(randomDouble());
|
||||
anomalyRecord.setInterim(randomBoolean());
|
||||
if (randomBoolean()) {
|
||||
anomalyRecord.setFieldName(randomAlphaOfLength(12));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyRecord.setByFieldName(randomAlphaOfLength(12));
|
||||
anomalyRecord.setByFieldValue(randomAlphaOfLength(12));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyRecord.setPartitionFieldName(randomAlphaOfLength(12));
|
||||
anomalyRecord.setPartitionFieldValue(randomAlphaOfLength(12));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
anomalyRecord.setOverFieldName(randomAlphaOfLength(12));
|
||||
anomalyRecord.setOverFieldValue(randomAlphaOfLength(12));
|
||||
}
|
||||
anomalyRecord.setFunction(randomAlphaOfLengthBetween(5, 20));
|
||||
anomalyRecord.setFunctionDescription(randomAlphaOfLengthBetween(5, 20));
|
||||
if (randomBoolean()) {
|
||||
anomalyRecord.setCorrelatedByFieldValue(randomAlphaOfLength(16));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
int count = randomIntBetween(0, 9);
|
||||
List<Influence> influences = new ArrayList<>();
|
||||
for (int i=0; i<count; i++) {
|
||||
influences.add(new Influence(randomAlphaOfLength(8), Collections.singletonList(randomAlphaOfLengthBetween(1, 28))));
|
||||
}
|
||||
anomalyRecord.setInfluencers(influences);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
int count = randomIntBetween(0, 9);
|
||||
List<AnomalyCause> causes = new ArrayList<>();
|
||||
for (int i=0; i<count; i++) {
|
||||
causes.add(new AnomalyCauseTests().createTestInstance());
|
||||
}
|
||||
anomalyRecord.setCauses(causes);
|
||||
}
|
||||
|
||||
return anomalyRecord;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AnomalyRecord doParseInstance(XContentParser parser) {
|
||||
return AnomalyRecord.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,145 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
/**
 * Round-trip and equality tests for {@link BucketInfluencer}, covering each
 * field's contribution to equals() individually.
 */
public class BucketInfluencerTests extends AbstractXContentTestCase<BucketInfluencer> {

    // Random instance: required constructor args always set, each optional field randomly set.
    @Override
    protected BucketInfluencer createTestInstance() {
        BucketInfluencer bucketInfluencer = new BucketInfluencer(randomAlphaOfLengthBetween(1, 20), new Date(randomNonNegativeLong()),
                randomNonNegativeLong());
        if (randomBoolean()) {
            bucketInfluencer.setAnomalyScore(randomDouble());
        }
        if (randomBoolean()) {
            bucketInfluencer.setInfluencerFieldName(randomAlphaOfLengthBetween(1, 20));
        }
        if (randomBoolean()) {
            bucketInfluencer.setInitialAnomalyScore(randomDouble());
        }
        if (randomBoolean()) {
            bucketInfluencer.setProbability(randomDouble());
        }
        if (randomBoolean()) {
            bucketInfluencer.setRawAnomalyScore(randomDouble());
        }
        if (randomBoolean()) {
            bucketInfluencer.setIsInterim(randomBoolean());
        }
        return bucketInfluencer;
    }

    @Override
    protected BucketInfluencer doParseInstance(XContentParser parser) {
        return BucketInfluencer.PARSER.apply(parser, null);
    }

    // equals must be null-safe.
    public void testEquals_GivenNull() {
        assertFalse(new BucketInfluencer(randomAlphaOfLengthBetween(1, 20), new Date(), 600).equals(null));
    }

    // equals uses a strict class check, so unrelated types never compare equal.
    public void testEquals_GivenDifferentClass() {
        assertFalse(new BucketInfluencer(randomAlphaOfLengthBetween(1, 20), new Date(), 600).equals("a string"));
    }

    // Fully-populated identical instances are equal both ways with matching hash codes.
    public void testEquals_GivenEqualInfluencers() {
        BucketInfluencer bucketInfluencer1 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer1.setAnomalyScore(42.0);
        bucketInfluencer1.setInfluencerFieldName("foo");
        bucketInfluencer1.setInitialAnomalyScore(67.3);
        bucketInfluencer1.setProbability(0.0003);
        bucketInfluencer1.setRawAnomalyScore(3.14);

        BucketInfluencer bucketInfluencer2 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer2.setAnomalyScore(42.0);
        bucketInfluencer2.setInfluencerFieldName("foo");
        bucketInfluencer2.setInitialAnomalyScore(67.3);
        bucketInfluencer2.setProbability(0.0003);
        bucketInfluencer2.setRawAnomalyScore(3.14);

        assertTrue(bucketInfluencer1.equals(bucketInfluencer2));
        assertTrue(bucketInfluencer2.equals(bucketInfluencer1));
        assertEquals(bucketInfluencer1.hashCode(), bucketInfluencer2.hashCode());
    }

    // A differing anomaly score alone breaks equality (symmetrically).
    public void testEquals_GivenDifferentAnomalyScore() {
        BucketInfluencer bucketInfluencer1 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer1.setAnomalyScore(42.0);

        BucketInfluencer bucketInfluencer2 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer2.setAnomalyScore(42.1);

        assertFalse(bucketInfluencer1.equals(bucketInfluencer2));
        assertFalse(bucketInfluencer2.equals(bucketInfluencer1));
    }

    // A differing influencer field name alone breaks equality.
    public void testEquals_GivenDifferentFieldName() {
        BucketInfluencer bucketInfluencer1 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer1.setInfluencerFieldName("foo");

        BucketInfluencer bucketInfluencer2 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer2.setInfluencerFieldName("bar");

        assertFalse(bucketInfluencer1.equals(bucketInfluencer2));
        assertFalse(bucketInfluencer2.equals(bucketInfluencer1));
    }

    // A differing initial anomaly score alone breaks equality.
    public void testEquals_GivenDifferentInitialAnomalyScore() {
        BucketInfluencer bucketInfluencer1 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer1.setInitialAnomalyScore(42.0);

        BucketInfluencer bucketInfluencer2 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer2.setInitialAnomalyScore(42.1);

        assertFalse(bucketInfluencer1.equals(bucketInfluencer2));
        assertFalse(bucketInfluencer2.equals(bucketInfluencer1));
    }

    // A differing raw anomaly score alone breaks equality.
    public void testEquals_GivenRawAnomalyScore() {
        BucketInfluencer bucketInfluencer1 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer1.setRawAnomalyScore(42.0);

        BucketInfluencer bucketInfluencer2 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer2.setRawAnomalyScore(42.1);

        assertFalse(bucketInfluencer1.equals(bucketInfluencer2));
        assertFalse(bucketInfluencer2.equals(bucketInfluencer1));
    }

    // A differing probability alone breaks equality.
    public void testEquals_GivenDifferentProbability() {
        BucketInfluencer bucketInfluencer1 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer1.setProbability(0.001);

        BucketInfluencer bucketInfluencer2 = new BucketInfluencer("foo", new Date(123), 600);
        bucketInfluencer2.setProbability(0.002);

        assertFalse(bucketInfluencer1.equals(bucketInfluencer2));
        assertFalse(bucketInfluencer2.equals(bucketInfluencer1));
    }

    // Unknown fields must be tolerated: the production parsers are lenient.
    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }
}
|
|
@ -0,0 +1,222 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
public class BucketTests extends AbstractXContentTestCase<Bucket> {
|
||||
|
||||
// Delegates to the parameterised factory with a fixed job id.
@Override
public Bucket createTestInstance() {
    return createTestInstance("foo");
}
|
||||
|
||||
/**
 * Builds a {@link Bucket} for the given job with each optional field group
 * randomly included, exercising the lenient parser across combinations.
 */
public Bucket createTestInstance(String jobId) {
    Bucket bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong());
    if (randomBoolean()) {
        bucket.setAnomalyScore(randomDouble());
    }
    if (randomBoolean()) {
        // 0-10 fully-populated bucket influencers sharing this bucket's job id.
        int size = randomInt(10);
        List<BucketInfluencer> bucketInfluencers = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            BucketInfluencer bucketInfluencer = new BucketInfluencer(jobId, new Date(), 600);
            bucketInfluencer.setAnomalyScore(randomDouble());
            bucketInfluencer.setInfluencerFieldName(randomAlphaOfLengthBetween(1, 20));
            bucketInfluencer.setInitialAnomalyScore(randomDouble());
            bucketInfluencer.setProbability(randomDouble());
            bucketInfluencer.setRawAnomalyScore(randomDouble());
            bucketInfluencers.add(bucketInfluencer);
        }
        bucket.setBucketInfluencers(bucketInfluencers);
    }
    if (randomBoolean()) {
        bucket.setEventCount(randomNonNegativeLong());
    }
    if (randomBoolean()) {
        bucket.setInitialAnomalyScore(randomDouble());
    }
    if (randomBoolean()) {
        bucket.setInterim(randomBoolean());
    }
    if (randomBoolean()) {
        bucket.setProcessingTimeMs(randomLong());
    }
    if (randomBoolean()) {
        // 0-10 anomaly records, delegated to the AnomalyRecord test factory.
        int size = randomInt(10);
        List<AnomalyRecord> records = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            AnomalyRecord anomalyRecord = new AnomalyRecordTests().createTestInstance(jobId);
            records.add(anomalyRecord);
        }
        bucket.setRecords(records);
    }
    if (randomBoolean()) {
        int size = randomInt(10);
        List<String> scheduledEvents = new ArrayList<>(size);
        IntStream.range(0, size).forEach(i -> scheduledEvents.add(randomAlphaOfLength(20)));
        bucket.setScheduledEvents(scheduledEvents);
    }
    return bucket;
}
|
||||
|
||||
// Parses a Bucket back from X-Content via the class's lenient parser.
@Override
protected Bucket doParseInstance(XContentParser parser) {
    return Bucket.PARSER.apply(parser, null);
}
|
||||
|
||||
// equals uses a strict class check, so unrelated types never compare equal.
public void testEquals_GivenDifferentClass() {
    Bucket bucket = new Bucket("foo", new Date(randomLong()), randomNonNegativeLong());
    assertFalse(bucket.equals("a string"));
}
|
||||
|
||||
// Two buckets with identical constructor args and no optional fields are equal both ways.
public void testEquals_GivenTwoDefaultBuckets() {
    Bucket bucket1 = new Bucket("foo", new Date(123), 123);
    Bucket bucket2 = new Bucket("foo", new Date(123), 123);

    assertTrue(bucket1.equals(bucket2));
    assertTrue(bucket2.equals(bucket1));
}
|
||||
|
||||
// A differing anomaly score alone breaks equality (symmetrically).
public void testEquals_GivenDifferentAnomalyScore() {
    Bucket bucket1 = new Bucket("foo", new Date(123), 123);
    bucket1.setAnomalyScore(3.0);
    Bucket bucket2 = new Bucket("foo", new Date(123), 123);
    bucket2.setAnomalyScore(2.0);

    assertFalse(bucket1.equals(bucket2));
    assertFalse(bucket2.equals(bucket1));
}
|
||||
|
||||
// Distinct Date objects with the same epoch millis still compare equal.
public void testEquals_GivenSameDates() {
    Bucket b1 = new Bucket("foo", new Date(1234567890L), 123);
    Bucket b2 = new Bucket("foo", new Date(1234567890L), 123);
    assertTrue(b1.equals(b2));
}
|
||||
|
||||
// A differing event count alone breaks equality.
public void testEquals_GivenDifferentEventCount() {
    Bucket bucket1 = new Bucket("foo", new Date(123), 123);
    bucket1.setEventCount(3);
    Bucket bucket2 = new Bucket("foo", new Date(123), 123);
    bucket2.setEventCount(100);

    assertFalse(bucket1.equals(bucket2));
    assertFalse(bucket2.equals(bucket1));
}
|
||||
|
||||
// A populated records list vs an empty one breaks equality.
public void testEquals_GivenOneHasRecordsAndTheOtherDoesNot() {
    Bucket bucket1 = new Bucket("foo", new Date(123), 123);
    bucket1.setRecords(Collections.singletonList(new AnomalyRecord("foo", new Date(123), 123)));
    Bucket bucket2 = new Bucket("foo", new Date(123), 123);
    bucket2.setRecords(Collections.emptyList());

    assertFalse(bucket1.equals(bucket2));
    assertFalse(bucket2.equals(bucket1));
}
|
||||
|
||||
// Record lists of different lengths break equality.
public void testEquals_GivenDifferentNumberOfRecords() {
    Bucket bucket1 = new Bucket("foo", new Date(123), 123);
    bucket1.setRecords(Collections.singletonList(new AnomalyRecord("foo", new Date(123), 123)));
    Bucket bucket2 = new Bucket("foo", new Date(123), 123);
    bucket2.setRecords(Arrays.asList(new AnomalyRecord("foo", new Date(123), 123),
            new AnomalyRecord("foo", new Date(123), 123)));

    assertFalse(bucket1.equals(bucket2));
    assertFalse(bucket2.equals(bucket1));
}
|
||||
|
||||
public void testEquals_GivenSameNumberOfRecordsButDifferent() {
|
||||
AnomalyRecord anomalyRecord1 = new AnomalyRecord("foo", new Date(123), 123);
|
||||
anomalyRecord1.setRecordScore(1.0);
|
||||
AnomalyRecord anomalyRecord2 = new AnomalyRecord("foo", new Date(123), 123);
|
||||
anomalyRecord1.setRecordScore(2.0);
|
||||
|
||||
Bucket bucket1 = new Bucket("foo", new Date(123), 123);
|
||||
bucket1.setRecords(Collections.singletonList(anomalyRecord1));
|
||||
Bucket bucket2 = new Bucket("foo", new Date(123), 123);
|
||||
bucket2.setRecords(Collections.singletonList(anomalyRecord2));
|
||||
|
||||
assertFalse(bucket1.equals(bucket2));
|
||||
assertFalse(bucket2.equals(bucket1));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentIsInterim() {
|
||||
Bucket bucket1 = new Bucket("foo", new Date(123), 123);
|
||||
bucket1.setInterim(true);
|
||||
Bucket bucket2 = new Bucket("foo", new Date(123), 123);
|
||||
bucket2.setInterim(false);
|
||||
|
||||
assertFalse(bucket1.equals(bucket2));
|
||||
assertFalse(bucket2.equals(bucket1));
|
||||
}
|
||||
|
||||
public void testEquals_GivenDifferentBucketInfluencers() {
|
||||
Bucket bucket1 = new Bucket("foo", new Date(123), 123);
|
||||
BucketInfluencer influencer1 = new BucketInfluencer("foo", new Date(123), 123);
|
||||
influencer1.setInfluencerFieldName("foo");
|
||||
bucket1.setBucketInfluencers(Collections.singletonList(influencer1));
|
||||
|
||||
Bucket bucket2 = new Bucket("foo", new Date(123), 123);
|
||||
BucketInfluencer influencer2 = new BucketInfluencer("foo", new Date(123), 123);
|
||||
influencer2.setInfluencerFieldName("bar");
|
||||
bucket2.setBucketInfluencers(Collections.singletonList(influencer2));
|
||||
|
||||
assertFalse(bucket1.equals(bucket2));
|
||||
assertFalse(bucket2.equals(bucket1));
|
||||
}
|
||||
|
||||
public void testEquals_GivenEqualBuckets() {
|
||||
AnomalyRecord record = new AnomalyRecord("job_id", new Date(123), 123);
|
||||
BucketInfluencer bucketInfluencer = new BucketInfluencer("foo", new Date(123), 123);
|
||||
Date date = new Date();
|
||||
|
||||
Bucket bucket1 = new Bucket("foo", date, 123);
|
||||
bucket1.setAnomalyScore(42.0);
|
||||
bucket1.setInitialAnomalyScore(92.0);
|
||||
bucket1.setEventCount(134);
|
||||
bucket1.setInterim(true);
|
||||
bucket1.setRecords(Collections.singletonList(record));
|
||||
bucket1.setBucketInfluencers(Collections.singletonList(bucketInfluencer));
|
||||
|
||||
Bucket bucket2 = new Bucket("foo", date, 123);
|
||||
bucket2.setAnomalyScore(42.0);
|
||||
bucket2.setInitialAnomalyScore(92.0);
|
||||
bucket2.setEventCount(134);
|
||||
bucket2.setInterim(true);
|
||||
bucket2.setRecords(Collections.singletonList(record));
|
||||
bucket2.setBucketInfluencers(Collections.singletonList(bucketInfluencer));
|
||||
|
||||
assertTrue(bucket1.equals(bucket2));
|
||||
assertTrue(bucket2.equals(bucket1));
|
||||
assertEquals(bucket1.hashCode(), bucket2.hashCode());
|
||||
}
|
||||
|
||||
@Override
protected boolean supportsUnknownFields() {
    // The results parsers are lenient (see commit message: "lenient parsers to
    // maximize compatibility across versions"), so unknown fields injected by
    // the test framework must be tolerated.
    return true;
}
|
||||
}
|
|
@ -0,0 +1,132 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
public class CategoryDefinitionTests extends AbstractXContentTestCase<CategoryDefinition> {
|
||||
|
||||
public CategoryDefinition createTestInstance(String jobId) {
|
||||
CategoryDefinition categoryDefinition = new CategoryDefinition(jobId);
|
||||
categoryDefinition.setCategoryId(randomLong());
|
||||
categoryDefinition.setTerms(randomAlphaOfLength(10));
|
||||
categoryDefinition.setRegex(randomAlphaOfLength(10));
|
||||
categoryDefinition.setMaxMatchingLength(randomLong());
|
||||
categoryDefinition.setExamples(Arrays.asList(generateRandomStringArray(10, 10, false)));
|
||||
if (randomBoolean()) {
|
||||
categoryDefinition.setGrokPattern(randomAlphaOfLength(50));
|
||||
}
|
||||
return categoryDefinition;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CategoryDefinition createTestInstance() {
|
||||
return createTestInstance(randomAlphaOfLength(10));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CategoryDefinition doParseInstance(XContentParser parser) {
|
||||
return CategoryDefinition.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
public void testEquals_GivenSameObject() {
|
||||
CategoryDefinition category = new CategoryDefinition(randomAlphaOfLength(10));
|
||||
|
||||
assertTrue(category.equals(category));
|
||||
}
|
||||
|
||||
public void testEquals_GivenObjectOfDifferentClass() {
|
||||
CategoryDefinition category = new CategoryDefinition(randomAlphaOfLength(10));
|
||||
|
||||
assertFalse(category.equals("a string"));
|
||||
}
|
||||
|
||||
public void testEquals_GivenEqualCategoryDefinitions() {
|
||||
CategoryDefinition category1 = createFullyPopulatedCategoryDefinition();
|
||||
CategoryDefinition category2 = createFullyPopulatedCategoryDefinition();
|
||||
|
||||
assertTrue(category1.equals(category2));
|
||||
assertTrue(category2.equals(category1));
|
||||
assertEquals(category1.hashCode(), category2.hashCode());
|
||||
}
|
||||
|
||||
public void testEquals_GivenCategoryDefinitionsWithDifferentIds() {
|
||||
CategoryDefinition category1 = createFullyPopulatedCategoryDefinition();
|
||||
CategoryDefinition category2 = createFullyPopulatedCategoryDefinition();
|
||||
category2.setCategoryId(category1.getCategoryId() + 1);
|
||||
|
||||
assertFalse(category1.equals(category2));
|
||||
assertFalse(category2.equals(category1));
|
||||
}
|
||||
|
||||
public void testEquals_GivenCategoryDefinitionsWithDifferentTerms() {
|
||||
CategoryDefinition category1 = createFullyPopulatedCategoryDefinition();
|
||||
CategoryDefinition category2 = createFullyPopulatedCategoryDefinition();
|
||||
category2.setTerms(category1.getTerms() + " additional");
|
||||
|
||||
assertFalse(category1.equals(category2));
|
||||
assertFalse(category2.equals(category1));
|
||||
}
|
||||
|
||||
public void testEquals_GivenCategoryDefinitionsWithDifferentRegex() {
|
||||
CategoryDefinition category1 = createFullyPopulatedCategoryDefinition();
|
||||
CategoryDefinition category2 = createFullyPopulatedCategoryDefinition();
|
||||
category2.setRegex(category1.getRegex() + ".*additional.*");
|
||||
|
||||
assertFalse(category1.equals(category2));
|
||||
assertFalse(category2.equals(category1));
|
||||
}
|
||||
|
||||
public void testEquals_GivenCategoryDefinitionsWithDifferentMaxMatchingLength() {
|
||||
CategoryDefinition category1 = createFullyPopulatedCategoryDefinition();
|
||||
CategoryDefinition category2 = createFullyPopulatedCategoryDefinition();
|
||||
category2.setMaxMatchingLength(category1.getMaxMatchingLength() + 1);
|
||||
|
||||
assertFalse(category1.equals(category2));
|
||||
assertFalse(category2.equals(category1));
|
||||
}
|
||||
|
||||
public void testEquals_GivenCategoryDefinitionsWithDifferentExamples() {
|
||||
CategoryDefinition category1 = createFullyPopulatedCategoryDefinition();
|
||||
CategoryDefinition category2 = createFullyPopulatedCategoryDefinition();
|
||||
category2.addExample("additional");
|
||||
|
||||
assertFalse(category1.equals(category2));
|
||||
assertFalse(category2.equals(category1));
|
||||
}
|
||||
|
||||
private static CategoryDefinition createFullyPopulatedCategoryDefinition() {
|
||||
CategoryDefinition category = new CategoryDefinition("jobName");
|
||||
category.setCategoryId(42);
|
||||
category.setTerms("foo bar");
|
||||
category.setRegex(".*?foo.*?bar.*");
|
||||
category.setMaxMatchingLength(120L);
|
||||
category.addExample("foo");
|
||||
category.addExample("bar");
|
||||
return category;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class InfluenceTests extends AbstractXContentTestCase<Influence> {
|
||||
|
||||
@Override
|
||||
protected Influence createTestInstance() {
|
||||
int size = randomInt(10);
|
||||
List<String> fieldValues = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
fieldValues.add(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
return new Influence(randomAlphaOfLengthBetween(1, 30), fieldValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Influence doParseInstance(XContentParser parser) {
|
||||
return Influence.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Date;
|
||||
|
||||
public class InfluencerTests extends AbstractXContentTestCase<Influencer> {
|
||||
|
||||
public Influencer createTestInstance(String jobId) {
|
||||
Influencer influencer = new Influencer(jobId, randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20),
|
||||
new Date(randomNonNegativeLong()), randomNonNegativeLong());
|
||||
influencer.setInterim(randomBoolean());
|
||||
influencer.setInfluencerScore(randomDouble());
|
||||
influencer.setInitialInfluencerScore(randomDouble());
|
||||
influencer.setProbability(randomDouble());
|
||||
return influencer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Influencer createTestInstance() {
|
||||
return createTestInstance(randomAlphaOfLengthBetween(1, 20));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Influencer doParseInstance(XContentParser parser) {
|
||||
return Influencer.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
public void testToXContentDoesNotIncludeNameValueFieldWhenReservedWord() throws IOException {
|
||||
Influencer influencer = new Influencer("foo", Influencer.INFLUENCER_SCORE.getPreferredName(), "bar", new Date(), 300L);
|
||||
BytesReference bytes = XContentHelper.toXContent(influencer, XContentType.JSON, false);
|
||||
XContentParser parser = createParser(XContentType.JSON.xContent(), bytes);
|
||||
Object serialisedFieldValue = parser.map().get(Influencer.INFLUENCER_SCORE.getPreferredName());
|
||||
assertNotNull(serialisedFieldValue);
|
||||
assertNotEquals("bar", serialisedFieldValue);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.ml.job.results;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.lessThan;
|
||||
|
||||
public class OverallBucketTests extends AbstractXContentTestCase<OverallBucket> {
|
||||
|
||||
@Override
|
||||
protected OverallBucket createTestInstance() {
|
||||
int jobCount = randomIntBetween(0, 10);
|
||||
List<OverallBucket.JobInfo> jobs = new ArrayList<>(jobCount);
|
||||
for (int i = 0; i < jobCount; ++i) {
|
||||
jobs.add(new OverallBucket.JobInfo(randomAlphaOfLength(10), randomDoubleBetween(0.0, 100.0, true)));
|
||||
}
|
||||
OverallBucket overallBucket = new OverallBucket(new Date(randomNonNegativeLong()),
|
||||
randomIntBetween(60, 24 * 3600),
|
||||
randomDoubleBetween(0.0, 100.0, true),
|
||||
randomBoolean());
|
||||
overallBucket.setJobs(jobs);
|
||||
return overallBucket;
|
||||
}
|
||||
|
||||
public void testCompareTo() {
|
||||
OverallBucket.JobInfo jobInfo1 = new OverallBucket.JobInfo("aaa", 1.0);
|
||||
OverallBucket.JobInfo jobInfo2 = new OverallBucket.JobInfo("aaa", 3.0);
|
||||
OverallBucket.JobInfo jobInfo3 = new OverallBucket.JobInfo("bbb", 1.0);
|
||||
assertThat(jobInfo1.compareTo(jobInfo1), equalTo(0));
|
||||
assertThat(jobInfo1.compareTo(jobInfo2), lessThan(0));
|
||||
assertThat(jobInfo1.compareTo(jobInfo3), lessThan(0));
|
||||
assertThat(jobInfo2.compareTo(jobInfo3), lessThan(0));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected OverallBucket doParseInstance(XContentParser parser) {
|
||||
return OverallBucket.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue