[ML] Rename score fields (elastic/x-pack-elasticsearch#769)

* Rename normalized_probability to record_score
* Rename anomaly_score on influencers to influencer_score
* Remove record level anomaly_score
* Remove bucket level max_normalized_probability

Relates elastic/x-pack-elasticsearch#678
Original commit: elastic/x-pack-elasticsearch@c2c05e73d8

parent 47e28669b3
commit f96a40c61a
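As a quick reference (not part of the commit itself), the result-field renames described above can be summarised as an old-name-to-new-name map. This is an illustrative sketch only; the class and constant names are hypothetical and it assumes Java 9+ for Map.of:

    import java.util.Map;

    // Illustrative only: summarises the result-field renames made by this commit.
    public final class ScoreFieldRenames {
        // Record-level result fields
        public static final Map<String, String> RECORD_FIELDS = Map.of(
                "normalized_probability", "record_score",
                "initial_normalized_probability", "initial_record_score");

        // Influencer-level result fields
        public static final Map<String, String> INFLUENCER_FIELDS = Map.of(
                "anomaly_score", "influencer_score",
                "initial_anomaly_score", "initial_influencer_score");

        // Removed outright: record-level "anomaly_score" and bucket-level "max_normalized_probability".
        private ScoreFieldRenames() {}
    }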
@@ -69,7 +69,6 @@ public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucket
-    public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("max_normalized_probability");

@@ -86,7 +85,6 @@
-        PARSER.declareDouble(Request::setMaxNormalizedProbability, MAX_NORMALIZED_PROBABILITY);

@@ -107,7 +105,6 @@
-        private Double maxNormalizedProbability;

@@ -121,14 +118,13 @@
-            if (pageParams != null || start != null || end != null || anomalyScore != null || maxNormalizedProbability != null) {
+            if (pageParams != null || start != null || end != null || anomalyScore != null) {
-                        + ANOMALY_SCORE.getPreferredName() + ","
-                        + MAX_NORMALIZED_PROBABILITY.getPreferredName() + "]");
+                        + ANOMALY_SCORE.getPreferredName() + "]");

@@ -213,18 +209,6 @@
-        public double getMaxNormalizedProbability() {
-            return maxNormalizedProbability;
-        }
-
-        public void setMaxNormalizedProbability(double maxNormalizedProbability) {
-            if (timestamp != null) {
-                throw new IllegalArgumentException("Param [" + MAX_NORMALIZED_PROBABILITY.getPreferredName() + "] is incompatible with ["
-                        + TIMESTAMP.getPreferredName() + "].");
-            }
-            this.maxNormalizedProbability = maxNormalizedProbability;
-        }
-

@@ -241,7 +225,6 @@
-            maxNormalizedProbability = in.readOptionalDouble();

@@ -256,7 +239,6 @@
-            out.writeOptionalDouble(maxNormalizedProbability);

@@ -284,9 +266,6 @@
-            if (maxNormalizedProbability != null) {
-                builder.field(MAX_NORMALIZED_PROBABILITY.getPreferredName(), maxNormalizedProbability);
-            }

@@ -294,7 +273,7 @@
             return Objects.hash(jobId, timestamp, partitionValue, expand, includeInterim,
-                    anomalyScore, maxNormalizedProbability, pageParams, start, end);
+                    anomalyScore, pageParams, start, end);

@@ -312,7 +291,6 @@
-                    Objects.equals(maxNormalizedProbability, other.maxNormalizedProbability) &&

@@ -410,7 +388,6 @@
-                    .normalizedProbabilityThreshold(request.maxNormalizedProbability)
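To illustrate the effect on the buckets request (a sketch, not code from this commit; it assumes a GetBucketsAction.Request instance named request obtained elsewhere):

    // Bucket queries still filter on anomaly_score...
    request.setAnomalyScore(75.0);
    // ...but the max_normalized_probability filter is gone:
    // request.setMaxNormalizedProbability(90.0);  // no longer compiles after this commit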
@@ -96,7 +96,7 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI
-        private String sort = Influencer.ANOMALY_SCORE.getPreferredName();
+        private String sort = Influencer.INFLUENCER_SCORE.getPreferredName();
@@ -66,10 +66,9 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord
-    public static final ParseField ANOMALY_SCORE_FILTER = new ParseField("anomaly_score");
+    public static final ParseField RECORD_SCORE_FILTER = new ParseField("record_score");
-    public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("normalized_probability");

@@ -83,8 +82,7 @@
-        PARSER.declareDouble(Request::setAnomalyScore, ANOMALY_SCORE_FILTER);
+        PARSER.declareDouble(Request::setRecordScore, RECORD_SCORE_FILTER);
-        PARSER.declareDouble(Request::setMaxNormalizedProbability, MAX_NORMALIZED_PROBABILITY);

@@ -100,10 +98,9 @@
-        private double anomalyScoreFilter = 0.0;
+        private double recordScoreFilter = 0.0;
-        private String sort = Influencer.ANOMALY_SCORE.getPreferredName();
+        private String sort = Influencer.INFLUENCER_SCORE.getPreferredName();
-        private double maxNormalizedProbability = 0.0;

@@ -156,12 +153,12 @@
-        public double getAnomalyScoreFilter() {
-            return anomalyScoreFilter;
+        public double getRecordScoreFilter() {
+            return recordScoreFilter;
         }

-        public void setAnomalyScore(double anomalyScoreFilter) {
-            this.anomalyScoreFilter = anomalyScoreFilter;
+        public void setRecordScore(double recordScoreFilter) {
+            this.recordScoreFilter = recordScoreFilter;
         }

@@ -172,14 +169,6 @@
-        public double getMaxNormalizedProbability() {
-            return maxNormalizedProbability;
-        }
-
-        public void setMaxNormalizedProbability(double maxNormalizedProbability) {
-            this.maxNormalizedProbability = maxNormalizedProbability;
-        }
-

@@ -203,8 +192,7 @@
-            anomalyScoreFilter = in.readDouble();
+            recordScoreFilter = in.readDouble();
-            maxNormalizedProbability = in.readDouble();

@@ -218,8 +206,7 @@
-            out.writeDouble(anomalyScoreFilter);
+            out.writeDouble(recordScoreFilter);
-            out.writeDouble(maxNormalizedProbability);

@@ -231,9 +218,8 @@
-            builder.field(ANOMALY_SCORE_FILTER.getPreferredName(), anomalyScoreFilter);
+            builder.field(RECORD_SCORE_FILTER.getPreferredName(), recordScoreFilter);
-            builder.field(MAX_NORMALIZED_PROBABILITY.getPreferredName(), maxNormalizedProbability);

@@ -244,7 +230,7 @@
-            return Objects.hash(jobId, start, end, sort, decending, anomalyScoreFilter, includeInterim, maxNormalizedProbability,
+            return Objects.hash(jobId, start, end, sort, decending, recordScoreFilter, includeInterim,
                     pageParams, partitionValue);

@@ -262,9 +248,8 @@
-                    Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
+                    Objects.equals(recordScoreFilter, other.recordScoreFilter) &&
-                    Objects.equals(maxNormalizedProbability, other.maxNormalizedProbability) &&

@@ -362,7 +347,7 @@
-                    .anomalyScoreThreshold(request.anomalyScoreFilter)
+                    .recordScore(request.recordScoreFilter)
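A minimal sketch of the renamed record filter (not from this commit; it assumes a GetRecordsAction.Request instance named request obtained elsewhere):

    // Before this commit the filter was declared against the "anomaly_score" parameter.
    // After this commit the record filter uses the renamed field and accessors:
    request.setRecordScore(50.0);                       // maps to the "record_score" query parameter
    double threshold = request.getRecordScoreFilter();  // reads the same filter back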
@@ -22,8 +22,6 @@ import java.util.Objects;
- * <li>normalizedProbabilityThreshold- Return only buckets with a
- * maxNormalizedProbability >= this value. Default = 0.0</li>

@@ -66,13 +64,6 @@ public final class BucketsQueryBuilder {
-
-    public BucketsQueryBuilder normalizedProbabilityThreshold(Double normalizedProbability) {
-        if (normalizedProbability != null) {
-            bucketsQuery.normalizedProbability = normalizedProbability;
-        }
-        return this;
-    }

@@ -133,8 +124,7 @@
-        private double anomalyScoreFilter = 0.0d;
+        private double anomalyScoreFilter = 0.0;
-        private double normalizedProbability = 0.0d;

@@ -162,10 +152,6 @@
-
-        public double getNormalizedProbability() {
-            return normalizedProbability;
-        }

@@ -195,7 +181,7 @@
-            return Objects.hash(from, size, expand, includeInterim, anomalyScoreFilter, normalizedProbability, start, end,
+            return Objects.hash(from, size, expand, includeInterim, anomalyScoreFilter, start, end,
                     timestamp, partitionValue, sortField, sortDescending);

@@ -221,7 +207,6 @@
-                    Objects.equals(normalizedProbability, other.normalizedProbability) &&
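For callers of the bucket query builder the change is purely subtractive; a sketch assuming an existing BucketsQueryBuilder named builder:

    builder.anomalyScoreThreshold(50.0);               // unchanged
    // builder.normalizedProbabilityThreshold(90.0);   // removed by this commit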
@@ -145,9 +145,6 @@ public class ElasticsearchMappings {
-                .startObject(Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName())
-                    .field(TYPE, DOUBLE)
-                .endObject()

@@ -175,9 +172,6 @@
-                .startObject(AnomalyRecord.ANOMALY_SCORE.getPreferredName())
-                    .field(TYPE, DOUBLE)
-                .endObject()

@@ -230,7 +224,7 @@
-                        .startObject(Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName())
+                        .startObject(PerPartitionMaxProbabilities.MAX_RECORD_SCORE.getPreferredName())

@@ -343,10 +337,10 @@
-                .startObject(AnomalyRecord.NORMALIZED_PROBABILITY.getPreferredName())
+                .startObject(AnomalyRecord.RECORD_SCORE.getPreferredName())
-                .startObject(AnomalyRecord.INITIAL_NORMALIZED_PROBABILITY.getPreferredName())
+                .startObject(AnomalyRecord.INITIAL_RECORD_SCORE.getPreferredName())

@@ -415,7 +409,13 @@
     private static XContentBuilder addInfluencerFieldsToMapping(XContentBuilder builder) throws IOException {
-        builder.startObject(Influencer.INFLUENCER_FIELD_NAME.getPreferredName())
+        builder.startObject(Influencer.INFLUENCER_SCORE.getPreferredName())
+                .field(TYPE, DOUBLE)
+            .endObject()
+            .startObject(Influencer.INITIAL_INFLUENCER_SCORE.getPreferredName())
+                .field(TYPE, DOUBLE)
+            .endObject()
+            .startObject(Influencer.INFLUENCER_FIELD_NAME.getPreferredName())
@@ -25,8 +25,6 @@ import java.util.Objects;
- * <li>partitionValue Set the bucket's max normalized probability to this
- * partition field value's max normalized probability. Default = null</li>

@@ -96,7 +94,7 @@ public final class InfluencersQueryBuilder {
-        private String sortField = Influencer.ANOMALY_SCORE.getPreferredName();
+        private String sortField = Influencer.INFLUENCER_SCORE.getPreferredName();
@@ -99,7 +99,7 @@ public class JobProvider {
     private static final List<String> SECONDARY_SORT = Arrays.asList(
-            AnomalyRecord.ANOMALY_SCORE.getPreferredName(),
+            AnomalyRecord.RECORD_SCORE.getPreferredName(),

@@ -330,7 +330,6 @@
-                    .score(Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName(), query.getNormalizedProbability())

@@ -399,9 +398,6 @@
-                List<PerPartitionMaxProbabilities> partitionProbs =
-                        handlePartitionMaxNormailizedProbabilitiesResponse(item2.getResponse());
-                mergePartitionScoresIntoBucket(partitionProbs, buckets.results(), query.getPartitionValue());

@@ -433,23 +429,6 @@
-
-    void mergePartitionScoresIntoBucket(List<PerPartitionMaxProbabilities> partitionProbs, List<Bucket> buckets, String partitionValue) {
-        Iterator<PerPartitionMaxProbabilities> itr = partitionProbs.iterator();
-        PerPartitionMaxProbabilities partitionProb = itr.hasNext() ? itr.next() : null;
-        for (Bucket b : buckets) {
-            if (partitionProb == null) {
-                b.setMaxNormalizedProbability(0.0);
-            } else {
-                if (partitionProb.getTimestamp().equals(b.getTimestamp())) {
-                    b.setMaxNormalizedProbability(partitionProb.getMaxProbabilityForPartition(partitionValue));
-                    partitionProb = itr.hasNext() ? itr.next() : null;
-                } else {
-                    b.setMaxNormalizedProbability(0.0);
-                }
-            }
-        }
-    }

@@ -643,8 +622,7 @@
-                .score(AnomalyRecord.ANOMALY_SCORE.getPreferredName(), query.getAnomalyScoreThreshold())
+                .score(AnomalyRecord.RECORD_SCORE.getPreferredName(), query.getRecordScoreThreshold())
-                .score(AnomalyRecord.NORMALIZED_PROBABILITY.getPreferredName(), query.getNormalizedProbabilityThreshold())
@@ -17,10 +17,8 @@ package org.elasticsearch.xpack.ml.job.persistence;
- * <li>anomalyScoreThreshold- Return only buckets with an anomalyScore >=
+ * <li>recordScoreThreshold- Return only records with a record_score >=
  * this value. Default = 0.0</li>
- * <li>normalizedProbabilityThreshold. Return only buckets with a
- * maxNormalizedProbability >= this value. Default = 0.0</li>

@@ -70,13 +68,8 @@ public final class RecordsQueryBuilder {
-    public RecordsQueryBuilder anomalyScoreThreshold(double anomalyScoreFilter) {
-        recordsQuery.anomalyScoreFilter = anomalyScoreFilter;
-        return this;
-    }
-
-    public RecordsQueryBuilder normalizedProbability(double normalizedProbability) {
-        recordsQuery.normalizedProbability = normalizedProbability;
+    public RecordsQueryBuilder recordScore(double recordScore) {
+        recordsQuery.recordScore = recordScore;
         return this;
     }

@@ -100,8 +93,7 @@
-        private double anomalyScoreFilter = 0.0d;
+        private double recordScore = 0.0;
-        private double normalizedProbability = 0.0d;

@@ -123,12 +115,8 @@
-        public double getAnomalyScoreThreshold() {
-            return anomalyScoreFilter;
-        }
-
-        public double getNormalizedProbabilityThreshold() {
-            return normalizedProbability;
+        public double getRecordScoreThreshold() {
+            return recordScore;
         }
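Correspondingly, on the records query builder (sketch only; assumes an existing RecordsQueryBuilder named recordsBuilder, and uses only methods visible in this diff):

    recordsBuilder
            .recordScore(75.0)            // replaces anomalyScoreThreshold(...)
            .sortField("record_score");
    // normalizedProbability(...) has been removed along with the field it filtered on.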
@@ -140,22 +140,18 @@ public class BucketNormalizable extends Normalizable {
     public boolean setMaxChildrenScore(ChildType childrenType, double maxScore) {
-        double oldScore = 0.0;
         switch (childrenType) {
             case BUCKET_INFLUENCER:
-                oldScore = bucket.getAnomalyScore();
+                double oldScore = bucket.getAnomalyScore();
                 bucket.setAnomalyScore(maxScore);
-                break;
+                return maxScore != oldScore;
             case RECORD:
-                oldScore = bucket.getMaxNormalizedProbability();
-                bucket.setMaxNormalizedProbability(maxScore);
-                break;
             case PARTITION_SCORE:
-                break;
+                return false;
             default:
                 throw new IllegalArgumentException("Invalid type: " + childrenType);
         }
-        return maxScore != oldScore;
     }
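The behavioural consequence of the setMaxChildrenScore rewrite, as a sketch (the normalizable variable is assumed to exist elsewhere):

    // BUCKET_INFLUENCER children still push their max score onto the bucket's anomaly_score
    // and report whether it actually changed; RECORD and PARTITION_SCORE children no longer
    // touch the bucket, so they always report "unchanged".
    boolean changed = normalizable.setMaxChildrenScore(ChildType.BUCKET_INFLUENCER, 90.0);
    boolean ignored = normalizable.setMaxChildrenScore(ChildType.RECORD, 90.0);  // always false now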
@@ -61,12 +61,12 @@ class InfluencerNormalizable extends AbstractLeafNormalizable {
     public double getNormalizedScore() {
-        return influencer.getAnomalyScore();
+        return influencer.getInfluencerScore();
     }

     public void setNormalizedScore(double normalizedScore) {
-        influencer.setAnomalyScore(normalizedScore);
+        influencer.setInfluencerScore(normalizedScore);
     }
@@ -62,12 +62,12 @@ public class PartitionScoreNormalizable extends AbstractLeafNormalizable {
     public double getNormalizedScore() {
-        return score.getAnomalyScore();
+        return score.getRecordScore();
     }

     public void setNormalizedScore(double normalizedScore) {
-        score.setAnomalyScore(normalizedScore);
+        score.setRecordScore(normalizedScore);
     }
@@ -63,17 +63,17 @@ class RecordNormalizable extends AbstractLeafNormalizable {
     public double getNormalizedScore() {
-        return record.getNormalizedProbability();
+        return record.getRecordScore();
     }

     public void setNormalizedScore(double normalizedScore) {
-        record.setNormalizedProbability(normalizedScore);
+        record.setRecordScore(normalizedScore);
     }

     public void setParentScore(double parentScore) {
-        record.setAnomalyScore(parentScore);
+        // nothing to do
     }
@@ -76,9 +76,8 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
     /**
      * Normalization
      */
-    public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
-    public static final ParseField NORMALIZED_PROBABILITY = new ParseField("normalized_probability");
-    public static final ParseField INITIAL_NORMALIZED_PROBABILITY = new ParseField("initial_normalized_probability");
+    public static final ParseField RECORD_SCORE = new ParseField("record_score");
+    public static final ParseField INITIAL_RECORD_SCORE = new ParseField("initial_record_score");

@@ -99,9 +98,8 @@
-        PARSER.declareDouble(AnomalyRecord::setAnomalyScore, ANOMALY_SCORE);
-        PARSER.declareDouble(AnomalyRecord::setNormalizedProbability, NORMALIZED_PROBABILITY);
-        PARSER.declareDouble(AnomalyRecord::setInitialNormalizedProbability, INITIAL_NORMALIZED_PROBABILITY);
+        PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
+        PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);

@@ -141,10 +139,9 @@
-    private double anomalyScore;
-    private double normalizedProbability;
-    private double initialNormalizedProbability;
+    private double recordScore;
+    private double initialRecordScore;

@@ -184,9 +181,8 @@
-        anomalyScore = in.readDouble();
-        normalizedProbability = in.readDouble();
-        initialNormalizedProbability = in.readDouble();
+        recordScore = in.readDouble();
+        initialRecordScore = in.readDouble();

@@ -226,9 +222,8 @@
-        out.writeDouble(anomalyScore);
-        out.writeDouble(normalizedProbability);
-        out.writeDouble(initialNormalizedProbability);
+        out.writeDouble(recordScore);
+        out.writeDouble(initialRecordScore);

@@ -244,9 +239,8 @@
-        builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
-        builder.field(NORMALIZED_PROBABILITY.getPreferredName(), normalizedProbability);
-        builder.field(INITIAL_NORMALIZED_PROBABILITY.getPreferredName(), initialNormalizedProbability);
+        builder.field(RECORD_SCORE.getPreferredName(), recordScore);
+        builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);

@@ -350,28 +344,20 @@
-    public double getAnomalyScore() {
-        return anomalyScore;
+    public double getRecordScore() {
+        return recordScore;
     }

-    public void setAnomalyScore(double anomalyScore) {
-        this.anomalyScore = anomalyScore;
+    public void setRecordScore(double recordScore) {
+        this.recordScore = recordScore;
     }

-    public double getNormalizedProbability() {
-        return normalizedProbability;
+    public double getInitialRecordScore() {
+        return initialRecordScore;
     }

-    public void setNormalizedProbability(double normalizedProbability) {
-        this.normalizedProbability = normalizedProbability;
-    }
-
-    public double getInitialNormalizedProbability() {
-        return initialNormalizedProbability;
-    }
-
-    public void setInitialNormalizedProbability(double initialNormalizedProbability) {
-        this.initialNormalizedProbability = initialNormalizedProbability;
+    public void setInitialRecordScore(double initialRecordScore) {
+        this.initialRecordScore = initialRecordScore;
     }

@@ -523,8 +509,8 @@
-        return Objects.hash(jobId, detectorIndex, sequenceNum, bucketSpan, probability, anomalyScore,
-                normalizedProbability, initialNormalizedProbability, typical, actual,
+        return Objects.hash(jobId, detectorIndex, sequenceNum, bucketSpan, probability,
+                recordScore, initialRecordScore, typical, actual,
                 function, functionDescription, fieldName, byFieldName, byFieldValue, correlatedByFieldValue,

@@ -548,9 +534,8 @@
-                && this.anomalyScore == that.anomalyScore
-                && this.normalizedProbability == that.normalizedProbability
-                && this.initialNormalizedProbability == that.initialNormalizedProbability
+                && this.recordScore == that.recordScore
+                && this.initialRecordScore == that.initialRecordScore
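Reading scores off a record after this change, as a sketch (record is an AnomalyRecord obtained elsewhere):

    double score        = record.getRecordScore();         // was getNormalizedProbability()
    double initialScore = record.getInitialRecordScore();  // was getInitialNormalizedProbability()
    // getAnomalyScore()/setAnomalyScore() have been removed from AnomalyRecord entirely.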
@@ -38,7 +38,6 @@ public class Bucket extends ToXContentToBytes implements Writeable {
-    public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("max_normalized_probability");

@@ -74,7 +73,6 @@
-        PARSER.declareDouble(Bucket::setMaxNormalizedProbability, MAX_NORMALIZED_PROBABILITY);

@@ -90,7 +88,6 @@
-    private double maxNormalizedProbability;

@@ -112,7 +109,6 @@
-        this.maxNormalizedProbability = other.maxNormalizedProbability;

@@ -130,7 +126,6 @@
-        maxNormalizedProbability = in.readDouble();

@@ -148,7 +143,6 @@
-        out.writeDouble(maxNormalizedProbability);

@@ -167,7 +161,6 @@
-        builder.field(MAX_NORMALIZED_PROBABILITY.getPreferredName(), maxNormalizedProbability);

@@ -225,14 +218,6 @@
-
-    public double getMaxNormalizedProbability() {
-        return maxNormalizedProbability;
-    }
-
-    public void setMaxNormalizedProbability(double maxNormalizedProbability) {
-        this.maxNormalizedProbability = maxNormalizedProbability;
-    }

@@ -316,19 +301,19 @@
-        return first.isPresent() ? first.get().getInitialAnomalyScore() : 0.0;
+        return first.isPresent() ? first.get().getInitialRecordScore() : 0.0;
     }

     public double partitionAnomalyScore(String partitionValue) {
-        return first.isPresent() ? first.get().getAnomalyScore() : 0.0;
+        return first.isPresent() ? first.get().getRecordScore() : 0.0;
     }

     public int hashCode() {
-        return Objects.hash(jobId, timestamp, eventCount, initialAnomalyScore, anomalyScore, maxNormalizedProbability, recordCount, records,
+        return Objects.hash(jobId, timestamp, eventCount, initialAnomalyScore, anomalyScore, recordCount, records,
                 isInterim, bucketSpan, bucketInfluencers);

@@ -349,8 +334,8 @@
+                && (this.recordCount == that.recordCount)
                 && (this.anomalyScore == that.anomalyScore) && (this.initialAnomalyScore == that.initialAnomalyScore)
-                && (this.maxNormalizedProbability == that.maxNormalizedProbability) && (this.recordCount == that.recordCount)
@@ -37,8 +37,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencer_field_value");
-public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score");
+public static final ParseField INITIAL_INFLUENCER_SCORE = new ParseField("initial_influencer_score");
-public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
+public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score");

// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("influencers");
@@ -64,8 +64,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {
PARSER.declareInt(ConstructingObjectParser.constructorArg(), SEQUENCE_NUM);
PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE);
PARSER.declareDouble(Influencer::setProbability, PROBABILITY);
-PARSER.declareDouble(Influencer::setAnomalyScore, ANOMALY_SCORE);
+PARSER.declareDouble(Influencer::setInfluencerScore, INFLUENCER_SCORE);
-PARSER.declareDouble(Influencer::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
+PARSER.declareDouble(Influencer::setInitialInfluencerScore, INITIAL_INFLUENCER_SCORE);
PARSER.declareBoolean(Influencer::setInterim, Bucket.IS_INTERIM);
}

@@ -76,8 +76,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {
private String influenceField;
private String influenceValue;
private double probability;
-private double initialAnomalyScore;
+private double initialInfluencerScore;
-private double anomalyScore;
+private double influencerScore;
private boolean isInterim;

public Influencer(String jobId, String fieldName, String fieldValue, Date timestamp, long bucketSpan, int sequenceNum) {
@@ -95,8 +95,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {
influenceField = in.readString();
influenceValue = in.readString();
probability = in.readDouble();
-initialAnomalyScore = in.readDouble();
+initialInfluencerScore = in.readDouble();
-anomalyScore = in.readDouble();
+influencerScore = in.readDouble();
isInterim = in.readBoolean();
bucketSpan = in.readLong();
sequenceNum = in.readInt();
@@ -109,8 +109,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {
out.writeString(influenceField);
out.writeString(influenceValue);
out.writeDouble(probability);
-out.writeDouble(initialAnomalyScore);
+out.writeDouble(initialInfluencerScore);
-out.writeDouble(anomalyScore);
+out.writeDouble(influencerScore);
out.writeBoolean(isInterim);
out.writeLong(bucketSpan);
out.writeInt(sequenceNum);
@@ -126,8 +126,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {
if (ReservedFieldNames.isValidFieldName(influenceField)) {
builder.field(influenceField, influenceValue);
}
-builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
+builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore);
-builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore);
+builder.field(INITIAL_INFLUENCER_SCORE.getPreferredName(), initialInfluencerScore);
builder.field(PROBABILITY.getPreferredName(), probability);
builder.field(SEQUENCE_NUM.getPreferredName(), sequenceNum);
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
@@ -165,20 +165,20 @@ public class Influencer extends ToXContentToBytes implements Writeable {
return influenceValue;
}

-public double getInitialAnomalyScore() {
+public double getInitialInfluencerScore() {
-return initialAnomalyScore;
+return initialInfluencerScore;
}

-public void setInitialAnomalyScore(double influenceScore) {
+public void setInitialInfluencerScore(double score) {
-initialAnomalyScore = influenceScore;
+initialInfluencerScore = score;
}

-public double getAnomalyScore() {
+public double getInfluencerScore() {
-return anomalyScore;
+return influencerScore;
}

-public void setAnomalyScore(double score) {
+public void setInfluencerScore(double score) {
-anomalyScore = score;
+influencerScore = score;
}

public boolean isInterim() {
@@ -191,8 +191,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {

@Override
public int hashCode() {
-return Objects.hash(jobId, timestamp, influenceField, influenceValue, initialAnomalyScore, anomalyScore, probability, isInterim,
+return Objects.hash(jobId, timestamp, influenceField, influenceValue, initialInfluencerScore,
-bucketSpan, sequenceNum);
+influencerScore, probability, isInterim, bucketSpan, sequenceNum);
}

@Override
@@ -213,8 +213,8 @@ public class Influencer extends ToXContentToBytes implements Writeable {
return Objects.equals(jobId, other.jobId) && Objects.equals(timestamp, other.timestamp)
&& Objects.equals(influenceField, other.influenceField)
&& Objects.equals(influenceValue, other.influenceValue)
-&& Double.compare(initialAnomalyScore, other.initialAnomalyScore) == 0
+&& Double.compare(initialInfluencerScore, other.initialInfluencerScore) == 0
-&& Double.compare(anomalyScore, other.anomalyScore) == 0 && Double.compare(probability, other.probability) == 0
+&& Double.compare(influencerScore, other.influencerScore) == 0 && Double.compare(probability, other.probability) == 0
&& (isInterim == other.isInterim) && (bucketSpan == other.bucketSpan) && (sequenceNum == other.sequenceNum);
}
}

@@ -21,8 +21,8 @@ public class PartitionScore extends ToXContentToBytes implements Writeable {

private final String partitionFieldValue;
private final String partitionFieldName;
-private final double initialAnomalyScore;
+private final double initialRecordScore;
-private double anomalyScore;
+private double recordScore;
private double probability;

public static final ConstructingObjectParser<PartitionScore, Void> PARSER = new ConstructingObjectParser<>(
@@ -32,24 +32,24 @@ public class PartitionScore extends ToXContentToBytes implements Writeable {
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_NAME);
PARSER.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_VALUE);
-PARSER.declareDouble(ConstructingObjectParser.constructorArg(), Bucket.INITIAL_ANOMALY_SCORE);
+PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.INITIAL_RECORD_SCORE);
-PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.ANOMALY_SCORE);
+PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.RECORD_SCORE);
PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.PROBABILITY);
}

-public PartitionScore(String fieldName, String fieldValue, double initialAnomalyScore, double anomalyScore, double probability) {
+public PartitionScore(String fieldName, String fieldValue, double initialRecordScore, double recordScore, double probability) {
partitionFieldName = fieldName;
partitionFieldValue = fieldValue;
-this.initialAnomalyScore = initialAnomalyScore;
+this.initialRecordScore = initialRecordScore;
-this.anomalyScore = anomalyScore;
+this.recordScore = recordScore;
this.probability = probability;
}

public PartitionScore(StreamInput in) throws IOException {
partitionFieldName = in.readString();
partitionFieldValue = in.readString();
-initialAnomalyScore = in.readDouble();
+initialRecordScore = in.readDouble();
-anomalyScore = in.readDouble();
+recordScore = in.readDouble();
probability = in.readDouble();
}

@@ -57,8 +57,8 @@ public class PartitionScore extends ToXContentToBytes implements Writeable {
public void writeTo(StreamOutput out) throws IOException {
out.writeString(partitionFieldName);
out.writeString(partitionFieldValue);
-out.writeDouble(initialAnomalyScore);
+out.writeDouble(initialRecordScore);
-out.writeDouble(anomalyScore);
+out.writeDouble(recordScore);
out.writeDouble(probability);
}

@@ -67,23 +67,23 @@ public class PartitionScore extends ToXContentToBytes implements Writeable {
builder.startObject();
builder.field(AnomalyRecord.PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName);
builder.field(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue);
-builder.field(Bucket.INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore);
+builder.field(AnomalyRecord.INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
-builder.field(AnomalyRecord.ANOMALY_SCORE.getPreferredName(), anomalyScore);
+builder.field(AnomalyRecord.RECORD_SCORE.getPreferredName(), recordScore);
builder.field(AnomalyRecord.PROBABILITY.getPreferredName(), probability);
builder.endObject();
return builder;
}

-public double getInitialAnomalyScore() {
+public double getInitialRecordScore() {
-return initialAnomalyScore;
+return initialRecordScore;
}

-public double getAnomalyScore() {
+public double getRecordScore() {
-return anomalyScore;
+return recordScore;
}

-public void setAnomalyScore(double anomalyScore) {
+public void setRecordScore(double recordScore) {
-this.anomalyScore = anomalyScore;
+this.recordScore = recordScore;
}

public String getPartitionFieldName() {
@@ -104,7 +104,7 @@ public class PartitionScore extends ToXContentToBytes implements Writeable {

@Override
public int hashCode() {
-return Objects.hash(partitionFieldName, partitionFieldValue, probability, initialAnomalyScore, anomalyScore);
+return Objects.hash(partitionFieldName, partitionFieldValue, probability, initialRecordScore, recordScore);
}

@Override
@@ -122,6 +122,6 @@ public class PartitionScore extends ToXContentToBytes implements Writeable {
// id is excluded from the test as it is generated by the datastore
return Objects.equals(this.partitionFieldValue, that.partitionFieldValue)
&& Objects.equals(this.partitionFieldName, that.partitionFieldName) && (this.probability == that.probability)
-&& (this.initialAnomalyScore == that.initialAnomalyScore) && (this.anomalyScore == that.anomalyScore);
+&& (this.initialRecordScore == that.initialRecordScore) && (this.recordScore == that.recordScore);
}
}

@@ -43,6 +43,7 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W
* Field Names
*/
public static final ParseField PER_PARTITION_MAX_PROBABILITIES = new ParseField("per_partition_max_probabilities");
+public static final ParseField MAX_RECORD_SCORE = new ParseField("max_record_score");

@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<PerPartitionMaxProbabilities, Void> PARSER =
@@ -85,7 +86,7 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W
this.jobId = records.get(0).getJobId();
this.timestamp = records.get(0).getTimestamp();
this.bucketSpan = records.get(0).getBucketSpan();
-this.perPartitionMaxProbabilities = calcMaxNormalizedProbabilityPerPartition(records);
+this.perPartitionMaxProbabilities = calcMaxRecordScorePerPartition(records);
}

public PerPartitionMaxProbabilities(StreamInput in) throws IOException {
@@ -123,7 +124,7 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W
Optional<PartitionProbability> first =
perPartitionMaxProbabilities.stream().filter(pp -> partitionValue.equals(pp.getPartitionValue())).findFirst();

-return first.isPresent() ? first.get().getMaxNormalizedProbability() : 0.0;
+return first.isPresent() ? first.get().getMaxRecordScore() : 0.0;
}

/**
@@ -150,10 +151,10 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W
}
}

-private List<PartitionProbability> calcMaxNormalizedProbabilityPerPartition(List<AnomalyRecord> anomalyRecords) {
+private List<PartitionProbability> calcMaxRecordScorePerPartition(List<AnomalyRecord> anomalyRecords) {
Map<String, Double> maxValueByPartition = anomalyRecords.stream().collect(
Collectors.groupingBy(AnomalyRecord::getPartitionFieldValue,
-Collector.of(DoubleMaxBox::new, (m, ar) -> m.accept(ar.getNormalizedProbability()),
+Collector.of(DoubleMaxBox::new, (m, ar) -> m.accept(ar.getRecordScore()),
DoubleMaxBox::combine, DoubleMaxBox::value)));

List<PartitionProbability> pProbs = new ArrayList<>();
@@ -200,7 +201,7 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W
}

/**
-* Class for partitionValue, maxNormalizedProb pairs
+* Class for partitionValue, maxRecordScore pairs
*/
public static class PartitionProbability extends ToXContentToBytes implements Writeable {

@@ -210,48 +211,48 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W

static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_VALUE);
-PARSER.declareDouble(ConstructingObjectParser.constructorArg(), Bucket.MAX_NORMALIZED_PROBABILITY);
+PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_RECORD_SCORE);
}

private final String partitionValue;
-private final double maxNormalizedProbability;
+private final double maxRecordScore;

-PartitionProbability(String partitionName, double maxNormalizedProbability) {
+PartitionProbability(String partitionName, double maxRecordScore) {
this.partitionValue = partitionName;
-this.maxNormalizedProbability = maxNormalizedProbability;
+this.maxRecordScore = maxRecordScore;
}

public PartitionProbability(StreamInput in) throws IOException {
partitionValue = in.readString();
-maxNormalizedProbability = in.readDouble();
+maxRecordScore = in.readDouble();
}

public String getPartitionValue() {
return partitionValue;
}

-public double getMaxNormalizedProbability() {
+public double getMaxRecordScore() {
-return maxNormalizedProbability;
+return maxRecordScore;
}

@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(partitionValue);
-out.writeDouble(maxNormalizedProbability);
+out.writeDouble(maxRecordScore);
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject()
.field(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName(), partitionValue)
-.field(Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName(), maxNormalizedProbability)
+.field(MAX_RECORD_SCORE.getPreferredName(), maxRecordScore)
.endObject();
return builder;
}

@Override
public int hashCode() {
-return Objects.hash(partitionValue, maxNormalizedProbability);
+return Objects.hash(partitionValue, maxRecordScore);
}

@Override
@@ -267,7 +268,7 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W
PartitionProbability that = (PartitionProbability) other;

return Objects.equals(this.partitionValue, that.partitionValue)
-&& this.maxNormalizedProbability == that.maxNormalizedProbability;
+&& this.maxRecordScore == that.maxRecordScore;
}
}
}

@@ -10,7 +10,6 @@ import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings;
-import org.elasticsearch.xpack.ml.job.process.autodetect.state.Quantiles;

import java.util.Arrays;
import java.util.HashSet;
@@ -72,16 +71,14 @@ public final class ReservedFieldNames {
AnomalyRecord.OVER_FIELD_NAME.getPreferredName(),
AnomalyRecord.OVER_FIELD_VALUE.getPreferredName(),
AnomalyRecord.CAUSES.getPreferredName(),
-AnomalyRecord.ANOMALY_SCORE.getPreferredName(),
+AnomalyRecord.RECORD_SCORE.getPreferredName(),
-AnomalyRecord.NORMALIZED_PROBABILITY.getPreferredName(),
+AnomalyRecord.INITIAL_RECORD_SCORE.getPreferredName(),
-AnomalyRecord.INITIAL_NORMALIZED_PROBABILITY.getPreferredName(),
AnomalyRecord.BUCKET_SPAN.getPreferredName(),
AnomalyRecord.SEQUENCE_NUM.getPreferredName(),

Bucket.ANOMALY_SCORE.getPreferredName(),
Bucket.BUCKET_INFLUENCERS.getPreferredName(),
Bucket.BUCKET_SPAN.getPreferredName(),
-Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName(),
Bucket.IS_INTERIM.getPreferredName(),
Bucket.RECORD_COUNT.getPreferredName(),
Bucket.EVENT_COUNT.getPreferredName(),
@@ -121,8 +118,8 @@ public final class ReservedFieldNames {
Influencer.PROBABILITY.getPreferredName(),
Influencer.INFLUENCER_FIELD_NAME.getPreferredName(),
Influencer.INFLUENCER_FIELD_VALUE.getPreferredName(),
-Influencer.INITIAL_ANOMALY_SCORE.getPreferredName(),
+Influencer.INITIAL_INFLUENCER_SCORE.getPreferredName(),
-Influencer.ANOMALY_SCORE.getPreferredName(),
+Influencer.INFLUENCER_SCORE.getPreferredName(),
Influencer.BUCKET_SPAN.getPreferredName(),
Influencer.SEQUENCE_NUM.getPreferredName(),

@@ -148,6 +145,7 @@ public final class ReservedFieldNames {
ModelSnapshot.LATEST_RESULT_TIME.getPreferredName(),

PerPartitionMaxProbabilities.PER_PARTITION_MAX_PROBABILITIES.getPreferredName(),
+PerPartitionMaxProbabilities.MAX_RECORD_SCORE.getPreferredName(),

Result.RESULT_TYPE.getPreferredName(),
Result.TIMESTAMP.getPreferredName()

@@ -71,11 +71,6 @@ public class RestGetBucketsAction extends BaseRestHandler {
request.setAnomalyScore(
Double.parseDouble(restRequest.param(GetBucketsAction.Request.ANOMALY_SCORE.getPreferredName(), "0.0")));
}
-if (restRequest.hasParam(GetBucketsAction.Request.MAX_NORMALIZED_PROBABILITY.getPreferredName())) {
-request.setMaxNormalizedProbability(
-Double.parseDouble(restRequest.param(
-GetBucketsAction.Request.MAX_NORMALIZED_PROBABILITY.getPreferredName(), "0.0")));
-}
if (restRequest.hasParam(GetBucketsAction.Request.PARTITION_VALUE.getPreferredName())) {
request.setPartitionValue(restRequest.param(GetBucketsAction.Request.PARTITION_VALUE.getPreferredName()));
}

@@ -50,7 +50,7 @@ public class RestGetInfluencersAction extends BaseRestHandler {
request.setAnomalyScore(
Double.parseDouble(restRequest.param(GetInfluencersAction.Request.ANOMALY_SCORE.getPreferredName(), "0.0")));
request.setSort(restRequest.param(GetInfluencersAction.Request.SORT_FIELD.getPreferredName(),
-Influencer.ANOMALY_SCORE.getPreferredName()));
+Influencer.INFLUENCER_SCORE.getPreferredName()));
request.setDecending(restRequest.paramAsBoolean(GetInfluencersAction.Request.DESCENDING_SORT.getPreferredName(), true));
}

@@ -45,13 +45,11 @@ public class RestGetRecordsAction extends BaseRestHandler {
request.setIncludeInterim(restRequest.paramAsBoolean(GetRecordsAction.Request.INCLUDE_INTERIM.getPreferredName(), false));
request.setPageParams(new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM),
restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));
-request.setAnomalyScore(
+request.setRecordScore(
-Double.parseDouble(restRequest.param(GetRecordsAction.Request.ANOMALY_SCORE_FILTER.getPreferredName(), "0.0")));
+Double.parseDouble(restRequest.param(GetRecordsAction.Request.RECORD_SCORE_FILTER.getPreferredName(), "0.0")));
request.setSort(restRequest.param(GetRecordsAction.Request.SORT.getPreferredName(),
-AnomalyRecord.NORMALIZED_PROBABILITY.getPreferredName()));
+AnomalyRecord.RECORD_SCORE.getPreferredName()));
request.setDecending(restRequest.paramAsBoolean(GetRecordsAction.Request.DESCENDING.getPreferredName(), true));
-request.setMaxNormalizedProbability(
-Double.parseDouble(restRequest.param(GetRecordsAction.Request.MAX_NORMALIZED_PROBABILITY.getPreferredName(), "0.0")));
String partitionValue = restRequest.param(GetRecordsAction.Request.PARTITION_VALUE.getPreferredName());
if (partitionValue != null) {
request.setPartitionValue(partitionValue);

@@ -19,9 +19,6 @@ public class GetBucketActionRequestTests extends AbstractStreamableXContentTestC
if (randomBoolean()) {
request.setTimestamp(String.valueOf(randomLong()));
} else {
-if (randomBoolean()) {
-request.setMaxNormalizedProbability(randomDouble());
-}
if (randomBoolean()) {
request.setPartitionValue(randomAsciiOfLengthBetween(1, 20));
}

@@ -58,9 +58,6 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<Get
if (randomBoolean()) {
bucket.setInterim(randomBoolean());
}
-if (randomBoolean()) {
-bucket.setMaxNormalizedProbability(randomDouble());
-}
if (randomBoolean()) {
int size = randomInt(10);
List<PartitionScore> partitionScores = new ArrayList<>(size);
@@ -89,7 +86,6 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<Get
List<AnomalyRecord> records = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
AnomalyRecord anomalyRecord = new AnomalyRecord(jobId, new Date(randomLong()), randomNonNegativeLong(), sequenceNum++);
-anomalyRecord.setAnomalyScore(randomDouble());
anomalyRecord.setActual(Collections.singletonList(randomDouble()));
anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
anomalyRecord.setProbability(randomDouble());

@@ -23,8 +23,8 @@ public class GetInfluencersActionResponseTests extends AbstractStreamableTestCas
for (int j = 0; j < listSize; j++) {
Influencer influencer = new Influencer(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 20),
randomAsciiOfLengthBetween(1, 20), new Date(randomNonNegativeLong()), randomNonNegativeLong(), j + 1);
-influencer.setAnomalyScore(randomDouble());
+influencer.setInfluencerScore(randomDouble());
-influencer.setInitialAnomalyScore(randomDouble());
+influencer.setInitialInfluencerScore(randomDouble());
influencer.setProbability(randomDouble());
influencer.setInterim(randomBoolean());
hits.add(influencer);

@@ -38,14 +38,11 @@ public class GetRecordsActionRequestTests extends AbstractStreamableXContentTest
request.setDecending(randomBoolean());
}
if (randomBoolean()) {
-request.setAnomalyScore(randomDouble());
+request.setRecordScore(randomDouble());
}
if (randomBoolean()) {
request.setIncludeInterim(randomBoolean());
}
-if (randomBoolean()) {
-request.setMaxNormalizedProbability(randomDouble());
-}
if (randomBoolean()) {
int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;

@@ -51,7 +51,7 @@ public class MlJobIT extends ESRestTestCase {
"\"result_type\": { \"type\" : \"keyword\" }," +
"\"timestamp\": { \"type\" : \"date\" }, " +
"\"anomaly_score\": { \"type\" : \"double\" }, " +
-"\"normalized_probability\": { \"type\" : \"double\" }, " +
+"\"record_score\": { \"type\" : \"double\" }, " +
"\"over_field_value\": { \"type\" : \"keyword\" }, " +
"\"partition_field_value\": { \"type\" : \"keyword\" }, " +
"\"by_field_value\": { \"type\" : \"keyword\" }, " +

@@ -17,7 +17,6 @@ public class BucketsQueryBuilderTests extends ESTestCase {
assertEquals(false, query.isIncludeInterim());
assertEquals(false, query.isExpand());
assertEquals(0.0, query.getAnomalyScoreFilter(), 0.0001);
-assertEquals(0.0, query.getNormalizedProbability(), 0.0001);
assertNull(query.getStart());
assertNull(query.getEnd());
assertEquals("timestamp", query.getSortField());
@@ -31,7 +30,6 @@ public class BucketsQueryBuilderTests extends ESTestCase {
.includeInterim(true)
.expand(true)
.anomalyScoreThreshold(50.0d)
-.normalizedProbabilityThreshold(70.0d)
.start("1000")
.end("2000")
.partitionValue("foo")
@@ -44,7 +42,6 @@ public class BucketsQueryBuilderTests extends ESTestCase {
assertEquals(true, query.isIncludeInterim());
assertEquals(true, query.isExpand());
assertEquals(50.0d, query.getAnomalyScoreFilter(), 0.00001);
-assertEquals(70.0d, query.getNormalizedProbability(), 0.00001);
assertEquals("1000", query.getStart());
assertEquals("2000", query.getEnd());
assertEquals("foo", query.getPartitionValue());
@@ -59,7 +56,6 @@ public class BucketsQueryBuilderTests extends ESTestCase {
.includeInterim(true)
.expand(true)
.anomalyScoreThreshold(50.0d)
-.normalizedProbabilityThreshold(70.0d)
.start("1000")
.end("2000")
.partitionValue("foo");
@@ -70,7 +66,6 @@ public class BucketsQueryBuilderTests extends ESTestCase {
.includeInterim(true)
.expand(true)
.anomalyScoreThreshold(50.0d)
-.normalizedProbabilityThreshold(70.0d)
.start("1000")
.end("2000")
.partitionValue("foo");
@@ -85,7 +80,6 @@ public class BucketsQueryBuilderTests extends ESTestCase {
.includeInterim(true)
.expand(true)
.anomalyScoreThreshold(50.0d)
-.normalizedProbabilityThreshold(70.0d)
.start("1000")
.end("2000")
.partitionValue("foo");
@@ -97,10 +91,9 @@ public class BucketsQueryBuilderTests extends ESTestCase {
.includeInterim(true)
.expand(true)
.anomalyScoreThreshold(50.1d)
-.normalizedProbabilityThreshold(70.0d)
.start("1000")
.end("2000")
.partitionValue("foo");
assertFalse(query.build().equals(query2.build()));
}
}

@@ -19,7 +19,7 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
assertEquals(0.0, query.getAnomalyScoreFilter(), 0.0001);
assertNull(query.getStart());
assertNull(query.getEnd());
-assertEquals(Influencer.ANOMALY_SCORE.getPreferredName(), query.getSortField());
+assertEquals(Influencer.INFLUENCER_SCORE.getPreferredName(), query.getSortField());
assertFalse(query.isSortDescending());
}

@@ -84,4 +84,4 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
.end("2000");
assertFalse(query.build().equals(query2.build()));
}
}

@@ -297,8 +297,7 @@ public class JobProviderTests extends ESTestCase {
Client client = getMockedClient(queryBuilder -> {queryBuilderHolder[0] = queryBuilder;}, response);
JobProvider provider = createProvider(client);

-BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(0.0)
+BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(1.0);
-.normalizedProbabilityThreshold(1.0);

@SuppressWarnings({"unchecked", "rawtypes"})
QueryPage<Bucket>[] holder = new QueryPage[1];
@@ -308,7 +307,7 @@ public class JobProviderTests extends ESTestCase {
QueryBuilder query = queryBuilderHolder[0];
String queryString = query.toString();
assertTrue(
-queryString.matches("(?s).*max_normalized_probability[^}]*from. : 1\\.0.*must_not[^}]*term[^}]*is_interim.*value. : .true" +
+queryString.matches("(?s).*anomaly_score[^}]*from. : 1\\.0.*must_not[^}]*term[^}]*is_interim.*value. : .true" +
".*"));
}

@@ -333,7 +332,7 @@ public class JobProviderTests extends ESTestCase {
JobProvider provider = createProvider(client);

BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(5.1)
-.normalizedProbabilityThreshold(10.9).includeInterim(true);
+.includeInterim(true);

@SuppressWarnings({"unchecked", "rawtypes"})
QueryPage<Bucket>[] holder = new QueryPage[1];
@@ -342,7 +341,6 @@ public class JobProviderTests extends ESTestCase {
assertEquals(1L, buckets.count());
QueryBuilder query = queryBuilderHolder[0];
String queryString = query.toString();
-assertTrue(queryString.matches("(?s).*max_normalized_probability[^}]*from. : 10\\.9.*"));
assertTrue(queryString.matches("(?s).*anomaly_score[^}]*from. : 5\\.1.*"));
assertFalse(queryString.matches("(?s).*is_interim.*"));
}
@@ -371,7 +369,6 @@ public class JobProviderTests extends ESTestCase {
bq.from(from);
bq.size(size);
bq.anomalyScoreThreshold(5.1);
-bq.normalizedProbabilityThreshold(10.9);
bq.includeInterim(true);

@SuppressWarnings({"unchecked", "rawtypes"})
@@ -381,7 +378,6 @@ public class JobProviderTests extends ESTestCase {
assertEquals(1L, buckets.count());
QueryBuilder query = queryBuilderHolder[0];
String queryString = query.toString();
-assertTrue(queryString.matches("(?s).*max_normalized_probability[^}]*from. : 10\\.9.*"));
assertTrue(queryString.matches("(?s).*anomaly_score[^}]*from. : 5\\.1.*"));
assertFalse(queryString.matches("(?s).*is_interim.*"));
}
@@ -488,8 +484,8 @@ public class JobProviderTests extends ESTestCase {
JobProvider provider = createProvider(client);

RecordsQueryBuilder rqb = new RecordsQueryBuilder().from(from).size(size).epochStart(String.valueOf(now.getTime()))
-.epochEnd(String.valueOf(now.getTime())).includeInterim(true).sortField(sortfield).anomalyScoreThreshold(11.1)
+.epochEnd(String.valueOf(now.getTime())).includeInterim(true).sortField(sortfield)
-.normalizedProbability(2.2);
+.recordScore(2.2);

@SuppressWarnings({"unchecked", "rawtypes"})
QueryPage<AnomalyRecord>[] holder = new QueryPage[1];
@@ -545,8 +541,7 @@ public class JobProviderTests extends ESTestCase {
rqb.epochEnd(String.valueOf(now.getTime()));
rqb.includeInterim(true);
rqb.sortField(sortfield);
-rqb.anomalyScoreThreshold(11.1);
+rqb.recordScore(2.2);
-rqb.normalizedProbability(2.2);

@SuppressWarnings({"unchecked", "rawtypes"})
QueryPage<AnomalyRecord>[] holder = new QueryPage[1];
@@ -729,27 +724,27 @@ public class JobProviderTests extends ESTestCase {
Date now = new Date();
List<Map<String, Object>> source = new ArrayList<>();

-Map<String, Object> recordMap1 = new HashMap<>();
+Map<String, Object> influencerMap1 = new HashMap<>();
-recordMap1.put("job_id", "foo");
+influencerMap1.put("job_id", "foo");
-recordMap1.put("probability", 0.555);
+influencerMap1.put("probability", 0.555);
-recordMap1.put("influencer_field_name", "Builder");
+influencerMap1.put("influencer_field_name", "Builder");
-recordMap1.put("timestamp", now.getTime());
+influencerMap1.put("timestamp", now.getTime());
-recordMap1.put("influencer_field_value", "Bob");
+influencerMap1.put("influencer_field_value", "Bob");
-recordMap1.put("initial_anomaly_score", 22.2);
+influencerMap1.put("initial_influencer_score", 22.2);
-recordMap1.put("anomaly_score", 22.6);
+influencerMap1.put("influencer_score", 22.6);
-recordMap1.put("bucket_span", 123);
+influencerMap1.put("bucket_span", 123);
-recordMap1.put("sequence_num", 1);
+influencerMap1.put("sequence_num", 1);
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("job_id", "foo");
recordMap2.put("probability", 0.99);
recordMap2.put("influencer_field_name", "Builder");
recordMap2.put("timestamp", now.getTime());
recordMap2.put("influencer_field_value", "James");
-recordMap2.put("initial_anomaly_score", 5.0);
+recordMap2.put("initial_influencer_score", 5.0);
-recordMap2.put("anomaly_score", 5.0);
+recordMap2.put("influencer_score", 5.0);
recordMap2.put("bucket_span", 123);
recordMap2.put("sequence_num", 2);
-source.add(recordMap1);
+source.add(influencerMap1);
source.add(recordMap2);

int from = 4;
@@ -775,15 +770,15 @@ public class JobProviderTests extends ESTestCase {
assertEquals("Builder", records.get(0).getInfluencerFieldName());
assertEquals(now, records.get(0).getTimestamp());
assertEquals(0.555, records.get(0).getProbability(), 0.00001);
-assertEquals(22.6, records.get(0).getAnomalyScore(), 0.00001);
+assertEquals(22.6, records.get(0).getInfluencerScore(), 0.00001);
-assertEquals(22.2, records.get(0).getInitialAnomalyScore(), 0.00001);
+assertEquals(22.2, records.get(0).getInitialInfluencerScore(), 0.00001);

assertEquals("James", records.get(1).getInfluencerFieldValue());
assertEquals("Builder", records.get(1).getInfluencerFieldName());
assertEquals(now, records.get(1).getTimestamp());
assertEquals(0.99, records.get(1).getProbability(), 0.00001);
-assertEquals(5.0, records.get(1).getAnomalyScore(), 0.00001);
+assertEquals(5.0, records.get(1).getInfluencerScore(), 0.00001);
-assertEquals(5.0, records.get(1).getInitialAnomalyScore(), 0.00001);
+assertEquals(5.0, records.get(1).getInitialInfluencerScore(), 0.00001);
}

public void testInfluencers_WithInterim() throws InterruptedException, ExecutionException, IOException {
@@ -791,28 +786,28 @@ public class JobProviderTests extends ESTestCase {
Date now = new Date();
List<Map<String, Object>> source = new ArrayList<>();

-Map<String, Object> recordMap1 = new HashMap<>();
+Map<String, Object> influencerMap1 = new HashMap<>();
-recordMap1.put("job_id", "foo");
+influencerMap1.put("job_id", "foo");
-recordMap1.put("probability", 0.555);
+influencerMap1.put("probability", 0.555);
-recordMap1.put("influencer_field_name", "Builder");
+influencerMap1.put("influencer_field_name", "Builder");
-recordMap1.put("timestamp", now.getTime());
+influencerMap1.put("timestamp", now.getTime());
-recordMap1.put("influencer_field_value", "Bob");
+influencerMap1.put("influencer_field_value", "Bob");
-recordMap1.put("initial_anomaly_score", 22.2);
+influencerMap1.put("initial_influencer_score", 22.2);
-recordMap1.put("anomaly_score", 22.6);
+influencerMap1.put("influencer_score", 22.6);
-recordMap1.put("bucket_span", 123);
+influencerMap1.put("bucket_span", 123);
-recordMap1.put("sequence_num", 1);
+influencerMap1.put("sequence_num", 1);
-Map<String, Object> recordMap2 = new HashMap<>();
+Map<String, Object> influencerMap2 = new HashMap<>();
-recordMap2.put("job_id", "foo");
+influencerMap2.put("job_id", "foo");
-recordMap2.put("probability", 0.99);
+influencerMap2.put("probability", 0.99);
-recordMap2.put("influencer_field_name", "Builder");
+influencerMap2.put("influencer_field_name", "Builder");
-recordMap2.put("timestamp", now.getTime());
+influencerMap2.put("timestamp", now.getTime());
-recordMap2.put("influencer_field_value", "James");
+influencerMap2.put("influencer_field_value", "James");
-recordMap2.put("initial_anomaly_score", 5.0);
+influencerMap2.put("initial_influencer_score", 5.0);
-recordMap2.put("anomaly_score", 5.0);
+influencerMap2.put("influencer_score", 5.0);
-recordMap2.put("bucket_span", 123);
+influencerMap2.put("bucket_span", 123);
-recordMap2.put("sequence_num", 2);
+influencerMap2.put("sequence_num", 2);
-source.add(recordMap1);
+source.add(influencerMap1);
-source.add(recordMap2);
+source.add(influencerMap2);

int from = 4;
int size = 3;
@@ -837,15 +832,15 @@ public class JobProviderTests extends ESTestCase {
assertEquals("Builder", records.get(0).getInfluencerFieldName());
assertEquals(now, records.get(0).getTimestamp());
assertEquals(0.555, records.get(0).getProbability(), 0.00001);
-assertEquals(22.6, records.get(0).getAnomalyScore(), 0.00001);
+assertEquals(22.6, records.get(0).getInfluencerScore(), 0.00001);
-assertEquals(22.2, records.get(0).getInitialAnomalyScore(), 0.00001);
+assertEquals(22.2, records.get(0).getInitialInfluencerScore(), 0.00001);

assertEquals("James", records.get(1).getInfluencerFieldValue());
assertEquals("Builder", records.get(1).getInfluencerFieldName());
assertEquals(now, records.get(1).getTimestamp());
assertEquals(0.99, records.get(1).getProbability(), 0.00001);
-assertEquals(5.0, records.get(1).getAnomalyScore(), 0.00001);
+assertEquals(5.0, records.get(1).getInfluencerScore(), 0.00001);
-assertEquals(5.0, records.get(1).getInitialAnomalyScore(), 0.00001);
+assertEquals(5.0, records.get(1).getInitialInfluencerScore(), 0.00001);
}

public void testModelSnapshots() throws InterruptedException, ExecutionException, IOException {
@@ -951,80 +946,13 @@ public class JobProviderTests extends ESTestCase {
assertTrue(queryString.matches("(?s).*snapshot_id.*value. : .snappyId.*description.*value. : .description1.*"));
}

-public void testMergePartitionScoresIntoBucket() throws InterruptedException, ExecutionException {
+private AnomalyRecord createAnomalyRecord(String partitionFieldValue, Date timestamp, double recordScore) {
-MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME);
-
-JobProvider provider = createProvider(clientBuilder.build());
-
-List<PerPartitionMaxProbabilities> partitionMaxProbs = new ArrayList<>();
-
-List<AnomalyRecord> records = new ArrayList<>();
-records.add(createAnomalyRecord("partitionValue1", new Date(2), 1.0));
-records.add(createAnomalyRecord("partitionValue2", new Date(2), 4.0));
-partitionMaxProbs.add(new PerPartitionMaxProbabilities(records));
-
-records.clear();
-records.add(createAnomalyRecord("partitionValue1", new Date(3), 2.0));
-records.add(createAnomalyRecord("partitionValue2", new Date(3), 1.0));
-partitionMaxProbs.add(new PerPartitionMaxProbabilities(records));
-
-records.clear();
-records.add(createAnomalyRecord("partitionValue1", new Date(5), 3.0));
-records.add(createAnomalyRecord("partitionValue2", new Date(5), 2.0));
-partitionMaxProbs.add(new PerPartitionMaxProbabilities(records));
-
-List<Bucket> buckets = new ArrayList<>();
-buckets.add(createBucketAtEpochTime(1));
-buckets.add(createBucketAtEpochTime(2));
-buckets.add(createBucketAtEpochTime(3));
-buckets.add(createBucketAtEpochTime(4));
-buckets.add(createBucketAtEpochTime(5));
-buckets.add(createBucketAtEpochTime(6));
-
-provider.mergePartitionScoresIntoBucket(partitionMaxProbs, buckets, "partitionValue1");
-assertEquals(0.0, buckets.get(0).getMaxNormalizedProbability(), 0.001);
-assertEquals(1.0, buckets.get(1).getMaxNormalizedProbability(), 0.001);
-assertEquals(2.0, buckets.get(2).getMaxNormalizedProbability(), 0.001);
-assertEquals(0.0, buckets.get(3).getMaxNormalizedProbability(), 0.001);
-assertEquals(3.0, buckets.get(4).getMaxNormalizedProbability(), 0.001);
-assertEquals(0.0, buckets.get(5).getMaxNormalizedProbability(), 0.001);
-
-provider.mergePartitionScoresIntoBucket(partitionMaxProbs, buckets, "partitionValue2");
-assertEquals(0.0, buckets.get(0).getMaxNormalizedProbability(), 0.001);
-assertEquals(4.0, buckets.get(1).getMaxNormalizedProbability(), 0.001);
-assertEquals(1.0, buckets.get(2).getMaxNormalizedProbability(), 0.001);
-assertEquals(0.0, buckets.get(3).getMaxNormalizedProbability(), 0.001);
-assertEquals(2.0, buckets.get(4).getMaxNormalizedProbability(), 0.001);
-assertEquals(0.0, buckets.get(5).getMaxNormalizedProbability(), 0.001);
-}
-
-private AnomalyRecord createAnomalyRecord(String partitionFieldValue, Date timestamp, double normalizedProbability) {
AnomalyRecord record = new AnomalyRecord("foo", timestamp, 600, 42);
record.setPartitionFieldValue(partitionFieldValue);
-record.setNormalizedProbability(normalizedProbability);
+record.setRecordScore(recordScore);
return record;
}

-public void testMergePartitionScoresIntoBucket_WithEmptyScoresList() throws InterruptedException, ExecutionException {
-MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME);
-
-JobProvider provider = createProvider(clientBuilder.build());
-
-List<PerPartitionMaxProbabilities> scores = new ArrayList<>();
-
-List<Bucket> buckets = new ArrayList<>();
-buckets.add(createBucketAtEpochTime(1));
-buckets.add(createBucketAtEpochTime(2));
-buckets.add(createBucketAtEpochTime(3));
-buckets.add(createBucketAtEpochTime(4));
-
-provider.mergePartitionScoresIntoBucket(scores, buckets, "partitionValue");
-assertEquals(0.0, buckets.get(0).getMaxNormalizedProbability(), 0.001);
|
|
||||||
assertEquals(0.0, buckets.get(1).getMaxNormalizedProbability(), 0.001);
|
|
||||||
assertEquals(0.0, buckets.get(2).getMaxNormalizedProbability(), 0.001);
|
|
||||||
assertEquals(0.0, buckets.get(3).getMaxNormalizedProbability(), 0.001);
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testRestoreStateToStream() throws Exception {
|
public void testRestoreStateToStream() throws Exception {
|
||||||
Map<String, Object> categorizerState = new HashMap<>();
|
Map<String, Object> categorizerState = new HashMap<>();
|
||||||
categorizerState.put("catName", "catVal");
|
categorizerState.put("catName", "catVal");
|
||||||
|
@ -1120,7 +1048,6 @@ public class JobProviderTests extends ESTestCase {
|
||||||
|
|
||||||
private Bucket createBucketAtEpochTime(long epoch) {
|
private Bucket createBucketAtEpochTime(long epoch) {
|
||||||
Bucket b = new Bucket("foo", new Date(epoch), 123);
|
Bucket b = new Bucket("foo", new Date(epoch), 123);
|
||||||
b.setMaxNormalizedProbability(10.0);
|
|
||||||
return b;
|
return b;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
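Across JobProviderTests the record-level score moves from setNormalizedProbability to setRecordScore, and the whole testMergePartitionScoresIntoBucket machinery disappears together with Bucket.getMaxNormalizedProbability. A minimal fragment of the reworked helper, using only the constructor and setter calls that appear in the diff (java.util.Date and the ML result classes are assumed to be imported):

    private AnomalyRecord createAnomalyRecord(String partitionFieldValue, Date timestamp, double recordScore) {
        AnomalyRecord record = new AnomalyRecord("foo", timestamp, 600, 42);
        record.setPartitionFieldValue(partitionFieldValue);
        record.setRecordScore(recordScore);   // was setNormalizedProbability(...)
        return record;
    }
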
@ -40,7 +40,6 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
bucket.setAnomalyScore(99.9);
|
bucket.setAnomalyScore(99.9);
|
||||||
bucket.setEventCount(57);
|
bucket.setEventCount(57);
|
||||||
bucket.setInitialAnomalyScore(88.8);
|
bucket.setInitialAnomalyScore(88.8);
|
||||||
bucket.setMaxNormalizedProbability(42.0);
|
|
||||||
bucket.setProcessingTimeMs(8888);
|
bucket.setProcessingTimeMs(8888);
|
||||||
bucket.setRecordCount(1);
|
bucket.setRecordCount(1);
|
||||||
|
|
||||||
|
@ -54,7 +53,6 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
|
|
||||||
// We are adding a record but it shouldn't be persisted as part of the bucket
|
// We are adding a record but it shouldn't be persisted as part of the bucket
|
||||||
AnomalyRecord record = new AnomalyRecord(JOB_ID, new Date(), 600, 2);
|
AnomalyRecord record = new AnomalyRecord(JOB_ID, new Date(), 600, 2);
|
||||||
record.setAnomalyScore(99.8);
|
|
||||||
bucket.setRecords(Arrays.asList(record));
|
bucket.setRecords(Arrays.asList(record));
|
||||||
|
|
||||||
JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client);
|
JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client);
|
||||||
|
@ -65,7 +63,6 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
String s = ((IndexRequest)bulkRequest.requests().get(0)).source().utf8ToString();
|
String s = ((IndexRequest)bulkRequest.requests().get(0)).source().utf8ToString();
|
||||||
assertTrue(s.matches(".*anomaly_score.:99\\.9.*"));
|
assertTrue(s.matches(".*anomaly_score.:99\\.9.*"));
|
||||||
assertTrue(s.matches(".*initial_anomaly_score.:88\\.8.*"));
|
assertTrue(s.matches(".*initial_anomaly_score.:88\\.8.*"));
|
||||||
assertTrue(s.matches(".*max_normalized_probability.:42\\.0.*"));
|
|
||||||
assertTrue(s.matches(".*record_count.:1.*"));
|
assertTrue(s.matches(".*record_count.:1.*"));
|
||||||
assertTrue(s.matches(".*event_count.:57.*"));
|
assertTrue(s.matches(".*event_count.:57.*"));
|
||||||
assertTrue(s.matches(".*bucket_span.:123456.*"));
|
assertTrue(s.matches(".*bucket_span.:123456.*"));
|
||||||
|
@ -92,7 +89,6 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
actuals.add(5.0);
|
actuals.add(5.0);
|
||||||
actuals.add(5.1);
|
actuals.add(5.1);
|
||||||
r1.setActual(actuals);
|
r1.setActual(actuals);
|
||||||
r1.setAnomalyScore(99.8);
|
|
||||||
r1.setByFieldName("byName");
|
r1.setByFieldName("byName");
|
||||||
r1.setByFieldValue("byValue");
|
r1.setByFieldValue("byValue");
|
||||||
r1.setCorrelatedByFieldValue("testCorrelations");
|
r1.setCorrelatedByFieldValue("testCorrelations");
|
||||||
|
@ -100,8 +96,8 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
r1.setFieldName("testFieldName");
|
r1.setFieldName("testFieldName");
|
||||||
r1.setFunction("testFunction");
|
r1.setFunction("testFunction");
|
||||||
r1.setFunctionDescription("testDescription");
|
r1.setFunctionDescription("testDescription");
|
||||||
r1.setInitialNormalizedProbability(23.4);
|
r1.setInitialRecordScore(23.4);
|
||||||
r1.setNormalizedProbability(0.005);
|
r1.setRecordScore(0.005);
|
||||||
r1.setOverFieldName("overName");
|
r1.setOverFieldName("overName");
|
||||||
r1.setOverFieldValue("overValue");
|
r1.setOverFieldValue("overValue");
|
||||||
r1.setPartitionFieldName("partName");
|
r1.setPartitionFieldName("partName");
|
||||||
|
@ -120,9 +116,8 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
String s = ((IndexRequest) bulkRequest.requests().get(0)).source().utf8ToString();
|
String s = ((IndexRequest) bulkRequest.requests().get(0)).source().utf8ToString();
|
||||||
assertTrue(s.matches(".*detector_index.:3.*"));
|
assertTrue(s.matches(".*detector_index.:3.*"));
|
||||||
assertTrue(s.matches(".*\"probability\":0\\.1.*"));
|
assertTrue(s.matches(".*\"probability\":0\\.1.*"));
|
||||||
assertTrue(s.matches(".*\"anomaly_score\":99\\.8.*"));
|
assertTrue(s.matches(".*\"record_score\":0\\.005.*"));
|
||||||
assertTrue(s.matches(".*\"normalized_probability\":0\\.005.*"));
|
assertTrue(s.matches(".*initial_record_score.:23.4.*"));
|
||||||
assertTrue(s.matches(".*initial_normalized_probability.:23.4.*"));
|
|
||||||
assertTrue(s.matches(".*bucket_span.:42.*"));
|
assertTrue(s.matches(".*bucket_span.:42.*"));
|
||||||
assertTrue(s.matches(".*by_field_name.:.byName.*"));
|
assertTrue(s.matches(".*by_field_name.:.byName.*"));
|
||||||
assertTrue(s.matches(".*by_field_value.:.byValue.*"));
|
assertTrue(s.matches(".*by_field_value.:.byValue.*"));
|
||||||
|
@ -144,8 +139,8 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
|
|
||||||
List<Influencer> influencers = new ArrayList<>();
|
List<Influencer> influencers = new ArrayList<>();
|
||||||
Influencer inf = new Influencer(JOB_ID, "infName1", "infValue1", new Date(), 600, 1);
|
Influencer inf = new Influencer(JOB_ID, "infName1", "infValue1", new Date(), 600, 1);
|
||||||
inf.setAnomalyScore(16);
|
inf.setInfluencerScore(16);
|
||||||
inf.setInitialAnomalyScore(55.5);
|
inf.setInitialInfluencerScore(55.5);
|
||||||
inf.setProbability(0.4);
|
inf.setProbability(0.4);
|
||||||
influencers.add(inf);
|
influencers.add(inf);
|
||||||
|
|
||||||
|
@ -158,8 +153,8 @@ public class JobResultsPersisterTests extends ESTestCase {
|
||||||
assertTrue(s.matches(".*probability.:0\\.4.*"));
|
assertTrue(s.matches(".*probability.:0\\.4.*"));
|
||||||
assertTrue(s.matches(".*influencer_field_name.:.infName1.*"));
|
assertTrue(s.matches(".*influencer_field_name.:.infName1.*"));
|
||||||
assertTrue(s.matches(".*influencer_field_value.:.infValue1.*"));
|
assertTrue(s.matches(".*influencer_field_value.:.infValue1.*"));
|
||||||
assertTrue(s.matches(".*initial_anomaly_score.:55\\.5.*"));
|
assertTrue(s.matches(".*initial_influencer_score.:55\\.5.*"));
|
||||||
assertTrue(s.matches(".*anomaly_score.:16\\.0.*"));
|
assertTrue(s.matches(".*influencer_score.:16\\.0.*"));
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||||
|
|
|
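The JobResultsPersisterTests hunks show the same rename end to end: the bucket keeps anomaly_score but loses max_normalized_probability, records are scored with setRecordScore/setInitialRecordScore, and influencers with setInfluencerScore/setInitialInfluencerScore. A condensed sketch of the new fixtures, reusing only the constructors and setters visible above (sequence numbers and values are illustrative):

    AnomalyRecord r1 = new AnomalyRecord(JOB_ID, new Date(), 600, 1);
    r1.setRecordScore(0.005);              // was setNormalizedProbability
    r1.setInitialRecordScore(23.4);        // was setInitialNormalizedProbability

    Influencer inf = new Influencer(JOB_ID, "infName1", "infValue1", new Date(), 600, 1);
    inf.setInfluencerScore(16);            // was setAnomalyScore
    inf.setInitialInfluencerScore(55.5);   // was setInitialAnomalyScore
    inf.setProbability(0.4);
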
@ -30,7 +30,7 @@ public class AutodetectResultsParserTests extends ESTestCase {
|
||||||
|
|
||||||
public static final String METRIC_OUTPUT_SAMPLE = "[{\"bucket\": {\"job_id\":\"foo\",\"timestamp\":1359450000000,"
|
public static final String METRIC_OUTPUT_SAMPLE = "[{\"bucket\": {\"job_id\":\"foo\",\"timestamp\":1359450000000,"
|
||||||
+ "\"bucket_span\":22, \"records\":[],"
|
+ "\"bucket_span\":22, \"records\":[],"
|
||||||
+ "\"max_normalized_probability\":0, \"anomaly_score\":0,\"record_count\":0,\"event_count\":806,\"bucket_influencers\":["
|
+ "\"anomaly_score\":0,\"record_count\":0,\"event_count\":806,\"bucket_influencers\":["
|
||||||
+ "{\"sequence_num\":1,\"timestamp\":1359450000000,\"bucket_span\":22,\"job_id\":\"foo\",\"anomaly_score\":0,"
|
+ "{\"sequence_num\":1,\"timestamp\":1359450000000,\"bucket_span\":22,\"job_id\":\"foo\",\"anomaly_score\":0,"
|
||||||
+ "\"probability\":0.0, \"influencer_field_name\":\"bucket_time\","
|
+ "\"probability\":0.0, \"influencer_field_name\":\"bucket_time\","
|
||||||
+ "\"initial_anomaly_score\":0.0}]}},{\"quantiles\": {\"job_id\":\"foo\", \"quantile_state\":\"[normalizer 1.1, normalizer 2" +
|
+ "\"initial_anomaly_score\":0.0}]}},{\"quantiles\": {\"job_id\":\"foo\", \"quantile_state\":\"[normalizer 1.1, normalizer 2" +
|
||||||
|
@ -50,7 +50,7 @@ public class AutodetectResultsParserTests extends ESTestCase {
|
||||||
+ "\"probability\":0.0473552,\"by_field_name\":\"airline\",\"by_field_value\":\"SWA\", \"typical\":[152.148],"
|
+ "\"probability\":0.0473552,\"by_field_name\":\"airline\",\"by_field_value\":\"SWA\", \"typical\":[152.148],"
|
||||||
+ "\"actual\":[96.6425],\"field_name\":\"responsetime\",\"function\":\"min\",\"partition_field_name\":\"\","
|
+ "\"actual\":[96.6425],\"field_name\":\"responsetime\",\"function\":\"min\",\"partition_field_name\":\"\","
|
||||||
+ "\"partition_field_value\":\"\"}],"
|
+ "\"partition_field_value\":\"\"}],"
|
||||||
+ "\"initial_anomaly_score\":0.0140005, \"anomaly_score\":20.22688, \"max_normalized_probability\":10.5688, \"record_count\":4,"
|
+ "\"initial_anomaly_score\":0.0140005, \"anomaly_score\":20.22688, \"record_count\":4,"
|
||||||
+ "\"event_count\":820,\"bucket_influencers\":[{\"timestamp\":1359453600000,\"bucket_span\":22,\"sequence_num\":5,"
|
+ "\"event_count\":820,\"bucket_influencers\":[{\"timestamp\":1359453600000,\"bucket_span\":22,\"sequence_num\":5,"
|
||||||
+ "\"job_id\":\"foo\", \"raw_anomaly_score\":0.0140005, \"probability\":0.01,\"influencer_field_name\":\"bucket_time\","
|
+ "\"job_id\":\"foo\", \"raw_anomaly_score\":0.0140005, \"probability\":0.01,\"influencer_field_name\":\"bucket_time\","
|
||||||
+ "\"initial_anomaly_score\":20.22688,\"anomaly_score\":20.22688} ,{\"timestamp\":1359453600000,\"bucket_span\":22,"
|
+ "\"initial_anomaly_score\":20.22688,\"anomaly_score\":20.22688} ,{\"timestamp\":1359453600000,\"bucket_span\":22,"
|
||||||
|
@ -65,168 +65,168 @@ public class AutodetectResultsParserTests extends ESTestCase {
|
||||||
+ "\"sequence_num\":1,\"function\":\"max\","
|
+ "\"sequence_num\":1,\"function\":\"max\","
|
||||||
+ "\"causes\":[{\"probability\":1.38951e-08,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"causes\":[{\"probability\":1.38951e-08,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[9.19027e+07]}],"
|
+ "\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[9.19027e+07]}],"
|
||||||
+ "\"normalized_probability\":100,\"anomaly_score\":44.7324},{\"probability\":3.86587e-07,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":100,\"anomaly_score\":44.7324},{\"probability\":3.86587e-07,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":3.86587e-07,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":3.86587e-07,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[3.20093e+07]}],"
|
+ "\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[3.20093e+07]}],"
|
||||||
+ "\"normalized_probability\":89.5834,\"anomaly_score\":44.7324},{\"probability\":0.00500083,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":89.5834,\"anomaly_score\":44.7324},{\"probability\":0.00500083,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.00500083,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.00500083,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[6.61812e+06]}],"
|
+ "\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[6.61812e+06]}],"
|
||||||
+ "\"normalized_probability\":1.19856,\"anomaly_score\":44.7324},{\"probability\":0.0152333,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":1.19856,\"anomaly_score\":44.7324},{\"probability\":0.0152333,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"emea.salesforce.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"emea.salesforce.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0152333,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.0152333,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"emea.salesforce.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[5.36373e+06]}],"
|
+ "\"over_field_value\":\"emea.salesforce.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[5.36373e+06]}],"
|
||||||
+ "\"normalized_probability\":0.303996,\"anomaly_score\":44.7324}],\"raw_anomaly_score\":1.30397,\"anomaly_score\":44.7324,"
|
+ "\"record_score\":0.303996,\"anomaly_score\":44.7324}],\"raw_anomaly_score\":1.30397,\"anomaly_score\":44.7324,"
|
||||||
+ "\"max_normalized_probability\":100,\"record_count\":4,\"event_count\":1235}" + ",{\"flush\":\"testing2\"}"
|
+ "\"record_count\":4,\"event_count\":1235}" + ",{\"flush\":\"testing2\"}"
|
||||||
+ ",{\"timestamp\":1379590800,\"records\":[{\"probability\":1.9008e-08,\"field_name\":\"sum_cs_bytes_\","
|
+ ",{\"timestamp\":1379590800,\"records\":[{\"probability\":1.9008e-08,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":1.9008e-08,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":1.9008e-08,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.1498e+08]}],"
|
+ "\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.1498e+08]}],"
|
||||||
+ "\"normalized_probability\":93.6213,\"anomaly_score\":1.19192},{\"probability\":1.01013e-06,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":93.6213,\"anomaly_score\":1.19192},{\"probability\":1.01013e-06,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":1.01013e-06,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":1.01013e-06,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.25808e+07]}],"
|
+ "\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.25808e+07]}],"
|
||||||
+ "\"normalized_probability\":86.5825,\"anomaly_score\":1.19192},{\"probability\":0.000386185,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":86.5825,\"anomaly_score\":1.19192},{\"probability\":0.000386185,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.000386185,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.000386185,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.22855e+06]}],"
|
+ "\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.22855e+06]}],"
|
||||||
+ "\"normalized_probability\":17.1179,\"anomaly_score\":1.19192},{\"probability\":0.00208033,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":17.1179,\"anomaly_score\":1.19192},{\"probability\":0.00208033,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.00208033,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.00208033,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.43328e+06]}],"
|
+ "\"over_field_value\":\"docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.43328e+06]}],"
|
||||||
+ "\"normalized_probability\":3.0692,\"anomaly_score\":1.19192},{\"probability\":0.00312988,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":3.0692,\"anomaly_score\":1.19192},{\"probability\":0.00312988,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking2.airasia.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking2.airasia.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.00312988,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.00312988,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"booking2.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.15764e+06]}],"
|
+ "\"over_field_value\":\"booking2.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.15764e+06]}],"
|
||||||
+ "\"normalized_probability\":1.99532,\"anomaly_score\":1.19192},{\"probability\":0.00379229,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":1.99532,\"anomaly_score\":1.19192},{\"probability\":0.00379229,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.facebook.com\",\"function\":\"max\",\"causes\":["
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.facebook.com\",\"function\":\"max\",\"causes\":["
|
||||||
+ "{\"probability\":0.00379229,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.00379229,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.facebook.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.0443e+06]}],"
|
+ "\"over_field_value\":\"www.facebook.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.0443e+06]}],"
|
||||||
+ "\"normalized_probability\":1.62352,\"anomaly_score\":1.19192},{\"probability\":0.00623576,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":1.62352,\"anomaly_score\":1.19192},{\"probability\":0.00623576,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.airasia.com\",\"function\":\"max\",\"causes\":["
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.airasia.com\",\"function\":\"max\",\"causes\":["
|
||||||
+ "{\"probability\":0.00623576,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.00623576,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[792699]}],"
|
+ "\"over_field_value\":\"www.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[792699]}],"
|
||||||
+ "\"normalized_probability\":0.935134,\"anomaly_score\":1.19192},{\"probability\":0.00665308,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.935134,\"anomaly_score\":1.19192},{\"probability\":0.00665308,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google.com\",\"function\":\"max\",\"causes\":["
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google.com\",\"function\":\"max\",\"causes\":["
|
||||||
+ "{\"probability\":0.00665308,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.00665308,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[763985]}],"
|
+ "\"over_field_value\":\"www.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[763985]}],"
|
||||||
+ "\"normalized_probability\":0.868119,\"anomaly_score\":1.19192},{\"probability\":0.00709315,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.868119,\"anomaly_score\":1.19192},{\"probability\":0.00709315,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.drive.google.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.drive.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.00709315,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.00709315,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"0.drive.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[736442]}],"
|
+ "\"over_field_value\":\"0.drive.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[736442]}],"
|
||||||
+ "\"normalized_probability\":0.805994,\"anomaly_score\":1.19192},{\"probability\":0.00755789,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.805994,\"anomaly_score\":1.19192},{\"probability\":0.00755789,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources2.news.com.au\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources2.news.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.00755789,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.00755789,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"resources2.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[709962]}],"
|
+ "\"over_field_value\":\"resources2.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[709962]}],"
|
||||||
+ "\"normalized_probability\":0.748239,\"anomaly_score\":1.19192},{\"probability\":0.00834974,\"field_name\":"
|
+ "\"record_score\":0.748239,\"anomaly_score\":1.19192},{\"probability\":0.00834974,\"field_name\":"
|
||||||
+ "\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.calypso.net.au\",\"function\":\"max\","
|
+ "\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.calypso.net.au\",\"function\":\"max\","
|
||||||
+ "\"causes\":[{\"probability\":0.00834974,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"causes\":[{\"probability\":0.00834974,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.calypso.net.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[669968]}],"
|
+ "\"over_field_value\":\"www.calypso.net.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[669968]}],"
|
||||||
+ "\"normalized_probability\":0.664644,\"anomaly_score\":1.19192},{\"probability\":0.0107711,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.664644,\"anomaly_score\":1.19192},{\"probability\":0.0107711,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"ad.yieldmanager.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"ad.yieldmanager.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0107711,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.0107711,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"ad.yieldmanager.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[576067]}],"
|
+ "\"over_field_value\":\"ad.yieldmanager.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[576067]}],"
|
||||||
+ "\"normalized_probability\":0.485277,\"anomaly_score\":1.19192},{\"probability\":0.0123367,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.485277,\"anomaly_score\":1.19192},{\"probability\":0.0123367,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google-analytics.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google-analytics.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0123367,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.0123367,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.google-analytics.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[530594]}],"
|
+ "\"over_field_value\":\"www.google-analytics.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[530594]}],"
|
||||||
+ "\"normalized_probability\":0.406783,\"anomaly_score\":1.19192},{\"probability\":0.0125647,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.406783,\"anomaly_score\":1.19192},{\"probability\":0.0125647,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"bs.serving-sys.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"bs.serving-sys.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0125647,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.0125647,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"bs.serving-sys.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[524690]}],"
|
+ "\"over_field_value\":\"bs.serving-sys.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[524690]}],"
|
||||||
+ "\"normalized_probability\":0.396986,\"anomaly_score\":1.19192},{\"probability\":0.0141652,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.396986,\"anomaly_score\":1.19192},{\"probability\":0.0141652,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google.com.au\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0141652,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.0141652,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.google.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487328]}],"
|
+ "\"over_field_value\":\"www.google.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487328]}],"
|
||||||
+ "\"normalized_probability\":0.337075,\"anomaly_score\":1.19192},{\"probability\":0.0141742,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.337075,\"anomaly_score\":1.19192},{\"probability\":0.0141742,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources1.news.com.au\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources1.news.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0141742,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.0141742,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"resources1.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487136]}],"
|
+ "\"over_field_value\":\"resources1.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487136]}],"
|
||||||
+ "\"normalized_probability\":0.336776,\"anomaly_score\":1.19192},{\"probability\":0.0145263,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.336776,\"anomaly_score\":1.19192},{\"probability\":0.0145263,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"b.mail.google.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"b.mail.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0145263,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"probability\":0.0145263,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"b.mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[479766]}],"
|
+ "\"over_field_value\":\"b.mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[479766]}],"
|
||||||
+ "\"normalized_probability\":0.325385,\"anomaly_score\":1.19192},{\"probability\":0.0151447,\"field_name\":\"sum_cs_bytes_\","
|
+ "\"record_score\":0.325385,\"anomaly_score\":1.19192},{\"probability\":0.0151447,\"field_name\":\"sum_cs_bytes_\","
|
||||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.rei.com\",\"function\":\"max\",\"causes\":[{"
|
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.rei.com\",\"function\":\"max\",\"causes\":[{"
|
||||||
+ "\"probability\":0.0151447,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.rei" +
|
+ "\"probability\":0.0151447,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.rei" +
|
||||||
".com\","
|
".com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[467450]}],\"normalized_probability\":0.306657,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[467450]}],\"record_score\":0.306657,\"anomaly_score\":1" +
|
||||||
".19192},"
|
".19192},"
|
||||||
+ "{\"probability\":0.0164073,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.0164073,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"s3.amazonaws.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0164073,"
|
+ "\"over_field_value\":\"s3.amazonaws.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0164073,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"s3.amazonaws.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"s3.amazonaws.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[444511]}],\"normalized_probability\":0.272805,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[444511]}],\"record_score\":0.272805,\"anomaly_score\":1" +
|
||||||
".19192},"
|
".19192},"
|
||||||
+ "{\"probability\":0.0201927,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.0201927,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"0-p-06-ash2.channel.facebook.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0201927,"
|
+ "\"over_field_value\":\"0-p-06-ash2.channel.facebook.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0201927,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"0-p-06-ash2.channel.facebook.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"0-p-06-ash2.channel.facebook.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[389243]}],\"normalized_probability\":0.196685,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[389243]}],\"record_score\":0.196685,\"anomaly_score\":1" +
|
||||||
".19192},"
|
".19192},"
|
||||||
+ "{\"probability\":0.0218721,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.0218721,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"booking.airasia.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0218721,"
|
+ "\"over_field_value\":\"booking.airasia.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0218721,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking.airasia.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking.airasia.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[369509]}],\"normalized_probability\":0.171353,"
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[369509]}],\"record_score\":0.171353,"
|
||||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0242411,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"anomaly_score\":1.19192},{\"probability\":0.0242411,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.yammer.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0242411,"
|
+ "\"over_field_value\":\"www.yammer.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0242411,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.yammer.com\"," +
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.yammer.com\"," +
|
||||||
"\"function\":\"max\","
|
"\"function\":\"max\","
|
||||||
+ "\"typical\":[31356],\"actual\":[345295]}],\"normalized_probability\":0.141585,\"anomaly_score\":1.19192},"
|
+ "\"typical\":[31356],\"actual\":[345295]}],\"record_score\":0.141585,\"anomaly_score\":1.19192},"
|
||||||
+ "{\"probability\":0.0258232,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.0258232,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"safebrowsing-cache.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0258232,"
|
+ "\"over_field_value\":\"safebrowsing-cache.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0258232,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"safebrowsing-cache.google.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"safebrowsing-cache.google.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[331051]}],\"normalized_probability\":0.124748,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[331051]}],\"record_score\":0.124748,\"anomaly_score\":1" +
|
||||||
".19192},"
|
".19192},"
|
||||||
+ "{\"probability\":0.0259695,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.0259695,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"fbcdn-profile-a.akamaihd.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0259695,"
|
+ "\"over_field_value\":\"fbcdn-profile-a.akamaihd.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0259695,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"fbcdn-profile-a.akamaihd.net\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"fbcdn-profile-a.akamaihd.net\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[329801]}],\"normalized_probability\":0.123294,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[329801]}],\"record_score\":0.123294,\"anomaly_score\":1" +
|
||||||
".19192},"
|
".19192},"
|
||||||
+ "{\"probability\":0.0268874,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.0268874,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.oag.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0268874,"
|
+ "\"over_field_value\":\"www.oag.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0268874,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.oag.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.oag.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[322200]}],\"normalized_probability\":0.114537,"
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[322200]}],\"record_score\":0.114537,"
|
||||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0279146,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"anomaly_score\":1.19192},{\"probability\":0.0279146,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"booking.qatarairways.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0279146,"
|
+ "\"over_field_value\":\"booking.qatarairways.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0279146,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking.qatarairways.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking.qatarairways.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[314153]}],\"normalized_probability\":0.105419,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[314153]}],\"record_score\":0.105419,\"anomaly_score\":1" +
|
||||||
".19192},"
|
".19192},"
|
||||||
+ "{\"probability\":0.0309351,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "{\"probability\":0.0309351,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"resources3.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0309351,"
|
+ "\"over_field_value\":\"resources3.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0309351,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources3.news.com.au\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources3.news.com.au\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[292918]}],\"normalized_probability\":0.0821156,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[292918]}],\"record_score\":0.0821156,\"anomaly_score\":1" +
|
||||||
".19192}"
|
".19192}"
|
||||||
+ ",{\"probability\":0.0335204,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ ",{\"probability\":0.0335204,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"resources0.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0335204,"
|
+ "\"over_field_value\":\"resources0.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0335204,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources0.news.com.au\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources0.news.com.au\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[277136]}],\"normalized_probability\":0.0655063,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[277136]}],\"record_score\":0.0655063,\"anomaly_score\":1" +
|
||||||
".19192}"
|
".19192}"
|
||||||
+ ",{\"probability\":0.0354927,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ ",{\"probability\":0.0354927,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.southwest.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0354927,"
|
+ "\"over_field_value\":\"www.southwest.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0354927,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.southwest.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.southwest.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[266310]}],\"normalized_probability\":0.0544615,"
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[266310]}],\"record_score\":0.0544615,"
|
||||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0392043,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"anomaly_score\":1.19192},{\"probability\":0.0392043,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"syndication.twimg.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0392043,"
|
+ "\"over_field_value\":\"syndication.twimg.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0392043,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"syndication.twimg.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"syndication.twimg.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[248276]}],\"normalized_probability\":0.0366913,\"anomaly_score\":1" +
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[248276]}],\"record_score\":0.0366913,\"anomaly_score\":1" +
|
||||||
".19192}"
|
".19192}"
|
||||||
+ ",{\"probability\":0.0400853,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\""
|
+ ",{\"probability\":0.0400853,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\""
|
||||||
+ ",\"over_field_value\":\"mts0.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0400853,"
|
+ ",\"over_field_value\":\"mts0.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0400853,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"mts0.google.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"mts0.google.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[244381]}],\"normalized_probability\":0.0329562,"
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[244381]}],\"record_score\":0.0329562,"
|
||||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0407335,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"anomaly_score\":1.19192},{\"probability\":0.0407335,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"www.onthegotours.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0407335,"
|
+ "\"over_field_value\":\"www.onthegotours.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0407335,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.onthegotours.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.onthegotours.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[241600]}],\"normalized_probability\":0.0303116,"
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[241600]}],\"record_score\":0.0303116,"
|
||||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0470889,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"anomaly_score\":1.19192},{\"probability\":0.0470889,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"chatenabled.mail.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0470889,"
|
+ "\"over_field_value\":\"chatenabled.mail.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0470889,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"chatenabled.mail.google.com\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"chatenabled.mail.google.com\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[217573]}],\"normalized_probability\":0.00823738,"
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[217573]}],\"record_score\":0.00823738,"
|
||||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0491243,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
+ "\"anomaly_score\":1.19192},{\"probability\":0.0491243,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||||
+ "\"over_field_value\":\"googleads.g.doubleclick.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0491243,"
|
+ "\"over_field_value\":\"googleads.g.doubleclick.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0491243,"
|
||||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"googleads.g.doubleclick.net\","
|
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"googleads.g.doubleclick.net\","
|
||||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[210926]}],\"normalized_probability\":0.00237509,"
|
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[210926]}],\"record_score\":0.00237509,"
|
||||||
+ "\"anomaly_score\":1.19192}],\"raw_anomaly_score\":1.26918,\"anomaly_score\":1.19192,\"max_normalized_probability\":93.6213,"
|
+ "\"anomaly_score\":1.19192}],\"raw_anomaly_score\":1.26918,\"anomaly_score\":1.19192,"
|
||||||
+ "\"record_count\":34,\"event_count\":1159}" + "]";
|
+ "\"record_count\":34,\"event_count\":1159}" + "]";
|
||||||
|
|
||||||
public void testParser() throws IOException {
|
public void testParser() throws IOException {
|
||||||
|
|
|
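In the parser samples the change is purely at the JSON level: each record now carries "record_score" where it used to carry "normalized_probability", and the bucket objects drop "max_normalized_probability" entirely while keeping "anomaly_score". A trimmed-down record from the sample above, as a Java string literal in the style of the surrounding constants (values copied from the sample; the neighbouring fields are elided):

    String recordJson = "{\"probability\":1.38951e-08,\"field_name\":\"sum_cs_bytes_\","
            + "\"record_score\":100,\"anomaly_score\":44.7324}";   // was "normalized_probability":100
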
@ -41,12 +41,11 @@ public class BucketNormalizableTests extends ESTestCase {
|
||||||
bucket.setBucketInfluencers(Arrays.asList(bucketInfluencer1, bucketInfluencer2));
|
bucket.setBucketInfluencers(Arrays.asList(bucketInfluencer1, bucketInfluencer2));
|
||||||
|
|
||||||
bucket.setAnomalyScore(88.0);
|
bucket.setAnomalyScore(88.0);
|
||||||
bucket.setMaxNormalizedProbability(2.0);
|
|
||||||
|
|
||||||
AnomalyRecord record1 = new AnomalyRecord("foo", bucket.getTimestamp(), 600, 3);
|
AnomalyRecord record1 = new AnomalyRecord("foo", bucket.getTimestamp(), 600, 3);
|
||||||
record1.setNormalizedProbability(1.0);
|
record1.setRecordScore(1.0);
|
||||||
AnomalyRecord record2 = new AnomalyRecord("foo", bucket.getTimestamp(), 600, 4);
|
AnomalyRecord record2 = new AnomalyRecord("foo", bucket.getTimestamp(), 600, 4);
|
||||||
record2.setNormalizedProbability(2.0);
|
record2.setRecordScore(2.0);
|
||||||
bucket.setRecords(Arrays.asList(record1, record2));
|
bucket.setRecords(Arrays.asList(record1, record2));
|
||||||
|
|
||||||
List<PartitionScore> partitionScores = new ArrayList<>();
|
List<PartitionScore> partitionScores = new ArrayList<>();
|
||||||
|
@ -149,10 +148,9 @@ public class BucketNormalizableTests extends ESTestCase {
|
||||||
BucketNormalizable bucketNormalizable = new BucketNormalizable(bucket, INDEX_NAME);
|
BucketNormalizable bucketNormalizable = new BucketNormalizable(bucket, INDEX_NAME);
|
||||||
|
|
||||||
assertTrue(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.BUCKET_INFLUENCER, 95.0));
|
assertTrue(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.BUCKET_INFLUENCER, 95.0));
|
||||||
assertTrue(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.RECORD, 42.0));
|
assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.RECORD, 42.0));
|
||||||
|
|
||||||
assertEquals(95.0, bucket.getAnomalyScore(), EPSILON);
|
assertEquals(95.0, bucket.getAnomalyScore(), EPSILON);
|
||||||
assertEquals(42.0, bucket.getMaxNormalizedProbability(), EPSILON);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testSetMaxChildrenScore_GivenSameScores() {
|
public void testSetMaxChildrenScore_GivenSameScores() {
|
||||||
|
@ -162,7 +160,6 @@ public class BucketNormalizableTests extends ESTestCase {
|
||||||
assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.RECORD, 2.0));
|
assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.RECORD, 2.0));
|
||||||
|
|
||||||
assertEquals(88.0, bucket.getAnomalyScore(), EPSILON);
|
assertEquals(88.0, bucket.getAnomalyScore(), EPSILON);
|
||||||
assertEquals(2.0, bucket.getMaxNormalizedProbability(), EPSILON);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testSetParentScore() {
|
public void testSetParentScore() {
|
||||||
|
|
|
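With max_normalized_probability gone from Bucket, BucketNormalizable no longer has a record-level maximum to update, which is why the assertion above flips from assertTrue to assertFalse. A short sketch of the new behaviour, reusing the bucket and INDEX_NAME built in the test (the assertions restate the ones in the diff):

    BucketNormalizable bucketNormalizable = new BucketNormalizable(bucket, INDEX_NAME);
    // Raising the bucket-influencer children still updates the bucket's anomaly_score...
    assertTrue(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.BUCKET_INFLUENCER, 95.0));
    // ...but there is no longer a max_normalized_probability for RECORD children to feed, so no change is reported.
    assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.RECORD, 42.0));
    assertEquals(95.0, bucket.getAnomalyScore(), EPSILON);
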
@ -19,8 +19,8 @@ public class InfluencerNormalizableTests extends ESTestCase {
|
||||||
@Before
|
@Before
|
||||||
public void setUpInfluencer() {
|
public void setUpInfluencer() {
|
||||||
influencer = new Influencer("foo", "airline", "AAL", new Date(), 600, 1);
|
influencer = new Influencer("foo", "airline", "AAL", new Date(), 600, 1);
|
||||||
influencer.setAnomalyScore(1.0);
|
influencer.setInfluencerScore(1.0);
|
||||||
influencer.setInitialAnomalyScore(2.0);
|
influencer.setInitialInfluencerScore(2.0);
|
||||||
influencer.setProbability(0.05);
|
influencer.setProbability(0.05);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -66,7 +66,7 @@ public class InfluencerNormalizableTests extends ESTestCase {
|
||||||
normalizable.setNormalizedScore(99.0);
|
normalizable.setNormalizedScore(99.0);
|
||||||
|
|
||||||
assertEquals(99.0, normalizable.getNormalizedScore(), EPSILON);
|
assertEquals(99.0, normalizable.getNormalizedScore(), EPSILON);
|
||||||
assertEquals(99.0, influencer.getAnomalyScore(), EPSILON);
|
assertEquals(99.0, influencer.getInfluencerScore(), EPSILON);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetChildrenTypes() {
|
public void testGetChildrenTypes() {
|
||||||
|
|
|
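On the influencer side the normalizer now reads and writes influencer_score rather than anomaly_score. A minimal fragment mirroring the setup and final assertion above (constructor and setters as shown in the diff; the normalization step itself is elided):

    Influencer influencer = new Influencer("foo", "airline", "AAL", new Date(), 600, 1);
    influencer.setInfluencerScore(1.0);           // was setAnomalyScore
    influencer.setInitialInfluencerScore(2.0);    // was setInitialAnomalyScore
    // after the normalizable's score is set to 99.0:
    // assertEquals(99.0, influencer.getInfluencerScore(), EPSILON);   // was getAnomalyScore()
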
@@ -124,7 +124,6 @@ public class ScoresUpdaterTests extends ESTestCase {
         Bucket bucket = generateBucket(new Date(0));
         bucket.setAnomalyScore(42.0);
         bucket.addBucketInfluencer(createTimeBucketInfluencer(bucket.getTimestamp(), 0.04, 42.0));
-        bucket.setMaxNormalizedProbability(50.0);

         Deque<Bucket> buckets = new ArrayDeque<>();
         buckets.add(bucket);
@@ -166,14 +165,12 @@ public class ScoresUpdaterTests extends ESTestCase {
             Bucket bucket = generateBucket(new Date(i * 1000));
             bucket.setAnomalyScore(42.0);
             bucket.addBucketInfluencer(createTimeBucketInfluencer(bucket.getTimestamp(), 0.04, 42.0));
-            bucket.setMaxNormalizedProbability(50.0);
             batch1.add(bucket);
         }

         Bucket secondBatchBucket = generateBucket(new Date(10000 * 1000));
         secondBatchBucket.addBucketInfluencer(createTimeBucketInfluencer(secondBatchBucket.getTimestamp(), 0.04, 42.0));
         secondBatchBucket.setAnomalyScore(42.0);
-        secondBatchBucket.setMaxNormalizedProbability(50.0);
         Deque<Bucket> batch2 = new ArrayDeque<>();
         batch2.add(secondBatchBucket);

@@ -193,7 +190,6 @@ public class ScoresUpdaterTests extends ESTestCase {
         Bucket bucket1 = generateBucket(new Date(0));
         bucket1.setAnomalyScore(42.0);
         bucket1.addBucketInfluencer(createTimeBucketInfluencer(bucket1.getTimestamp(), 0.04, 42.0));
-        bucket1.setMaxNormalizedProbability(50.0);
         List<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>> records = new ArrayList<>();
         Date date = new Date();
         for (int i=0; i<100000; i++) {
@@ -203,7 +199,6 @@ public class ScoresUpdaterTests extends ESTestCase {
         Bucket bucket2 = generateBucket(new Date(10000 * 1000));
         bucket2.addBucketInfluencer(createTimeBucketInfluencer(bucket2.getTimestamp(), 0.04, 42.0));
         bucket2.setAnomalyScore(42.0);
-        bucket2.setMaxNormalizedProbability(50.0);

         Deque<Bucket> batch = new ArrayDeque<>();
         batch.add(bucket1);
@@ -240,7 +235,6 @@ public class ScoresUpdaterTests extends ESTestCase {
         Bucket bucket = generateBucket(new Date(2509200000L));
         bucket.setAnomalyScore(42.0);
         bucket.addBucketInfluencer(createTimeBucketInfluencer(bucket.getTimestamp(), 0.04, 42.0));
-        bucket.setMaxNormalizedProbability(50.0);

         Deque<Bucket> buckets = new ArrayDeque<>();
         buckets.add(bucket);
@@ -258,7 +252,6 @@ public class ScoresUpdaterTests extends ESTestCase {
         Bucket bucket = generateBucket(new Date(3600000));
         bucket.setAnomalyScore(42.0);
         bucket.addBucketInfluencer(createTimeBucketInfluencer(bucket.getTimestamp(), 0.04, 42.0));
-        bucket.setMaxNormalizedProbability(50.0);

         Deque<Bucket> buckets = new ArrayDeque<>();
         buckets.add(bucket);
@@ -277,7 +270,6 @@ public class ScoresUpdaterTests extends ESTestCase {
         Bucket bucket = generateBucket(new Date(2700000));
         bucket.setAnomalyScore(42.0);
         bucket.addBucketInfluencer(createTimeBucketInfluencer(bucket.getTimestamp(), 0.04, 42.0));
-        bucket.setMaxNormalizedProbability(50.0);

         Deque<Bucket> buckets = new ArrayDeque<>();
         buckets.add(bucket);

@@ -30,10 +30,9 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase<AnomalyRecor
         AnomalyRecord anomalyRecord = new AnomalyRecord(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong(), sequenceNum);
         anomalyRecord.setActual(Collections.singletonList(randomDouble()));
         anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
-        anomalyRecord.setAnomalyScore(randomDouble());
         anomalyRecord.setProbability(randomDouble());
-        anomalyRecord.setNormalizedProbability(randomDouble());
-        anomalyRecord.setInitialNormalizedProbability(randomDouble());
+        anomalyRecord.setRecordScore(randomDouble());
+        anomalyRecord.setInitialRecordScore(randomDouble());
         anomalyRecord.setInterim(randomBoolean());
         if (randomBoolean()) {
             anomalyRecord.setFieldName(randomAsciiOfLength(12));

@@ -52,9 +52,6 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         if (randomBoolean()) {
             bucket.setInterim(randomBoolean());
         }
-        if (randomBoolean()) {
-            bucket.setMaxNormalizedProbability(randomDouble());
-        }
         if (randomBoolean()) {
             int size = randomInt(10);
             List<PartitionScore> partitionScores = new ArrayList<>(size);
@@ -130,16 +127,6 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         assertTrue(b1.equals(b2));
     }

-    public void testEquals_GivenDifferentMaxNormalizedProbability() {
-        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
-        bucket1.setMaxNormalizedProbability(55.0);
-        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
-        bucket2.setMaxNormalizedProbability(55.1);
-
-        assertFalse(bucket1.equals(bucket2));
-        assertFalse(bucket2.equals(bucket1));
-    }
-
     public void testEquals_GivenDifferentEventCount() {
         Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setEventCount(3);
@@ -183,9 +170,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {

     public void testEquals_GivenSameNumberOfRecordsButDifferent() {
         AnomalyRecord anomalyRecord1 = new AnomalyRecord("foo", new Date(123), 123, 1);
-        anomalyRecord1.setAnomalyScore(1.0);
+        anomalyRecord1.setRecordScore(1.0);
         AnomalyRecord anomalyRecord2 = new AnomalyRecord("foo", new Date(123), 123, 2);
-        anomalyRecord1.setAnomalyScore(2.0);
+        anomalyRecord1.setRecordScore(2.0);

         Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setRecords(Arrays.asList(anomalyRecord1));
@@ -231,7 +218,6 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         bucket1.setInitialAnomalyScore(92.0);
         bucket1.setEventCount(134);
         bucket1.setInterim(true);
-        bucket1.setMaxNormalizedProbability(33.3);
         bucket1.setRecordCount(4);
         bucket1.setRecords(Arrays.asList(record));
         bucket1.addBucketInfluencer(bucketInfluencer);
@@ -241,7 +227,6 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         bucket2.setInitialAnomalyScore(92.0);
         bucket2.setEventCount(134);
         bucket2.setInterim(true);
-        bucket2.setMaxNormalizedProbability(33.3);
         bucket2.setRecordCount(4);
         bucket2.setRecords(Arrays.asList(record));
         bucket2.addBucketInfluencer(bucketInfluencer);

@@ -16,12 +16,12 @@ import java.util.Date;

 public class InfluencerTests extends AbstractSerializingTestCase<Influencer> {

     public Influencer createTestInstance(String jobId) {
         Influencer influencer = new Influencer(jobId, randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 20),
                 new Date(randomNonNegativeLong()), randomNonNegativeLong(), randomIntBetween(1, 1000));
         influencer.setInterim(randomBoolean());
-        influencer.setAnomalyScore(randomDouble());
-        influencer.setInitialAnomalyScore(randomDouble());
+        influencer.setInfluencerScore(randomDouble());
+        influencer.setInitialInfluencerScore(randomDouble());
         influencer.setProbability(randomDouble());
         return influencer;
     }
@@ -50,12 +50,12 @@ public class InfluencerTests extends AbstractSerializingTestCase<Influencer> {
     }

     public void testToXContentDoesNotIncludeNameValueFieldWhenReservedWord() throws IOException {
-        Influencer influencer = new Influencer("foo", AnomalyRecord.ANOMALY_SCORE.getPreferredName(), "bar", new Date(), 300L, 0);
+        Influencer influencer = new Influencer("foo", Influencer.INFLUENCER_SCORE.getPreferredName(), "bar", new Date(), 300L, 0);
         XContentBuilder builder = toXContent(influencer, XContentType.JSON);
         XContentParser parser = createParser(builder);
-        Object serialisedFieldValue = parser.map().get(AnomalyRecord.ANOMALY_SCORE.getPreferredName());
+        Object serialisedFieldValue = parser.map().get(Influencer.INFLUENCER_SCORE.getPreferredName());
+        assertNotNull(serialisedFieldValue);
         assertNotEquals("bar", serialisedFieldValue);
-        assertEquals(0.0, (Double)serialisedFieldValue, 0.0001);
     }

 }

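For orientation only, here is a minimal Java sketch (not part of this diff) of how the renamed setters line up with the old ones. It uses only the constructors and accessors exercised by the tests above; the class name, the "my-job" job id, the field names and the score values are arbitrary, and imports for the result classes are omitted.

import java.util.Date;

class ScoreFieldRenameExample {                      // illustrative only; not part of this change
    static void populate() {
        AnomalyRecord record = new AnomalyRecord("my-job", new Date(), 600, 1);   // "my-job" is arbitrary
        record.setProbability(0.01);
        record.setRecordScore(75.0);                 // was setNormalizedProbability(...)
        record.setInitialRecordScore(75.0);          // was setInitialNormalizedProbability(...)

        Influencer influencer = new Influencer("my-job", "airline", "AAL", new Date(), 600, 1);
        influencer.setProbability(0.01);
        influencer.setInfluencerScore(80.0);         // was setAnomalyScore(...)
        influencer.setInitialInfluencerScore(80.0);  // was setInitialAnomalyScore(...)
    }
}
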
@@ -52,9 +52,9 @@ public class PerPartitionMaxProbabilitiesTests extends AbstractSerializingTestCa
         assertEquals(2, pProbs.size());
         for (PerPartitionMaxProbabilities.PartitionProbability pProb : pProbs) {
             if (pProb.getPartitionValue().equals("A")) {
-                assertEquals(40.0, pProb.getMaxNormalizedProbability(), 0.0001);
+                assertEquals(40.0, pProb.getMaxRecordScore(), 0.0001);
             } else {
-                assertEquals(90.0, pProb.getMaxNormalizedProbability(), 0.0001);
+                assertEquals(90.0, pProb.getMaxRecordScore(), 0.0001);
             }
         }
     }
@@ -73,10 +73,10 @@ public class PerPartitionMaxProbabilitiesTests extends AbstractSerializingTestCa
         assertEquals(90.0, ppMax.getMaxProbabilityForPartition("B"), 0.0001);
     }

-    private AnomalyRecord createAnomalyRecord(String partitionFieldValue, double normalizedProbability) {
+    private AnomalyRecord createAnomalyRecord(String partitionFieldValue, double recordScore) {
         AnomalyRecord record = new AnomalyRecord("foo", new Date(), 600, 1);
         record.setPartitionFieldValue(partitionFieldValue);
-        record.setNormalizedProbability(normalizedProbability);
+        record.setRecordScore(recordScore);
         return record;
     }
 }

@@ -50,10 +50,6 @@
       "anomaly_score": {
         "type": "double",
         "description": "Filter for the most anomalous buckets"
-      },
-      "max_normalized_probability": {
-        "type": "double",
-        "description": "Filter for buckets containing the most anomalous records"
       }
     }
   },

@@ -31,9 +31,9 @@
       "type": "string",
       "description": "end timestamp for the requested influencers"
     },
-    "anomaly_score": {
+    "influencer_score": {
       "type": "double",
-      "description": "anomaly score threshold for the requested influencers"
+      "description": "influencer score threshold for the requested influencers"
     },
     "sort": {
       "type": "string",

@@ -36,10 +36,7 @@
       "type": "string",
       "description": "End time filter for records"
     },
-    "anomaly_score": {
-      "type": "double"
-    },
-    "max_normalized_probability": {
+    "record_score": {
       "type": "double"
     },
     "partition_value": {

@@ -10,8 +10,8 @@ setup:
             type: keyword
           "timestamp":
             type: date
-          "anomaly_score":
-            type: float
+          "influencer_score":
+            type: double
           "result_type":
             type: keyword
   - do:
@@ -25,7 +25,7 @@ setup:
             "timestamp": "2016-06-01T00:00:00Z",
             "influencer_field_name": "foo",
             "influencer_field_value": "bar",
-            "anomaly_score": 80.0,
+            "influencer_score": 80.0,
             "result_type" : "influencer",
             "bucket_span" : 1,
             "sequence_num" : 1
@@ -42,7 +42,7 @@ setup:
             "timestamp": "2016-06-02T00:00:00Z",
             "influencer_field_name": "foo",
             "influencer_field_value": "zoo",
-            "anomaly_score": 50.0,
+            "influencer_score": 50.0,
             "result_type" : "influencer",
             "bucket_span" : 1,
             "sequence_num" : 2

@@ -23,7 +23,7 @@ setup:
             "job_id": "farequote",
             "result_type": "record",
             "timestamp": "2016-06-01T00:00:00Z",
-            "anomaly_score": 60.0,
+            "record_score": 60.0,
             "bucket_span": 1,
             "sequence_num": 1
           }
@@ -38,7 +38,7 @@ setup:
             "job_id": "farequote",
             "result_type": "record",
             "timestamp": "2016-06-02T00:00:00Z",
-            "anomaly_score": 80.0,
+            "record_score": 80.0,
             "bucket_span": 1,
             "sequence_num": 2
           }

@@ -101,7 +101,7 @@ setup:
             "timestamp": "2016-06-02T00:00:00Z",
             "influencer_field_name": "foo",
             "influencer_field_value": "zoo",
-            "anomaly_score": 50.0,
+            "influencer_score": 50.0,
             "bucket_span": 1,
             "sequence_num": 3
           }
@@ -118,7 +118,7 @@ setup:
             "timestamp": "2016-05-01T00:00:00Z",
             "influencer_field_name": "foo",
             "influencer_field_value": "zoo",
-            "anomaly_score": 50.0,
+            "influencer_score": 50.0,
             "bucket_span": 1,
             "sequence_num": 4
           }