Use jobId_timestamp_bucketSpan as ID for bucket (elastic/elasticsearch#375)

Removes the reliance on ES autogenerated UUIDs and instead derives each bucket's document ID as `{jobId}_{timestamp}_{bucketSpan}`.

Original commit: elastic/x-pack-elasticsearch@3cd774edd8

parent b526d7920d
commit 2b2307a82b
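Because the ID is now deterministic, writing the same bucket twice targets the same document instead of creating a duplicate. A minimal sketch of how the ID is composed (the `bucketId` helper is illustrative only; the real logic is the new `Bucket.getId()` in the diff below, with the timestamp in epoch milliseconds and the bucket span in seconds):

```java
import java.util.Date;

public final class BucketIdSketch {
    // Illustrative helper mirroring Bucket.getId(): jobId + "_" + epochMs + "_" + bucketSpan.
    static String bucketId(String jobId, Date timestamp, long bucketSpan) {
        return jobId + "_" + timestamp.getTime() + "_" + bucketSpan;
    }

    public static void main(String[] args) {
        // 2016-06-01T00:00:00Z is epoch 1464739200000 ms; with a 1-second span this
        // prints "farequote_1464739200000_1", matching the REST test fixtures below.
        System.out.println(bucketId("farequote", new Date(1464739200000L), 1));
    }
}
```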
@@ -32,8 +32,6 @@ class ElasticsearchBatchedBucketsIterator extends ElasticsearchBatchedResultsIterator
         } catch (IOException e) {
             throw new ElasticsearchParseException("failed to parse bucket", e);
         }
-        Bucket bucket = Bucket.PARSER.apply(parser, () -> parseFieldMatcher);
-        bucket.setId(hit.getId());
-        return bucket;
+        return Bucket.PARSER.apply(parser, () -> parseFieldMatcher);
     }
 }
@@ -363,7 +363,6 @@ public class ElasticsearchJobProvider implements JobProvider {
                 throw new ElasticsearchParseException("failed to parse bucket", e);
             }
             Bucket bucket = Bucket.PARSER.apply(parser, () -> parseFieldMatcher);
-            bucket.setId(hit.getId());

             if (includeInterim || bucket.isInterim() == false) {
                 results.add(bucket);
@@ -412,7 +411,6 @@ public class ElasticsearchJobProvider implements JobProvider {
                 throw new ElasticsearchParseException("failed to parse bucket", e);
             }
             Bucket bucket = Bucket.PARSER.apply(parser, () -> parseFieldMatcher);
-            bucket.setId(hit.getId());

             // don't return interim buckets if not requested
             if (bucket.isInterim() && query.isIncludeInterim() == false) {
@@ -79,7 +79,6 @@ public class JobResultsPersister extends AbstractComponent {
             IndexResponse response = client.prepareIndex(indexName, Result.TYPE.getPreferredName())
                     .setSource(content)
                     .execute().actionGet();
-            bucket.setId(response.getId());
             persistBucketInfluencersStandalone(jobId, bucket.getId(), bucket.getBucketInfluencers(), bucket.getTimestamp(),
                     bucket.isInterim());

@@ -15,7 +15,10 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
-import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.xpack.prelert.job.Job;
+import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.prelert.utils.time.TimeUtils;

 import java.io.IOException;
@@ -36,7 +39,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
     /*
     * Field Names
     */
-    public static final ParseField JOB_ID = new ParseField("jobId");
+    public static final ParseField JOB_ID = Job.ID;
     public static final ParseField TIMESTAMP = new ParseField("timestamp");
     public static final ParseField ANOMALY_SCORE = new ParseField("anomalyScore");
     public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initialAnomalyScore");
@@ -60,11 +63,11 @@ public class Bucket extends ToXContentToBytes implements Writeable {
     public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE);

     public static final ConstructingObjectParser<Bucket, ParseFieldMatcherSupplier> PARSER =
-            new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a -> new Bucket((String) a[0]));
+            new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]));

     static {
         PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
-        PARSER.declareField(Bucket::setTimestamp, p -> {
+        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
             if (p.currentToken() == Token.VALUE_NUMBER) {
                 return new Date(p.longValue());
             } else if (p.currentToken() == Token.VALUE_STRING) {
@@ -72,6 +75,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
             }
             throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + TIMESTAMP.getPreferredName() + "]");
         }, TIMESTAMP, ValueType.VALUE);
+        PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
         PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE);
         PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
         PARSER.declareDouble(Bucket::setMaxNormalizedProbability, MAX_NORMALIZED_PROBABILITY);
@@ -80,20 +84,16 @@ public class Bucket extends ToXContentToBytes implements Writeable {
         PARSER.declareLong(Bucket::setEventCount, EVENT_COUNT);
         PARSER.declareObjectArray(Bucket::setRecords, AnomalyRecord.PARSER, RECORDS);
         PARSER.declareObjectArray(Bucket::setBucketInfluencers, BucketInfluencer.PARSER, BUCKET_INFLUENCERS);
-        PARSER.declareLong(Bucket::setBucketSpan, BUCKET_SPAN);
         PARSER.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS);
         PARSER.declareObjectArray(Bucket::setPartitionScores, PartitionScore.PARSER, PARTITION_SCORES);
         PARSER.declareString((bucket, s) -> {}, Result.RESULT_TYPE);
     }

     private final String jobId;
-    private String id;
-    private Date timestamp;
+    private final Date timestamp;
+    private final long bucketSpan;
     private double anomalyScore;
-    private long bucketSpan;
-
     private double initialAnomalyScore;
-
     private double maxNormalizedProbability;
     private int recordCount;
     private List<AnomalyRecord> records = Collections.emptyList();
@@ -105,17 +105,16 @@ public class Bucket extends ToXContentToBytes implements Writeable {
     private Map<String, Double> perPartitionMaxProbability = Collections.emptyMap();
     private List<PartitionScore> partitionScores = Collections.emptyList();

-    public Bucket(String jobId) {
+    public Bucket(String jobId, Date timestamp, long bucketSpan) {
         this.jobId = jobId;
+        this.timestamp = ExceptionsHelper.requireNonNull(timestamp, TIMESTAMP.getPreferredName());
+        this.bucketSpan = bucketSpan;
     }

     @SuppressWarnings("unchecked")
     public Bucket(StreamInput in) throws IOException {
         jobId = in.readString();
-        id = in.readOptionalString();
-        if (in.readBoolean()) {
-            timestamp = new Date(in.readLong());
-        }
+        timestamp = new Date(in.readLong());
         anomalyScore = in.readDouble();
         bucketSpan = in.readLong();
         initialAnomalyScore = in.readDouble();
@@ -134,12 +133,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(jobId);
-        out.writeOptionalString(id);
-        boolean hasTimestamp = timestamp != null;
-        out.writeBoolean(hasTimestamp);
-        if (hasTimestamp) {
-            out.writeLong(timestamp.getTime());
-        }
+        out.writeLong(timestamp.getTime());
         out.writeDouble(anomalyScore);
         out.writeLong(bucketSpan);
         out.writeDouble(initialAnomalyScore);
@@ -159,9 +153,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         builder.field(JOB_ID.getPreferredName(), jobId);
-        if (timestamp != null) {
-            builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime());
-        }
+        builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime());
         builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
         builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
         builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore);
@@ -186,11 +178,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
     }

     public String getId() {
-        return id;
-    }
-
-    public void setId(String id) {
-        this.id = id;
+        return jobId + "_" + timestamp.getTime() + "_" + bucketSpan;
     }

     /**
@@ -205,10 +193,6 @@ public class Bucket extends ToXContentToBytes implements Writeable {
         return timestamp;
     }

-    public void setTimestamp(Date timestamp) {
-        this.timestamp = timestamp;
-    }
-
     /**
     * Bucketspan expressed in seconds
     */
@@ -216,13 +200,6 @@ public class Bucket extends ToXContentToBytes implements Writeable {
         return bucketSpan;
     }

-    /**
-    * Bucketspan expressed in seconds
-    */
-    public void setBucketSpan(long bucketSpan) {
-        this.bucketSpan = bucketSpan;
-    }
-
     public double getAnomalyScore() {
         return anomalyScore;
     }
@@ -29,7 +29,7 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<GetBucketAction.Response> {
         List<Bucket> hits = new ArrayList<>(listSize);
         for (int j = 0; j < listSize; j++) {
             String jobId = "foo";
-            Bucket bucket = new Bucket(jobId);
+            Bucket bucket = new Bucket(jobId, new Date(randomLong()), randomPositiveLong());
             if (randomBoolean()) {
                 bucket.setAnomalyScore(randomDouble());
             }
@@ -47,15 +47,9 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<GetBucketAction.Response> {
                 }
                 bucket.setBucketInfluencers(bucketInfluencers);
             }
-            if (randomBoolean()) {
-                bucket.setBucketSpan(randomPositiveLong());
-            }
             if (randomBoolean()) {
                 bucket.setEventCount(randomPositiveLong());
             }
-            if (randomBoolean()) {
-                bucket.setId(randomAsciiOfLengthBetween(1, 20));
-            }
             if (randomBoolean()) {
                 bucket.setInitialAnomalyScore(randomDouble());
             }
@@ -104,9 +98,6 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<GetBucketAction.Response> {
                 }
                 bucket.setRecords(records);
             }
-            if (randomBoolean()) {
-                bucket.setTimestamp(new Date(randomLong()));
-            }
             hits.add(bucket);
         }
         QueryPage<Bucket> buckets = new QueryPage<>(hits, listSize, Bucket.RESULTS_FIELD);
@@ -189,9 +189,9 @@ public class PrelertJobIT extends ESRestTestCase {
         assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404));
         assertThat(e.getMessage(), containsString("No known job with id '1'"));

-        addBucketResult("1", "1234");
-        addBucketResult("1", "1235");
-        addBucketResult("1", "1236");
+        addBucketResult("1", "1234", 1);
+        addBucketResult("1", "1235", 1);
+        addBucketResult("1", "1236", 1);
         Response response = client().performRequest("get", PrelertPlugin.BASE_PATH + "results/1/buckets", params);
         assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
         String responseAsString = responseEntityToString(response);
@@ -307,7 +307,7 @@ public class PrelertJobIT extends ESRestTestCase {
         assertThat(e.getMessage(), containsString("Cannot resume job 'farequote' while its status is CLOSED"));
     }

-    private Response addBucketResult(String jobId, String timestamp) throws Exception {
+    private Response addBucketResult(String jobId, String timestamp, long bucketSpan) throws Exception {
         try {
             client().performRequest("put", "prelertresults-" + jobId, Collections.emptyMap(), new StringEntity(RESULT_MAPPING));
         } catch (ResponseException e) {
@@ -316,9 +316,12 @@ public class PrelertJobIT extends ESRestTestCase {
             assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
         }

-        String bucketResult =
-                String.format(Locale.ROOT, "{\"jobId\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\"}", jobId, timestamp);
-        return client().performRequest("put", "prelertresults-" + jobId + "/result/" + timestamp,
+        String bucketResult = String.format(Locale.ROOT,
+                "{\"jobId\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucketSpan\": \"%s\"}",
+                jobId, timestamp, bucketSpan);
+        String id = String.format(Locale.ROOT,
+                "%s_%s_%s", jobId, timestamp, bucketSpan);
+        return client().performRequest("put", "prelertresults-" + jobId + "/result/" + id,
                 Collections.singletonMap("refresh", "true"), new StringEntity(bucketResult));
     }

@@ -212,6 +212,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
         Map<String, Object> map = new HashMap<>();
         map.put("jobId", "foo");
         map.put("timestamp", now.getTime());
+        map.put("bucketSpan", 22);
         source.add(map);

         ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);
@@ -245,6 +246,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
         Map<String, Object> map = new HashMap<>();
         map.put("jobId", "foo");
         map.put("timestamp", now.getTime());
+        map.put("bucketSpan", 22);
         source.add(map);

         ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);
@@ -279,6 +281,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
         Map<String, Object> map = new HashMap<>();
         map.put("jobId", "foo");
         map.put("timestamp", now.getTime());
+        map.put("bucketSpan", 22);
         source.add(map);

         ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);
@@ -343,6 +346,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
         Map<String, Object> map = new HashMap<>();
         map.put("jobId", "foo");
         map.put("timestamp", now.getTime());
+        map.put("bucketSpan", 22);
         source.add(map);

         ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);
@@ -371,6 +375,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
         Map<String, Object> map = new HashMap<>();
         map.put("jobId", "foo");
         map.put("timestamp", now.getTime());
+        map.put("bucketSpan", 22);
         map.put("isInterim", true);
         source.add(map);

@@ -545,8 +550,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
     public void testexpandBucket() throws InterruptedException, ExecutionException, IOException {
         String jobId = "TestJobIdentification";
         Date now = new Date();
-        Bucket bucket = new Bucket("foo");
-        bucket.setTimestamp(now);
+        Bucket bucket = new Bucket("foo", now, 22);

         List<Map<String, Object>> source = new ArrayList<>();
         for (int i = 0; i < 400; i++) {
@@ -577,8 +581,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
             throws InterruptedException, ExecutionException, IOException {
         String jobId = "TestJobIdentification";
         Date now = new Date();
-        Bucket bucket = new Bucket("foo");
-        bucket.setTimestamp(now);
+        Bucket bucket = new Bucket("foo", now, 22);

         List<Map<String, Object>> source = new ArrayList<>();
         for (int i = 0; i < 600; i++) {
@@ -998,8 +1001,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
     }

     private Bucket createBucketAtEpochTime(long epoch) {
-        Bucket b = new Bucket("foo");
-        b.setTimestamp(new Date(epoch));
+        Bucket b = new Bucket("foo", new Date(epoch), 123);
         b.setMaxNormalizedProbability(10.0);
         return b;
     }
@@ -42,12 +42,8 @@ public class JobResultsPersisterTests extends ESTestCase {
                 .prepareBulk(response);

         Client client = clientBuilder.build();
-        Bucket bucket = new Bucket("foo");
-        bucket.setId("1");
-        bucket.setTimestamp(new Date());
-        bucket.setId(responseId);
+        Bucket bucket = new Bucket("foo", new Date(), 123456);
         bucket.setAnomalyScore(99.9);
-        bucket.setBucketSpan(123456);
         bucket.setEventCount(57);
         bucket.setInitialAnomalyScore(88.8);
         bucket.setMaxNormalizedProbability(42.0);
@@ -29,11 +29,13 @@ import java.util.stream.Collectors;
 public class AutodetectResultsParserTests extends ESTestCase {
     private static final double EPSILON = 0.000001;

-    public static final String METRIC_OUTPUT_SAMPLE = "[{\"bucket\": {\"jobId\":\"foo\",\"timestamp\":1359450000000,\"records\":[],"
+    public static final String METRIC_OUTPUT_SAMPLE = "[{\"bucket\": {\"jobId\":\"foo\",\"timestamp\":1359450000000,"
+            + "\"bucketSpan\":22, \"records\":[],"
             + "\"maxNormalizedProbability\":0, \"anomalyScore\":0,\"recordCount\":0,\"eventCount\":806,\"bucketInfluencers\":["
             + "{\"jobId\":\"foo\",\"anomalyScore\":0, \"probability\":0.0, \"influencerFieldName\":\"bucketTime\","
             + "\"initialAnomalyScore\":0.0}]}},{\"quantiles\": {\"jobId\":\"foo\", \"quantileState\":\"[normaliser 1.1, normaliser 2.1]\"}}"
-            + ",{\"bucket\": {\"jobId\":\"foo\",\"timestamp\":1359453600000,\"records\":[{\"jobId\":\"foo\",\"probability\":0.0637541,"
+            + ",{\"bucket\": {\"jobId\":\"foo\",\"timestamp\":1359453600000,\"bucketSpan\":22,"
+            + "\"records\":[{\"jobId\":\"foo\",\"probability\":0.0637541,"
             + "\"byFieldName\":\"airline\",\"byFieldValue\":\"JZA\", \"typical\":[1020.08],\"actual\":[1042.14],"
             + "\"fieldName\":\"responsetime\",\"function\":\"max\",\"partitionFieldName\":\"\",\"partitionFieldValue\":\"\"},"
             + "{\"jobId\":\"foo\",\"probability\":0.00748292,\"byFieldName\":\"airline\",\"byFieldValue\":\"AMX\", "
@@ -38,8 +38,7 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<AutodetectResult> {
         FlushAcknowledgement flushAcknowledgement;
         String jobId = "foo";
         if (randomBoolean()) {
-            bucket = new Bucket(jobId);
-            bucket.setId(randomAsciiOfLengthBetween(1, 20));
+            bucket = new Bucket(jobId, new Date(randomLong()), randomPositiveLong());
         } else {
             bucket = null;
         }
@@ -23,7 +23,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     @Override
     protected Bucket createTestInstance() {
         String jobId = "foo";
-        Bucket bucket = new Bucket(jobId);
+        Bucket bucket = new Bucket(jobId, new Date(randomLong()), randomPositiveLong());

         if (randomBoolean()) {
             bucket.setAnomalyScore(randomDouble());
@@ -42,15 +42,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
             }
             bucket.setBucketInfluencers(bucketInfluencers);
         }
-        if (randomBoolean()) {
-            bucket.setBucketSpan(randomPositiveLong());
-        }
         if (randomBoolean()) {
             bucket.setEventCount(randomPositiveLong());
         }
-        if (randomBoolean()) {
-            bucket.setId(randomAsciiOfLengthBetween(1, 20));
-        }
         if (randomBoolean()) {
             bucket.setInitialAnomalyScore(randomDouble());
         }
@@ -99,9 +93,6 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
             }
             bucket.setRecords(records);
         }
-        if (randomBoolean()) {
-            bucket.setTimestamp(new Date(randomLong()));
-        }
         return bucket;
     }

@@ -116,22 +107,22 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenDifferentClass() {
-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(randomLong()), randomPositiveLong());
         assertFalse(bucket.equals("a string"));
     }

     public void testEquals_GivenTwoDefaultBuckets() {
-        Bucket bucket1 = new Bucket("foo");
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);

         assertTrue(bucket1.equals(bucket2));
         assertTrue(bucket2.equals(bucket1));
     }

     public void testEquals_GivenDifferentAnomalyScore() {
-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setAnomalyScore(3.0);
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setAnomalyScore(2.0);

         assertFalse(bucket1.equals(bucket2));
@@ -139,17 +130,15 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenSameDates() {
-        Bucket b1 = new Bucket("foo");
-        b1.setTimestamp(new Date(1234567890L));
-        Bucket b2 = new Bucket("foo");
-        b2.setTimestamp(new Date(1234567890L));
-
+        Bucket b1 = new Bucket("foo", new Date(1234567890L), 123);
+        Bucket b2 = new Bucket("foo", new Date(1234567890L), 123);
         assertTrue(b1.equals(b2));
     }

     public void testEquals_GivenDifferentMaxNormalizedProbability() {
-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setMaxNormalizedProbability(55.0);
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setMaxNormalizedProbability(55.1);

         assertFalse(bucket1.equals(bucket2));
@@ -157,9 +146,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenDifferentEventCount() {
-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setEventCount(3);
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setEventCount(100);

         assertFalse(bucket1.equals(bucket2));
@@ -167,9 +156,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenDifferentRecordCount() {
-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setRecordCount(300);
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setRecordCount(400);

         assertFalse(bucket1.equals(bucket2));
@@ -177,10 +166,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenOneHasRecordsAndTheOtherDoesNot() {
-        Bucket bucket1 = new Bucket("foo");
-        bucket1.setId("1");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setRecords(Arrays.asList(new AnomalyRecord("foo")));
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setRecords(null);

         assertFalse(bucket1.equals(bucket2));
@@ -188,9 +176,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenDifferentNumberOfRecords() {
-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setRecords(Arrays.asList(new AnomalyRecord("foo")));
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setRecords(Arrays.asList(new AnomalyRecord("foo"), new AnomalyRecord("foo")));

         assertFalse(bucket1.equals(bucket2));
@@ -203,9 +191,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         AnomalyRecord anomalyRecord2 = new AnomalyRecord("foo");
         anomalyRecord1.setAnomalyScore(2.0);

-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setRecords(Arrays.asList(anomalyRecord1));
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setRecords(Arrays.asList(anomalyRecord2));

         assertFalse(bucket1.equals(bucket2));
@@ -213,9 +201,9 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenDifferentIsInterim() {
-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         bucket1.setInterim(true);
-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         bucket2.setInterim(false);

         assertFalse(bucket1.equals(bucket2));
@@ -223,13 +211,13 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenDifferentBucketInfluencers() {
-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", new Date(123), 123);
         BucketInfluencer influencer1 = new BucketInfluencer("foo");
         influencer1.setInfluencerFieldName("foo");
         bucket1.addBucketInfluencer(influencer1);

-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", new Date(123), 123);
         BucketInfluencer influencer2 = new BucketInfluencer("foo");
         influencer2.setInfluencerFieldName("bar");
         bucket2.addBucketInfluencer(influencer2);
@@ -243,29 +231,25 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         BucketInfluencer bucketInfluencer = new BucketInfluencer("foo");
         Date date = new Date();

-        Bucket bucket1 = new Bucket("foo");
+        Bucket bucket1 = new Bucket("foo", date, 123);
         bucket1.setAnomalyScore(42.0);
         bucket1.setInitialAnomalyScore(92.0);
         bucket1.setEventCount(134);
-        bucket1.setId("13546461");
         bucket1.setInterim(true);
         bucket1.setMaxNormalizedProbability(33.3);
         bucket1.setRecordCount(4);
         bucket1.setRecords(Arrays.asList(record));
         bucket1.addBucketInfluencer(bucketInfluencer);
-        bucket1.setTimestamp(date);

-        Bucket bucket2 = new Bucket("foo");
+        Bucket bucket2 = new Bucket("foo", date, 123);
         bucket2.setAnomalyScore(42.0);
         bucket2.setInitialAnomalyScore(92.0);
         bucket2.setEventCount(134);
-        bucket2.setId("13546461");
         bucket2.setInterim(true);
         bucket2.setMaxNormalizedProbability(33.3);
         bucket2.setRecordCount(4);
         bucket2.setRecords(Arrays.asList(record));
         bucket2.addBucketInfluencer(bucketInfluencer);
-        bucket2.setTimestamp(date);

         assertTrue(bucket1.equals(bucket2));
         assertTrue(bucket2.equals(bucket1));
@@ -273,7 +257,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testIsNormalisable_GivenNullBucketInfluencers() {
-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.setBucketInfluencers(null);
         bucket.setAnomalyScore(90.0);

@@ -281,7 +265,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testIsNormalisable_GivenEmptyBucketInfluencers() {
-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.setBucketInfluencers(Collections.emptyList());
         bucket.setAnomalyScore(90.0);

@@ -289,7 +273,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testIsNormalisable_GivenAnomalyScoreIsZeroAndRecordCountIsZero() {
-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.addBucketInfluencer(new BucketInfluencer("foo"));
         bucket.setAnomalyScore(0.0);
         bucket.setRecordCount(0);
@@ -298,7 +282,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testIsNormalisable_GivenAnomalyScoreIsZeroAndRecordCountIsNonZero() {
-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.addBucketInfluencer(new BucketInfluencer("foo"));
         bucket.setAnomalyScore(0.0);
         bucket.setRecordCount(1);
@@ -307,7 +291,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testIsNormalisable_GivenAnomalyScoreIsNonZeroAndRecordCountIsZero() {
-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.addBucketInfluencer(new BucketInfluencer("foo"));
         bucket.setAnomalyScore(1.0);
         bucket.setRecordCount(0);
@@ -316,7 +300,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testIsNormalisable_GivenAnomalyScoreIsNonZeroAndRecordCountIsNonZero() {
-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.addBucketInfluencer(new BucketInfluencer("foo"));
         bucket.setAnomalyScore(1.0);
         bucket.setRecordCount(1);
@@ -332,7 +316,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         records.add(createAnomalyRecord("B", 15.0));
         records.add(createAnomalyRecord("B", 45.0));

-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.setRecords(records);

         Map<String, Double> ppProb = bucket.calcMaxNormalizedProbabilityPerPartition();
@@ -354,7 +338,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
         pScore.add(new PartitionScore("pf", "pv4", 60, 0.1));
         pScore.add(new PartitionScore("pf", "pv2", 40, 0.1));

-        Bucket bucket = new Bucket("foo");
+        Bucket bucket = new Bucket("foo", new Date(123), 123);
         bucket.setPartitionScores(pScore);

         double anomalyScore = bucket.partitionAnomalyScore("pv1");
@@ -17,8 +17,8 @@ setup:
       index:
         index: prelertresults-farequote
        type: result
-        id: 1
-        body: { "jobId": "farequote", "result_type": "bucket", "timestamp": "2016-06-01T00:00:00Z" }
+        id: "farequote_1464739200000_1"
+        body: { "jobId": "farequote", "result_type": "bucket", "timestamp": "2016-06-01T00:00:00Z", "bucketSpan":1 }

   - do:
       indices.refresh:
@@ -51,22 +51,22 @@ setup:
       index:
         index: prelertresults-foo
        type: result
-        id: 1
-        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-06-02T00:00:00Z" }
+        id: "foo_1464825600000_1"
+        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-06-02T00:00:00Z", "bucketSpan":1 }

   - do:
       index:
         index: prelertresults-foo
        type: result
-        id: 2
-        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-06-01T12:00:00Z" }
+        id: "foo_1464782400000_1"
+        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-06-01T12:00:00Z", "bucketSpan":1 }

   - do:
       index:
         index: prelertresults-foo
        type: result
-        id: 3
-        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-05-01T00:00:00Z" }
+        id: "foo_1462060800000_1"
+        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-05-01T00:00:00Z", "bucketSpan":1 }

   - do:
       indices.refresh:
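With this change, `timestamp` and `bucketSpan` become constructor arguments, so a `Bucket` can never exist without the fields its ID is derived from. A short sketch of the resulting contract (a hypothetical snippet, assuming the post-commit `Bucket` class is on the classpath; the package path is inferred from the imports in the diff above):

```java
import java.util.Date;

import org.elasticsearch.xpack.prelert.job.results.Bucket; // assumed package path

class BucketIdContractDemo {
    static void demo() {
        Bucket bucket = new Bucket("foo", new Date(1459123200000L), 600);
        // The ID is derived, never assigned: setId(), setTimestamp() and
        // setBucketSpan() are all removed by this commit.
        String id = bucket.getId(); // "foo_1459123200000_600"
        // A null timestamp is rejected up front by the constructor
        // (ExceptionsHelper.requireNonNull), so every Bucket can produce an ID.
    }
}
```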