[ML] Add detectorIndex to model plot and forecast (elastic/x-pack-elasticsearch#2796)

Adds detector_index to model plots and forecasts.

Relates to elastic/x-pack-elasticsearch#2547

Corresponding ml-cpp change: elastic/machine-learning-cpp#361

Original commit: elastic/x-pack-elasticsearch@5927d8578e
This commit is contained in:
Hendrik Muhs 2017-10-27 12:54:42 +02:00 committed by GitHub
parent c7e94b3b4c
commit f74e680142
7 changed files with 90 additions and 64 deletions

View File

@ -34,8 +34,6 @@ public class Forecast implements ToXContentObject, Writeable {
public static final ParseField FORECAST_ID = new ParseField("forecast_id"); public static final ParseField FORECAST_ID = new ParseField("forecast_id");
public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name");
public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value");
public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name");
public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value");
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
public static final ParseField MODEL_FEATURE = new ParseField("model_feature"); public static final ParseField MODEL_FEATURE = new ParseField("model_feature");
@ -43,9 +41,11 @@ public class Forecast implements ToXContentObject, Writeable {
public static final ParseField FORECAST_UPPER = new ParseField("forecast_upper"); public static final ParseField FORECAST_UPPER = new ParseField("forecast_upper");
public static final ParseField FORECAST_PREDICTION = new ParseField("forecast_prediction"); public static final ParseField FORECAST_PREDICTION = new ParseField("forecast_prediction");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
public static final ConstructingObjectParser<Forecast, Void> PARSER = public static final ConstructingObjectParser<Forecast, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a -> new Forecast((String) a[0], (long) a[1], (Date) a[2], (long) a[3])); new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a ->
new Forecast((String) a[0], (long) a[1], (Date) a[2], (long) a[3], (int) a[4]));
static { static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
@ -60,11 +60,10 @@ public class Forecast implements ToXContentObject, Writeable {
+ Result.TIMESTAMP.getPreferredName() + "]"); + Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE); }, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX);
PARSER.declareString((modelForecast, s) -> {}, Result.RESULT_TYPE); PARSER.declareString((modelForecast, s) -> {}, Result.RESULT_TYPE);
PARSER.declareString(Forecast::setPartitionFieldName, PARTITION_FIELD_NAME); PARSER.declareString(Forecast::setPartitionFieldName, PARTITION_FIELD_NAME);
PARSER.declareString(Forecast::setPartitionFieldValue, PARTITION_FIELD_VALUE); PARSER.declareString(Forecast::setPartitionFieldValue, PARTITION_FIELD_VALUE);
PARSER.declareString(Forecast::setOverFieldName, OVER_FIELD_NAME);
PARSER.declareString(Forecast::setOverFieldValue, OVER_FIELD_VALUE);
PARSER.declareString(Forecast::setByFieldName, BY_FIELD_NAME); PARSER.declareString(Forecast::setByFieldName, BY_FIELD_NAME);
PARSER.declareString(Forecast::setByFieldValue, BY_FIELD_VALUE); PARSER.declareString(Forecast::setByFieldValue, BY_FIELD_VALUE);
PARSER.declareString(Forecast::setModelFeature, MODEL_FEATURE); PARSER.declareString(Forecast::setModelFeature, MODEL_FEATURE);
@ -77,10 +76,9 @@ public class Forecast implements ToXContentObject, Writeable {
private final long forecastId; private final long forecastId;
private final Date timestamp; private final Date timestamp;
private final long bucketSpan; private final long bucketSpan;
private int detectorIndex;
private String partitionFieldName; private String partitionFieldName;
private String partitionFieldValue; private String partitionFieldValue;
private String overFieldName;
private String overFieldValue;
private String byFieldName; private String byFieldName;
private String byFieldValue; private String byFieldValue;
private String modelFeature; private String modelFeature;
@ -88,11 +86,12 @@ public class Forecast implements ToXContentObject, Writeable {
private double forecastUpper; private double forecastUpper;
private double forecastPrediction; private double forecastPrediction;
public Forecast(String jobId, long forecastId, Date timestamp, long bucketSpan) { public Forecast(String jobId, long forecastId, Date timestamp, long bucketSpan, int detectorIndex) {
this.jobId = jobId; this.jobId = jobId;
this.forecastId = forecastId; this.forecastId = forecastId;
this.timestamp = timestamp; this.timestamp = timestamp;
this.bucketSpan = bucketSpan; this.bucketSpan = bucketSpan;
this.detectorIndex = detectorIndex;
} }
public Forecast(StreamInput in) throws IOException { public Forecast(StreamInput in) throws IOException {
@ -101,8 +100,6 @@ public class Forecast implements ToXContentObject, Writeable {
timestamp = new Date(in.readLong()); timestamp = new Date(in.readLong());
partitionFieldName = in.readOptionalString(); partitionFieldName = in.readOptionalString();
partitionFieldValue = in.readOptionalString(); partitionFieldValue = in.readOptionalString();
overFieldName = in.readOptionalString();
overFieldValue = in.readOptionalString();
byFieldName = in.readOptionalString(); byFieldName = in.readOptionalString();
byFieldValue = in.readOptionalString(); byFieldValue = in.readOptionalString();
modelFeature = in.readOptionalString(); modelFeature = in.readOptionalString();
@ -110,6 +107,7 @@ public class Forecast implements ToXContentObject, Writeable {
forecastUpper = in.readDouble(); forecastUpper = in.readDouble();
forecastPrediction = in.readDouble(); forecastPrediction = in.readDouble();
bucketSpan = in.readLong(); bucketSpan = in.readLong();
detectorIndex = in.readInt();
} }
@Override @Override
@ -119,8 +117,6 @@ public class Forecast implements ToXContentObject, Writeable {
out.writeLong(timestamp.getTime()); out.writeLong(timestamp.getTime());
out.writeOptionalString(partitionFieldName); out.writeOptionalString(partitionFieldName);
out.writeOptionalString(partitionFieldValue); out.writeOptionalString(partitionFieldValue);
out.writeOptionalString(overFieldName);
out.writeOptionalString(overFieldValue);
out.writeOptionalString(byFieldName); out.writeOptionalString(byFieldName);
out.writeOptionalString(byFieldValue); out.writeOptionalString(byFieldValue);
out.writeOptionalString(modelFeature); out.writeOptionalString(modelFeature);
@ -128,6 +124,7 @@ public class Forecast implements ToXContentObject, Writeable {
out.writeDouble(forecastUpper); out.writeDouble(forecastUpper);
out.writeDouble(forecastPrediction); out.writeDouble(forecastPrediction);
out.writeLong(bucketSpan); out.writeLong(bucketSpan);
out.writeInt(detectorIndex);
} }
@Override @Override
@ -137,6 +134,7 @@ public class Forecast implements ToXContentObject, Writeable {
builder.field(FORECAST_ID.getPreferredName(), forecastId); builder.field(FORECAST_ID.getPreferredName(), forecastId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex);
if (timestamp != null) { if (timestamp != null) {
builder.dateField(Result.TIMESTAMP.getPreferredName(), builder.dateField(Result.TIMESTAMP.getPreferredName(),
Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
@ -147,12 +145,6 @@ public class Forecast implements ToXContentObject, Writeable {
if (partitionFieldValue != null) { if (partitionFieldValue != null) {
builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue);
} }
if (overFieldName != null) {
builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName);
}
if (overFieldValue != null) {
builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue);
}
if (byFieldName != null) { if (byFieldName != null) {
builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName);
} }
@ -178,12 +170,12 @@ public class Forecast implements ToXContentObject, Writeable {
} }
public String getId() { public String getId() {
int valuesHash = Objects.hash(byFieldValue, overFieldValue, partitionFieldValue); int valuesHash = Objects.hash(byFieldValue, partitionFieldValue);
int length = (byFieldValue == null ? 0 : byFieldValue.length()) + int length = (byFieldValue == null ? 0 : byFieldValue.length()) +
(overFieldValue == null ? 0 : overFieldValue.length()) +
(partitionFieldValue == null ? 0 : partitionFieldValue.length()); (partitionFieldValue == null ? 0 : partitionFieldValue.length());
return jobId + "_model_forecast_" + forecastId + "_" + timestamp.getTime() + "_" + bucketSpan + "_" return jobId + "_model_forecast_" + forecastId + "_" + timestamp.getTime()
+ (modelFeature == null ? "" : modelFeature) + "_" + valuesHash + "_" + length; + "_" + bucketSpan + "_" + detectorIndex + "_"
+ valuesHash + "_" + length;
} }
public Date getTimestamp() { public Date getTimestamp() {
@ -202,6 +194,10 @@ public class Forecast implements ToXContentObject, Writeable {
this.partitionFieldName = partitionFieldName; this.partitionFieldName = partitionFieldName;
} }
public int getDetectorIndex() {
return detectorIndex;
}
public String getPartitionFieldValue() { public String getPartitionFieldValue() {
return partitionFieldValue; return partitionFieldValue;
} }
@ -210,22 +206,6 @@ public class Forecast implements ToXContentObject, Writeable {
this.partitionFieldValue = partitionFieldValue; this.partitionFieldValue = partitionFieldValue;
} }
public String getOverFieldName() {
return overFieldName;
}
public void setOverFieldName(String overFieldName) {
this.overFieldName = overFieldName;
}
public String getOverFieldValue() {
return overFieldValue;
}
public void setOverFieldValue(String overFieldValue) {
this.overFieldValue = overFieldValue;
}
public String getByFieldName() { public String getByFieldName() {
return byFieldName; return byFieldName;
} }
@ -288,21 +268,20 @@ public class Forecast implements ToXContentObject, Writeable {
Objects.equals(this.timestamp, that.timestamp) && Objects.equals(this.timestamp, that.timestamp) &&
Objects.equals(this.partitionFieldValue, that.partitionFieldValue) && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) &&
Objects.equals(this.partitionFieldName, that.partitionFieldName) && Objects.equals(this.partitionFieldName, that.partitionFieldName) &&
Objects.equals(this.overFieldValue, that.overFieldValue) &&
Objects.equals(this.overFieldName, that.overFieldName) &&
Objects.equals(this.byFieldValue, that.byFieldValue) && Objects.equals(this.byFieldValue, that.byFieldValue) &&
Objects.equals(this.byFieldName, that.byFieldName) && Objects.equals(this.byFieldName, that.byFieldName) &&
Objects.equals(this.modelFeature, that.modelFeature) && Objects.equals(this.modelFeature, that.modelFeature) &&
this.forecastLower == that.forecastLower && this.forecastLower == that.forecastLower &&
this.forecastUpper == that.forecastUpper && this.forecastUpper == that.forecastUpper &&
this.forecastPrediction == that.forecastPrediction && this.forecastPrediction == that.forecastPrediction &&
this.bucketSpan == that.bucketSpan; this.bucketSpan == that.bucketSpan &&
this.detectorIndex == that.detectorIndex;
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(jobId, forecastId, timestamp, partitionFieldName, partitionFieldValue, return Objects.hash(jobId, forecastId, timestamp, partitionFieldName, partitionFieldValue,
overFieldName, overFieldValue, byFieldName, byFieldValue, byFieldName, byFieldValue, modelFeature, forecastLower, forecastUpper,
modelFeature, forecastLower, forecastUpper, forecastPrediction, bucketSpan); forecastPrediction, bucketSpan, detectorIndex);
} }
} }

View File

@ -44,9 +44,11 @@ public class ModelPlot implements ToXContentObject, Writeable {
public static final ParseField MODEL_MEDIAN = new ParseField("model_median"); public static final ParseField MODEL_MEDIAN = new ParseField("model_median");
public static final ParseField ACTUAL = new ParseField("actual"); public static final ParseField ACTUAL = new ParseField("actual");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
public static final ConstructingObjectParser<ModelPlot, Void> PARSER = public static final ConstructingObjectParser<ModelPlot, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a -> new ModelPlot((String) a[0], (Date) a[1], (long) a[2])); new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a ->
new ModelPlot((String) a[0], (Date) a[1], (long) a[2], (int) a[3]));
static { static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
@ -60,6 +62,7 @@ public class ModelPlot implements ToXContentObject, Writeable {
+ Result.TIMESTAMP.getPreferredName() + "]"); + Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE); }, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX);
PARSER.declareString((modelPlot, s) -> {}, Result.RESULT_TYPE); PARSER.declareString((modelPlot, s) -> {}, Result.RESULT_TYPE);
PARSER.declareString(ModelPlot::setPartitionFieldName, PARTITION_FIELD_NAME); PARSER.declareString(ModelPlot::setPartitionFieldName, PARTITION_FIELD_NAME);
PARSER.declareString(ModelPlot::setPartitionFieldValue, PARTITION_FIELD_VALUE); PARSER.declareString(ModelPlot::setPartitionFieldValue, PARTITION_FIELD_VALUE);
@ -77,6 +80,7 @@ public class ModelPlot implements ToXContentObject, Writeable {
private final String jobId; private final String jobId;
private final Date timestamp; private final Date timestamp;
private final long bucketSpan; private final long bucketSpan;
private int detectorIndex;
private String partitionFieldName; private String partitionFieldName;
private String partitionFieldValue; private String partitionFieldValue;
private String overFieldName; private String overFieldName;
@ -92,10 +96,11 @@ public class ModelPlot implements ToXContentObject, Writeable {
*/ */
private Double actual; private Double actual;
public ModelPlot(String jobId, Date timestamp, long bucketSpan) { public ModelPlot(String jobId, Date timestamp, long bucketSpan, int detectorIndex) {
this.jobId = jobId; this.jobId = jobId;
this.timestamp = timestamp; this.timestamp = timestamp;
this.bucketSpan = bucketSpan; this.bucketSpan = bucketSpan;
this.detectorIndex = detectorIndex;
} }
public ModelPlot(StreamInput in) throws IOException { public ModelPlot(StreamInput in) throws IOException {
@ -134,6 +139,12 @@ public class ModelPlot implements ToXContentObject, Writeable {
} else { } else {
bucketSpan = 0; bucketSpan = 0;
} }
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
detectorIndex = in.readInt();
} else {
// default to -1 as marker for no detector index
detectorIndex = -1;
}
} }
@Override @Override
@ -177,6 +188,9 @@ public class ModelPlot implements ToXContentObject, Writeable {
if (out.getVersion().onOrAfter(Version.V_5_5_0)) { if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
out.writeLong(bucketSpan); out.writeLong(bucketSpan);
} }
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
out.writeInt(detectorIndex);
}
} }
@Override @Override
@ -185,6 +199,8 @@ public class ModelPlot implements ToXContentObject, Writeable {
builder.field(Job.ID.getPreferredName(), jobId); builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex);
if (timestamp != null) { if (timestamp != null) {
builder.dateField(Result.TIMESTAMP.getPreferredName(), builder.dateField(Result.TIMESTAMP.getPreferredName(),
Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
@ -229,8 +245,8 @@ public class ModelPlot implements ToXContentObject, Writeable {
int length = (byFieldValue == null ? 0 : byFieldValue.length()) + int length = (byFieldValue == null ? 0 : byFieldValue.length()) +
(overFieldValue == null ? 0 : overFieldValue.length()) + (overFieldValue == null ? 0 : overFieldValue.length()) +
(partitionFieldValue == null ? 0 : partitionFieldValue.length()); (partitionFieldValue == null ? 0 : partitionFieldValue.length());
return jobId + "_model_plot_" + timestamp.getTime() + "_" + bucketSpan + "_" + return jobId + "_model_plot_" + timestamp.getTime() + "_" + bucketSpan
(modelFeature == null ? "" : modelFeature) + "_" + valuesHash + "_" + length; + "_" + detectorIndex + "_" + valuesHash + "_" + length;
} }
public Date getTimestamp() { public Date getTimestamp() {
@ -241,6 +257,10 @@ public class ModelPlot implements ToXContentObject, Writeable {
return bucketSpan; return bucketSpan;
} }
public int getDetectorIndex() {
return detectorIndex;
}
public String getPartitionFieldName() { public String getPartitionFieldName() {
return partitionFieldName; return partitionFieldName;
} }
@ -351,13 +371,14 @@ public class ModelPlot implements ToXContentObject, Writeable {
this.modelUpper == that.modelUpper && this.modelUpper == that.modelUpper &&
this.modelMedian == that.modelMedian && this.modelMedian == that.modelMedian &&
Objects.equals(this.actual, that.actual) && Objects.equals(this.actual, that.actual) &&
this.bucketSpan == that.bucketSpan; this.bucketSpan == that.bucketSpan &&
this.detectorIndex == that.detectorIndex;
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(jobId, timestamp, partitionFieldName, partitionFieldValue, return Objects.hash(jobId, timestamp, partitionFieldName, partitionFieldValue,
overFieldName, overFieldValue, byFieldName, byFieldValue, overFieldName, overFieldValue, byFieldName, byFieldValue,
modelFeature, modelLower, modelUpper, modelMedian, actual, bucketSpan); modelFeature, modelLower, modelUpper, modelMedian, actual, bucketSpan, detectorIndex);
} }
} }

View File

@ -47,7 +47,7 @@ public class JobRenormalizedResultsPersisterTests extends ESTestCase {
Client client = new MockClientBuilder("cluster").bulk(bulkResponse).build(); Client client = new MockClientBuilder("cluster").bulk(bulkResponse).build();
JobRenormalizedResultsPersister persister = new JobRenormalizedResultsPersister("foo", Settings.EMPTY, client); JobRenormalizedResultsPersister persister = new JobRenormalizedResultsPersister("foo", Settings.EMPTY, client);
ModelPlot modelPlot = new ModelPlot("foo", new Date(), 123456); ModelPlot modelPlot = new ModelPlot("foo", new Date(), 123456, 0);
for (int i=0; i<=JobRenormalizedResultsPersister.BULK_LIMIT; i++) { for (int i=0; i<=JobRenormalizedResultsPersister.BULK_LIMIT; i++) {
persister.updateResult("bar", "index-foo", modelPlot); persister.updateResult("bar", "index-foo", modelPlot);
} }

View File

@ -183,7 +183,7 @@ public class JobResultsPersisterTests extends ESTestCase {
JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client); JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client);
JobResultsPersister.Builder bulkBuilder = persister.bulkPersisterBuilder("foo"); JobResultsPersister.Builder bulkBuilder = persister.bulkPersisterBuilder("foo");
ModelPlot modelPlot = new ModelPlot("foo", new Date(), 123456); ModelPlot modelPlot = new ModelPlot("foo", new Date(), 123456, 0);
for (int i=0; i<=JobRenormalizedResultsPersister.BULK_LIMIT; i++) { for (int i=0; i<=JobRenormalizedResultsPersister.BULK_LIMIT; i++) {
bulkBuilder.persistModelPlot(modelPlot); bulkBuilder.persistModelPlot(modelPlot);
} }

View File

@ -82,12 +82,12 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<Autodetec
modelSizeStats = null; modelSizeStats = null;
} }
if (randomBoolean()) { if (randomBoolean()) {
modelPlot = new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong()); modelPlot = new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong(), randomInt());
} else { } else {
modelPlot = null; modelPlot = null;
} }
if (randomBoolean()) { if (randomBoolean()) {
forecast = new Forecast(jobId, randomNonNegativeLong(), new Date(randomLong()), randomNonNegativeLong()); forecast = new Forecast(jobId, randomNonNegativeLong(), new Date(randomLong()), randomNonNegativeLong(), randomInt());
} else { } else {
forecast = null; forecast = null;
} }

View File

@ -11,6 +11,7 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException; import java.io.IOException;
import java.util.Date; import java.util.Date;
import java.util.Objects;
public class ForecastTests extends AbstractSerializingTestCase<Forecast> { public class ForecastTests extends AbstractSerializingTestCase<Forecast> {
@ -25,7 +26,9 @@ public class ForecastTests extends AbstractSerializingTestCase<Forecast> {
} }
public Forecast createTestInstance(String jobId) { public Forecast createTestInstance(String jobId) {
Forecast forecast = new Forecast(jobId, randomNonNegativeLong(), new Date(randomLong()), randomNonNegativeLong()); Forecast forecast =
new Forecast(jobId, randomNonNegativeLong(), new Date(randomLong()),
randomNonNegativeLong(), randomInt());
if (randomBoolean()) { if (randomBoolean()) {
forecast.setByFieldName(randomAlphaOfLengthBetween(1, 20)); forecast.setByFieldName(randomAlphaOfLengthBetween(1, 20));
@ -65,4 +68,27 @@ public class ForecastTests extends AbstractSerializingTestCase<Forecast> {
return Forecast.PARSER.apply(parser, null); return Forecast.PARSER.apply(parser, null);
} }
public void testId() {
Forecast forecast = new Forecast("job-foo", 222, new Date(100L), 60L, 2);
String byFieldValue = null;
String partitionFieldValue = null;
int valuesHash = Objects.hash(byFieldValue, partitionFieldValue);
assertEquals("job-foo_model_forecast_222_100_60_2_" + valuesHash + "_0", forecast.getId());
int length = 0;
if (randomBoolean()) {
byFieldValue = randomAlphaOfLength(10);
length += byFieldValue.length();
forecast.setByFieldValue(byFieldValue);
}
if (randomBoolean()) {
partitionFieldValue = randomAlphaOfLength(10);
length += partitionFieldValue.length();
forecast.setPartitionFieldValue(partitionFieldValue);
}
valuesHash = Objects.hash(byFieldValue, partitionFieldValue);
assertEquals("job-foo_model_forecast_222_100_60_2_" + valuesHash + "_" + length, forecast.getId());
}
} }

View File

@ -27,7 +27,8 @@ public class ModelPlotTests extends AbstractSerializingTestCase<ModelPlot> {
} }
public ModelPlot createTestInstance(String jobId) { public ModelPlot createTestInstance(String jobId) {
ModelPlot modelPlot = new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong()); ModelPlot modelPlot =
new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong(), randomInt());
if (randomBoolean()) { if (randomBoolean()) {
modelPlot.setByFieldName(randomAlphaOfLengthBetween(1, 20)); modelPlot.setByFieldName(randomAlphaOfLengthBetween(1, 20));
} }
@ -69,13 +70,15 @@ public class ModelPlotTests extends AbstractSerializingTestCase<ModelPlot> {
} }
public void testEquals_GivenSameObject() { public void testEquals_GivenSameObject() {
ModelPlot modelPlot = new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong()); ModelPlot modelPlot =
new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong(), randomInt());
assertTrue(modelPlot.equals(modelPlot)); assertTrue(modelPlot.equals(modelPlot));
} }
public void testEquals_GivenObjectOfDifferentClass() { public void testEquals_GivenObjectOfDifferentClass() {
ModelPlot modelPlot = new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong()); ModelPlot modelPlot =
new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong(), randomInt());
assertFalse(modelPlot.equals("a string")); assertFalse(modelPlot.equals("a string"));
} }
@ -209,16 +212,13 @@ public class ModelPlotTests extends AbstractSerializingTestCase<ModelPlot> {
} }
public void testId() { public void testId() {
ModelPlot plot = new ModelPlot("job-foo", new Date(100L), 60L); ModelPlot plot = new ModelPlot("job-foo", new Date(100L), 60L, 33);
String byFieldValue = null; String byFieldValue = null;
String overFieldValue = null; String overFieldValue = null;
String partitionFieldValue = null; String partitionFieldValue = null;
int valuesHash = Objects.hash(byFieldValue, overFieldValue, partitionFieldValue); int valuesHash = Objects.hash(byFieldValue, overFieldValue, partitionFieldValue);
assertEquals("job-foo_model_plot_100_60__" + valuesHash + "_0", plot.getId()); assertEquals("job-foo_model_plot_100_60_33_" + valuesHash + "_0", plot.getId());
plot.setModelFeature("a-feature");
assertEquals("job-foo_model_plot_100_60_a-feature_" + valuesHash + "_0", plot.getId());
int length = 0; int length = 0;
if (randomBoolean()) { if (randomBoolean()) {
@ -238,11 +238,11 @@ public class ModelPlotTests extends AbstractSerializingTestCase<ModelPlot> {
} }
valuesHash = Objects.hash(byFieldValue, overFieldValue, partitionFieldValue); valuesHash = Objects.hash(byFieldValue, overFieldValue, partitionFieldValue);
assertEquals("job-foo_model_plot_100_60_a-feature_" + valuesHash + "_" + length, plot.getId()); assertEquals("job-foo_model_plot_100_60_33_" + valuesHash + "_" + length, plot.getId());
} }
private ModelPlot createFullyPopulated() { private ModelPlot createFullyPopulated() {
ModelPlot modelPlot = new ModelPlot("foo", new Date(12345678L), 360L); ModelPlot modelPlot = new ModelPlot("foo", new Date(12345678L), 360L, 22);
modelPlot.setByFieldName("by"); modelPlot.setByFieldName("by");
modelPlot.setByFieldValue("by_val"); modelPlot.setByFieldValue("by_val");
modelPlot.setPartitionFieldName("part"); modelPlot.setPartitionFieldName("part");