Adding job process pojos to protocol pkg (#32657)

* Adding job process pojos to protocol pkg

* Removing unused `RESULTS_FIELD`

* Addressing PR comments, removing unnecessary methods
Benjamin Trent 2018-08-07 10:51:52 -05:00 committed by GitHub
parent b3e15851a2
commit 6d50d8b5a9
19 changed files with 1753 additions and 19 deletions

View File

@@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
+ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -54,7 +55,6 @@ public class DatafeedConfig implements ToXContentObject {
public static final ParseField FREQUENCY = new ParseField("frequency");
public static final ParseField INDEXES = new ParseField("indexes");
public static final ParseField INDICES = new ParseField("indices");
- public static final ParseField JOB_ID = new ParseField("job_id");
public static final ParseField TYPES = new ParseField("types");
public static final ParseField QUERY = new ParseField("query");
public static final ParseField SCROLL_SIZE = new ParseField("scroll_size");
@@ -68,7 +68,7 @@ public class DatafeedConfig implements ToXContentObject {
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
- PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareStringArray(Builder::setIndices, INDEXES);
PARSER.declareStringArray(Builder::setIndices, INDICES);
@@ -176,7 +176,7 @@ public class DatafeedConfig implements ToXContentObject {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(ID.getPreferredName(), id);
- builder.field(JOB_ID.getPreferredName(), jobId);
+ builder.field(Job.ID.getPreferredName(), jobId);
if (queryDelay != null) {
builder.field(QUERY_DELAY.getPreferredName(), queryDelay.getStringRep());
}
@@ -257,7 +257,7 @@ public class DatafeedConfig implements ToXContentObject {
public Builder(String id, String jobId) {
this.id = Objects.requireNonNull(id, ID.getPreferredName());
- this.jobId = Objects.requireNonNull(jobId, JOB_ID.getPreferredName());
+ this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName());
}
public Builder(DatafeedConfig config) {

View File

@@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
+ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -49,7 +50,7 @@ public class DatafeedUpdate implements ToXContentObject {
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
- PARSER.declareString(Builder::setJobId, DatafeedConfig.JOB_ID);
+ PARSER.declareString(Builder::setJobId, Job.ID);
PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES);
PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES);
PARSER.declareStringArray(Builder::setTypes, DatafeedConfig.TYPES);
@@ -112,7 +113,7 @@
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DatafeedConfig.ID.getPreferredName(), id);
- addOptionalField(builder, DatafeedConfig.JOB_ID, jobId);
+ addOptionalField(builder, Job.ID, jobId);
if (queryDelay != null) {
builder.field(DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay.getStringRep());
}

View File

@@ -0,0 +1,25 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.config;
import org.elasticsearch.common.ParseField;
public class Job {
public static final ParseField ID = new ParseField("job_id");
}

View File

@@ -0,0 +1,414 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Date;
import java.util.Objects;
/**
* Job processed record counts.
* <p>
* The getInput... methods return the actual number of
* fields/records sent to the API, including invalid records.
* The getProcessed... methods return the number passed on to
* the engine.
* <p>
* The <code>inputRecordCount</code> field is calculated, so it
* should not be set during deserialization, but it should be
* serialized so the field is visible.
*/
public class DataCounts implements ToXContentObject {
public static final ParseField PROCESSED_RECORD_COUNT = new ParseField("processed_record_count");
public static final ParseField PROCESSED_FIELD_COUNT = new ParseField("processed_field_count");
public static final ParseField INPUT_BYTES = new ParseField("input_bytes");
public static final ParseField INPUT_RECORD_COUNT = new ParseField("input_record_count");
public static final ParseField INPUT_FIELD_COUNT = new ParseField("input_field_count");
public static final ParseField INVALID_DATE_COUNT = new ParseField("invalid_date_count");
public static final ParseField MISSING_FIELD_COUNT = new ParseField("missing_field_count");
public static final ParseField OUT_OF_ORDER_TIME_COUNT = new ParseField("out_of_order_timestamp_count");
public static final ParseField EMPTY_BUCKET_COUNT = new ParseField("empty_bucket_count");
public static final ParseField SPARSE_BUCKET_COUNT = new ParseField("sparse_bucket_count");
public static final ParseField BUCKET_COUNT = new ParseField("bucket_count");
public static final ParseField EARLIEST_RECORD_TIME = new ParseField("earliest_record_timestamp");
public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_timestamp");
public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time");
public static final ParseField LATEST_EMPTY_BUCKET_TIME = new ParseField("latest_empty_bucket_timestamp");
public static final ParseField LATEST_SPARSE_BUCKET_TIME = new ParseField("latest_sparse_bucket_timestamp");
public static final ConstructingObjectParser<DataCounts, Void> PARSER = new ConstructingObjectParser<>("data_counts", true,
a -> new DataCounts((String) a[0], (long) a[1], (long) a[2], (long) a[3], (long) a[4], (long) a[5], (long) a[6],
(long) a[7], (long) a[8], (long) a[9], (long) a[10], (Date) a[11], (Date) a[12], (Date) a[13], (Date) a[14],
(Date) a[15]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_RECORD_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_FIELD_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_BYTES);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_FIELD_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), INVALID_DATE_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_FIELD_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), OUT_OF_ORDER_TIME_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), EMPTY_BUCKET_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SPARSE_BUCKET_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_COUNT);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
(p) -> TimeUtil.parseTimeField(p, EARLIEST_RECORD_TIME.getPreferredName()),
EARLIEST_RECORD_TIME,
ValueType.VALUE);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
(p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()),
LATEST_RECORD_TIME,
ValueType.VALUE);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
(p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()),
LAST_DATA_TIME,
ValueType.VALUE);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
(p) -> TimeUtil.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()),
LATEST_EMPTY_BUCKET_TIME,
ValueType.VALUE);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
(p) -> TimeUtil.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()),
LATEST_SPARSE_BUCKET_TIME,
ValueType.VALUE);
}
private final String jobId;
private long processedRecordCount;
private long processedFieldCount;
private long inputBytes;
private long inputFieldCount;
private long invalidDateCount;
private long missingFieldCount;
private long outOfOrderTimeStampCount;
private long emptyBucketCount;
private long sparseBucketCount;
private long bucketCount;
private Date earliestRecordTimeStamp;
private Date latestRecordTimeStamp;
private Date lastDataTimeStamp;
private Date latestEmptyBucketTimeStamp;
private Date latestSparseBucketTimeStamp;
public DataCounts(String jobId, long processedRecordCount, long processedFieldCount, long inputBytes,
long inputFieldCount, long invalidDateCount, long missingFieldCount, long outOfOrderTimeStampCount,
long emptyBucketCount, long sparseBucketCount, long bucketCount,
Date earliestRecordTimeStamp, Date latestRecordTimeStamp, Date lastDataTimeStamp,
Date latestEmptyBucketTimeStamp, Date latestSparseBucketTimeStamp) {
this.jobId = jobId;
this.processedRecordCount = processedRecordCount;
this.processedFieldCount = processedFieldCount;
this.inputBytes = inputBytes;
this.inputFieldCount = inputFieldCount;
this.invalidDateCount = invalidDateCount;
this.missingFieldCount = missingFieldCount;
this.outOfOrderTimeStampCount = outOfOrderTimeStampCount;
this.emptyBucketCount = emptyBucketCount;
this.sparseBucketCount = sparseBucketCount;
this.bucketCount = bucketCount;
this.latestRecordTimeStamp = latestRecordTimeStamp;
this.earliestRecordTimeStamp = earliestRecordTimeStamp;
this.lastDataTimeStamp = lastDataTimeStamp;
this.latestEmptyBucketTimeStamp = latestEmptyBucketTimeStamp;
this.latestSparseBucketTimeStamp = latestSparseBucketTimeStamp;
}
DataCounts(String jobId) {
this.jobId = jobId;
}
public DataCounts(DataCounts lhs) {
jobId = lhs.jobId;
processedRecordCount = lhs.processedRecordCount;
processedFieldCount = lhs.processedFieldCount;
inputBytes = lhs.inputBytes;
inputFieldCount = lhs.inputFieldCount;
invalidDateCount = lhs.invalidDateCount;
missingFieldCount = lhs.missingFieldCount;
outOfOrderTimeStampCount = lhs.outOfOrderTimeStampCount;
emptyBucketCount = lhs.emptyBucketCount;
sparseBucketCount = lhs.sparseBucketCount;
bucketCount = lhs.bucketCount;
latestRecordTimeStamp = lhs.latestRecordTimeStamp;
earliestRecordTimeStamp = lhs.earliestRecordTimeStamp;
lastDataTimeStamp = lhs.lastDataTimeStamp;
latestEmptyBucketTimeStamp = lhs.latestEmptyBucketTimeStamp;
latestSparseBucketTimeStamp = lhs.latestSparseBucketTimeStamp;
}
public String getJobId() {
return jobId;
}
/**
* Number of records processed by this job.
* This value is the number of records passed on to
* the engine, i.e. {@linkplain #getInputRecordCount()} minus
* records with bad dates or out-of-order timestamps.
*
* @return Number of records processed by this job {@code long}
*/
public long getProcessedRecordCount() {
return processedRecordCount;
}
/**
* Number of data points (processed record count * the number
* of analysed fields) processed by this job. This count does
* not include the time field.
*
* @return Number of data points processed by this job {@code long}
*/
public long getProcessedFieldCount() {
return processedFieldCount;
}
/**
* Total number of input records read.
* This = processed record count + date parse error records count
* + out of order record count.
* <p>
* Records with missing fields are counted as they are still written.
*
* @return Total number of input records read {@code long}
*/
public long getInputRecordCount() {
return processedRecordCount + outOfOrderTimeStampCount
+ invalidDateCount;
}
/**
* The total number of bytes sent to this job.
* This value includes the bytes from any records
* that have been discarded for any reason,
* e.g. because the date cannot be read.
*
* @return Volume in bytes
*/
public long getInputBytes() {
return inputBytes;
}
/**
* The total number of fields sent to the job
* including fields that aren't analysed.
*
* @return The total number of fields sent to the job
*/
public long getInputFieldCount() {
return inputFieldCount;
}
/**
* The number of records with an invalid date field that could
* not be parsed or converted to epoch time.
*
* @return The number of records with an invalid date field
*/
public long getInvalidDateCount() {
return invalidDateCount;
}
/**
* The number of missing fields that had been
* configured for analysis.
*
* @return The number of missing fields
*/
public long getMissingFieldCount() {
return missingFieldCount;
}
/**
* The number of records with a timestamp that is
* before the time of the latest record. Records should
* be in ascending chronological order.
*
* @return The number of records with a timestamp that is before the time of the latest record
*/
public long getOutOfOrderTimeStampCount() {
return outOfOrderTimeStampCount;
}
/**
* The number of buckets with no records in them. Used to measure general data fitness and/or
* configuration problems (bucket span).
*
* @return Number of empty buckets processed by this job {@code long}
*/
public long getEmptyBucketCount() {
return emptyBucketCount;
}
/**
* The number of buckets with few records compared to the overall counts.
* Used to measure general data fitness and/or configuration problems (bucket span).
*
* @return Number of sparse buckets processed by this job {@code long}
*/
public long getSparseBucketCount() {
return sparseBucketCount;
}
/**
* The number of buckets overall.
*
* @return Number of buckets processed by this job {@code long}
*/
public long getBucketCount() {
return bucketCount;
}
/**
* The time of the first record seen.
*
* @return The first record time
*/
public Date getEarliestRecordTimeStamp() {
return earliestRecordTimeStamp;
}
/**
* The time of the latest record seen.
*
* @return Latest record time
*/
public Date getLatestRecordTimeStamp() {
return latestRecordTimeStamp;
}
/**
* The wall clock time the latest record was seen.
*
* @return Wall clock time of the latest record
*/
public Date getLastDataTimeStamp() {
return lastDataTimeStamp;
}
/**
* The time of the latest empty bucket seen.
*
* @return Latest empty bucket time
*/
public Date getLatestEmptyBucketTimeStamp() {
return latestEmptyBucketTimeStamp;
}
/**
* The time of the latest sparse bucket seen.
*
* @return Latest sparse bucket time
*/
public Date getLatestSparseBucketTimeStamp() {
return latestSparseBucketTimeStamp;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(PROCESSED_RECORD_COUNT.getPreferredName(), processedRecordCount);
builder.field(PROCESSED_FIELD_COUNT.getPreferredName(), processedFieldCount);
builder.field(INPUT_BYTES.getPreferredName(), inputBytes);
builder.field(INPUT_FIELD_COUNT.getPreferredName(), inputFieldCount);
builder.field(INVALID_DATE_COUNT.getPreferredName(), invalidDateCount);
builder.field(MISSING_FIELD_COUNT.getPreferredName(), missingFieldCount);
builder.field(OUT_OF_ORDER_TIME_COUNT.getPreferredName(), outOfOrderTimeStampCount);
builder.field(EMPTY_BUCKET_COUNT.getPreferredName(), emptyBucketCount);
builder.field(SPARSE_BUCKET_COUNT.getPreferredName(), sparseBucketCount);
builder.field(BUCKET_COUNT.getPreferredName(), bucketCount);
if (earliestRecordTimeStamp != null) {
builder.timeField(EARLIEST_RECORD_TIME.getPreferredName(), EARLIEST_RECORD_TIME.getPreferredName() + "_string",
earliestRecordTimeStamp.getTime());
}
if (latestRecordTimeStamp != null) {
builder.timeField(LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string",
latestRecordTimeStamp.getTime());
}
if (lastDataTimeStamp != null) {
builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + "_string",
lastDataTimeStamp.getTime());
}
if (latestEmptyBucketTimeStamp != null) {
builder.timeField(LATEST_EMPTY_BUCKET_TIME.getPreferredName(), LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "_string",
latestEmptyBucketTimeStamp.getTime());
}
if (latestSparseBucketTimeStamp != null) {
builder.timeField(LATEST_SPARSE_BUCKET_TIME.getPreferredName(), LATEST_SPARSE_BUCKET_TIME.getPreferredName() + "_string",
latestSparseBucketTimeStamp.getTime());
}
builder.field(INPUT_RECORD_COUNT.getPreferredName(), getInputRecordCount());
builder.endObject();
return builder;
}
/**
* Equality test
*/
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
DataCounts that = (DataCounts) other;
return Objects.equals(this.jobId, that.jobId) &&
this.processedRecordCount == that.processedRecordCount &&
this.processedFieldCount == that.processedFieldCount &&
this.inputBytes == that.inputBytes &&
this.inputFieldCount == that.inputFieldCount &&
this.invalidDateCount == that.invalidDateCount &&
this.missingFieldCount == that.missingFieldCount &&
this.outOfOrderTimeStampCount == that.outOfOrderTimeStampCount &&
this.emptyBucketCount == that.emptyBucketCount &&
this.sparseBucketCount == that.sparseBucketCount &&
this.bucketCount == that.bucketCount &&
Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) &&
Objects.equals(this.earliestRecordTimeStamp, that.earliestRecordTimeStamp) &&
Objects.equals(this.lastDataTimeStamp, that.lastDataTimeStamp) &&
Objects.equals(this.latestEmptyBucketTimeStamp, that.latestEmptyBucketTimeStamp) &&
Objects.equals(this.latestSparseBucketTimeStamp, that.latestSparseBucketTimeStamp);
}
@Override
public int hashCode() {
return Objects.hash(jobId, processedRecordCount, processedFieldCount,
inputBytes, inputFieldCount, invalidDateCount, missingFieldCount,
outOfOrderTimeStampCount, lastDataTimeStamp, emptyBucketCount, sparseBucketCount, bucketCount,
latestRecordTimeStamp, earliestRecordTimeStamp, latestEmptyBucketTimeStamp, latestSparseBucketTimeStamp);
}
}
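For context (not part of this commit), here is a minimal sketch of how a data_counts document could be parsed with the lenient PARSER declared above. The x-content helper names (XContentType, NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION) and the example JSON are assumptions about the surrounding Elasticsearch 6.x API, not part of this change; note that input_record_count is derived from the other counts rather than read back from the document.

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.protocol.xpack.ml.job.process.DataCounts;

public final class DataCountsParseExample {
    public static void main(String[] args) throws Exception {
        // A hypothetical data_counts document; field names match the ParseFields declared above.
        String json = "{\"job_id\":\"farequote\",\"processed_record_count\":1000,"
                + "\"processed_field_count\":2000,\"input_bytes\":128000,\"input_field_count\":2200,"
                + "\"invalid_date_count\":5,\"missing_field_count\":0,\"out_of_order_timestamp_count\":2,"
                + "\"empty_bucket_count\":1,\"sparse_bucket_count\":0,\"bucket_count\":144,"
                + "\"earliest_record_timestamp\":1533600000000,\"latest_record_timestamp\":1533686400000}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            DataCounts counts = DataCounts.PARSER.apply(parser, null);
            // input_record_count is calculated: processed (1000) + out of order (2) + invalid dates (5)
            System.out.println(counts.getInputRecordCount()); // prints 1007
        }
    }
}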

View File

@@ -0,0 +1,293 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import org.elasticsearch.protocol.xpack.ml.job.results.Result;
import java.io.IOException;
import java.util.Date;
import java.util.Locale;
import java.util.Objects;
/**
* Provide access to the C++ model memory usage numbers for the Java process.
*/
public class ModelSizeStats implements ToXContentObject {
/**
* Result type
*/
public static final String RESULT_TYPE_VALUE = "model_size_stats";
public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE);
/**
* Field Names
*/
public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes");
public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count");
public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count");
public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count");
public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucket_allocation_failures_count");
public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memory_status");
public static final ParseField LOG_TIME_FIELD = new ParseField("log_time");
public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp");
public static final ConstructingObjectParser<Builder, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new Builder((String) a[0]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD);
PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD);
PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD);
PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD);
PARSER.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD);
PARSER.declareField(Builder::setLogTime,
(p) -> TimeUtil.parseTimeField(p, LOG_TIME_FIELD.getPreferredName()),
LOG_TIME_FIELD,
ValueType.VALUE);
PARSER.declareField(Builder::setTimestamp,
(p) -> TimeUtil.parseTimeField(p, TIMESTAMP_FIELD.getPreferredName()),
TIMESTAMP_FIELD,
ValueType.VALUE);
PARSER.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING);
}
/**
* The status of the memory monitored by the ResourceMonitor. OK is default,
* SOFT_LIMIT means that the models have done some aggressive pruning to
* keep the memory below the limit, and HARD_LIMIT means that samples have
* been dropped.
*/
public enum MemoryStatus {
OK, SOFT_LIMIT, HARD_LIMIT;
public static MemoryStatus fromString(String statusName) {
return valueOf(statusName.trim().toUpperCase(Locale.ROOT));
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}
private final String jobId;
private final long modelBytes;
private final long totalByFieldCount;
private final long totalOverFieldCount;
private final long totalPartitionFieldCount;
private final long bucketAllocationFailuresCount;
private final MemoryStatus memoryStatus;
private final Date timestamp;
private final Date logTime;
private ModelSizeStats(String jobId, long modelBytes, long totalByFieldCount, long totalOverFieldCount,
long totalPartitionFieldCount, long bucketAllocationFailuresCount, MemoryStatus memoryStatus,
Date timestamp, Date logTime) {
this.jobId = jobId;
this.modelBytes = modelBytes;
this.totalByFieldCount = totalByFieldCount;
this.totalOverFieldCount = totalOverFieldCount;
this.totalPartitionFieldCount = totalPartitionFieldCount;
this.bucketAllocationFailuresCount = bucketAllocationFailuresCount;
this.memoryStatus = memoryStatus;
this.timestamp = timestamp;
this.logTime = logTime;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes);
builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount);
builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount);
builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount);
builder.field(BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName(), bucketAllocationFailuresCount);
builder.field(MEMORY_STATUS_FIELD.getPreferredName(), memoryStatus);
builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime());
if (timestamp != null) {
builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.getTime());
}
builder.endObject();
return builder;
}
public String getJobId() {
return jobId;
}
public long getModelBytes() {
return modelBytes;
}
public long getTotalByFieldCount() {
return totalByFieldCount;
}
public long getTotalPartitionFieldCount() {
return totalPartitionFieldCount;
}
public long getTotalOverFieldCount() {
return totalOverFieldCount;
}
public long getBucketAllocationFailuresCount() {
return bucketAllocationFailuresCount;
}
public MemoryStatus getMemoryStatus() {
return memoryStatus;
}
/**
* The timestamp of the last processed record when this instance was created.
*
* @return The record time
*/
public Date getTimestamp() {
return timestamp;
}
/**
* The wall clock time at the point when this instance was created.
*
* @return The wall clock time
*/
public Date getLogTime() {
return logTime;
}
@Override
public int hashCode() {
return Objects.hash(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount,
this.bucketAllocationFailuresCount, memoryStatus, timestamp, logTime);
}
/**
* Compare all the fields.
*/
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
ModelSizeStats that = (ModelSizeStats) other;
return this.modelBytes == that.modelBytes && this.totalByFieldCount == that.totalByFieldCount
&& this.totalOverFieldCount == that.totalOverFieldCount && this.totalPartitionFieldCount == that.totalPartitionFieldCount
&& this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount
&& Objects.equals(this.memoryStatus, that.memoryStatus) && Objects.equals(this.timestamp, that.timestamp)
&& Objects.equals(this.logTime, that.logTime)
&& Objects.equals(this.jobId, that.jobId);
}
public static class Builder {
private final String jobId;
private long modelBytes;
private long totalByFieldCount;
private long totalOverFieldCount;
private long totalPartitionFieldCount;
private long bucketAllocationFailuresCount;
private MemoryStatus memoryStatus;
private Date timestamp;
private Date logTime;
public Builder(String jobId) {
this.jobId = jobId;
memoryStatus = MemoryStatus.OK;
logTime = new Date();
}
public Builder(ModelSizeStats modelSizeStats) {
this.jobId = modelSizeStats.jobId;
this.modelBytes = modelSizeStats.modelBytes;
this.totalByFieldCount = modelSizeStats.totalByFieldCount;
this.totalOverFieldCount = modelSizeStats.totalOverFieldCount;
this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount;
this.bucketAllocationFailuresCount = modelSizeStats.bucketAllocationFailuresCount;
this.memoryStatus = modelSizeStats.memoryStatus;
this.timestamp = modelSizeStats.timestamp;
this.logTime = modelSizeStats.logTime;
}
public Builder setModelBytes(long modelBytes) {
this.modelBytes = modelBytes;
return this;
}
public Builder setTotalByFieldCount(long totalByFieldCount) {
this.totalByFieldCount = totalByFieldCount;
return this;
}
public Builder setTotalPartitionFieldCount(long totalPartitionFieldCount) {
this.totalPartitionFieldCount = totalPartitionFieldCount;
return this;
}
public Builder setTotalOverFieldCount(long totalOverFieldCount) {
this.totalOverFieldCount = totalOverFieldCount;
return this;
}
public Builder setBucketAllocationFailuresCount(long bucketAllocationFailuresCount) {
this.bucketAllocationFailuresCount = bucketAllocationFailuresCount;
return this;
}
public Builder setMemoryStatus(MemoryStatus memoryStatus) {
Objects.requireNonNull(memoryStatus, "[" + MEMORY_STATUS_FIELD.getPreferredName() + "] must not be null");
this.memoryStatus = memoryStatus;
return this;
}
public Builder setTimestamp(Date timestamp) {
this.timestamp = timestamp;
return this;
}
public Builder setLogTime(Date logTime) {
this.logTime = logTime;
return this;
}
public ModelSizeStats build() {
return new ModelSizeStats(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount,
bucketAllocationFailuresCount, memoryStatus, timestamp, logTime);
}
}
}
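For context (not part of this commit), a minimal sketch of building a ModelSizeStats instance with the Builder above and rendering it as JSON. The job id is hypothetical, and Strings.toString is assumed to be the usual org.elasticsearch.common.Strings helper for a ToXContentObject.

import org.elasticsearch.common.Strings;
import org.elasticsearch.protocol.xpack.ml.job.process.ModelSizeStats;

public final class ModelSizeStatsExample {
    public static void main(String[] args) {
        ModelSizeStats stats = new ModelSizeStats.Builder("farequote") // hypothetical job id
                .setModelBytes(10_000_000L)
                .setTotalByFieldCount(3)
                .setTotalOverFieldCount(0)
                .setTotalPartitionFieldCount(2)
                .setBucketAllocationFailuresCount(0)
                // MemoryStatus.fromString trims and upper-cases, so "soft_limit" maps to SOFT_LIMIT
                .setMemoryStatus(ModelSizeStats.MemoryStatus.fromString("soft_limit"))
                .build();
        // Fields left unset keep the Builder defaults: memory_status OK (overridden here)
        // and log_time set to the construction time.
        System.out.println(Strings.toString(stats));
    }
}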

View File

@@ -0,0 +1,330 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Date;
import java.util.Objects;
/**
* ModelSnapshot Result POJO
*/
public class ModelSnapshot implements ToXContentObject {
/**
* Field Names
*/
public static final ParseField TIMESTAMP = new ParseField("timestamp");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count");
public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp");
public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp");
public static final ParseField QUANTILES = new ParseField("quantiles");
public static final ParseField RETAIN = new ParseField("retain");
public static final ParseField MIN_VERSION = new ParseField("min_version");
public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id");
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("model_snapshot", true, Builder::new);
static {
PARSER.declareString(Builder::setJobId, Job.ID);
PARSER.declareString(Builder::setMinVersion, MIN_VERSION);
PARSER.declareField(Builder::setTimestamp,
(p) -> TimeUtil.parseTimeField(p, TIMESTAMP.getPreferredName()),
TIMESTAMP,
ValueType.VALUE);
PARSER.declareString(Builder::setDescription, DESCRIPTION);
PARSER.declareString(Builder::setSnapshotId, SNAPSHOT_ID);
PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT);
PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER,
ModelSizeStats.RESULT_TYPE_FIELD);
PARSER.declareField(Builder::setLatestRecordTimeStamp,
(p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()),
LATEST_RECORD_TIME,
ValueType.VALUE);
PARSER.declareField(Builder::setLatestResultTimeStamp,
(p) -> TimeUtil.parseTimeField(p, LATEST_RESULT_TIME.getPreferredName()),
LATEST_RESULT_TIME,
ValueType.VALUE);
PARSER.declareObject(Builder::setQuantiles, Quantiles.PARSER, QUANTILES);
PARSER.declareBoolean(Builder::setRetain, RETAIN);
}
private final String jobId;
/**
* The minimum version a node should have to be able
* to read this model snapshot.
*/
private final Version minVersion;
private final Date timestamp;
private final String description;
private final String snapshotId;
private final int snapshotDocCount;
private final ModelSizeStats modelSizeStats;
private final Date latestRecordTimeStamp;
private final Date latestResultTimeStamp;
private final Quantiles quantiles;
private final boolean retain;
private ModelSnapshot(String jobId, Version minVersion, Date timestamp, String description, String snapshotId, int snapshotDocCount,
ModelSizeStats modelSizeStats, Date latestRecordTimeStamp, Date latestResultTimeStamp, Quantiles quantiles,
boolean retain) {
this.jobId = jobId;
this.minVersion = minVersion;
this.timestamp = timestamp;
this.description = description;
this.snapshotId = snapshotId;
this.snapshotDocCount = snapshotDocCount;
this.modelSizeStats = modelSizeStats;
this.latestRecordTimeStamp = latestRecordTimeStamp;
this.latestResultTimeStamp = latestResultTimeStamp;
this.quantiles = quantiles;
this.retain = retain;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(MIN_VERSION.getPreferredName(), minVersion);
if (timestamp != null) {
builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
}
if (description != null) {
builder.field(DESCRIPTION.getPreferredName(), description);
}
if (snapshotId != null) {
builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId);
}
builder.field(SNAPSHOT_DOC_COUNT.getPreferredName(), snapshotDocCount);
if (modelSizeStats != null) {
builder.field(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName(), modelSizeStats);
}
if (latestRecordTimeStamp != null) {
builder.timeField(LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string",
latestRecordTimeStamp.getTime());
}
if (latestResultTimeStamp != null) {
builder.timeField(LATEST_RESULT_TIME.getPreferredName(), LATEST_RESULT_TIME.getPreferredName() + "_string",
latestResultTimeStamp.getTime());
}
if (quantiles != null) {
builder.field(QUANTILES.getPreferredName(), quantiles);
}
builder.field(RETAIN.getPreferredName(), retain);
builder.endObject();
return builder;
}
public String getJobId() {
return jobId;
}
public Version getMinVersion() {
return minVersion;
}
public Date getTimestamp() {
return timestamp;
}
public String getDescription() {
return description;
}
public String getSnapshotId() {
return snapshotId;
}
public int getSnapshotDocCount() {
return snapshotDocCount;
}
public ModelSizeStats getModelSizeStats() {
return modelSizeStats;
}
public Quantiles getQuantiles() {
return quantiles;
}
public Date getLatestRecordTimeStamp() {
return latestRecordTimeStamp;
}
public Date getLatestResultTimeStamp() {
return latestResultTimeStamp;
}
@Override
public int hashCode() {
return Objects.hash(jobId, minVersion, timestamp, description, snapshotId, quantiles, snapshotDocCount, modelSizeStats,
latestRecordTimeStamp, latestResultTimeStamp, retain);
}
/**
* Compare all the fields.
*/
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
ModelSnapshot that = (ModelSnapshot) other;
return Objects.equals(this.jobId, that.jobId)
&& Objects.equals(this.minVersion, that.minVersion)
&& Objects.equals(this.timestamp, that.timestamp)
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.snapshotId, that.snapshotId)
&& this.snapshotDocCount == that.snapshotDocCount
&& Objects.equals(this.modelSizeStats, that.modelSizeStats)
&& Objects.equals(this.quantiles, that.quantiles)
&& Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp)
&& Objects.equals(this.latestResultTimeStamp, that.latestResultTimeStamp)
&& this.retain == that.retain;
}
public static class Builder {
private String jobId;
// Stored snapshot documents created prior to 6.3.0 will have no
// value for min_version. We default it to 5.5.0 as there were
// no model changes between 5.5.0 and 6.3.0.
private Version minVersion = Version.V_5_5_0;
private Date timestamp;
private String description;
private String snapshotId;
private int snapshotDocCount;
private ModelSizeStats modelSizeStats;
private Date latestRecordTimeStamp;
private Date latestResultTimeStamp;
private Quantiles quantiles;
private boolean retain;
public Builder() {
}
public Builder(String jobId) {
this.jobId = jobId;
}
public Builder(ModelSnapshot modelSnapshot) {
this.jobId = modelSnapshot.jobId;
this.timestamp = modelSnapshot.timestamp;
this.description = modelSnapshot.description;
this.snapshotId = modelSnapshot.snapshotId;
this.snapshotDocCount = modelSnapshot.snapshotDocCount;
this.modelSizeStats = modelSnapshot.modelSizeStats;
this.latestRecordTimeStamp = modelSnapshot.latestRecordTimeStamp;
this.latestResultTimeStamp = modelSnapshot.latestResultTimeStamp;
this.quantiles = modelSnapshot.quantiles;
this.retain = modelSnapshot.retain;
this.minVersion = modelSnapshot.minVersion;
}
public Builder setJobId(String jobId) {
this.jobId = jobId;
return this;
}
Builder setMinVersion(Version minVersion) {
this.minVersion = minVersion;
return this;
}
Builder setMinVersion(String minVersion) {
this.minVersion = Version.fromString(minVersion);
return this;
}
public Builder setTimestamp(Date timestamp) {
this.timestamp = timestamp;
return this;
}
public Builder setDescription(String description) {
this.description = description;
return this;
}
public Builder setSnapshotId(String snapshotId) {
this.snapshotId = snapshotId;
return this;
}
public Builder setSnapshotDocCount(int snapshotDocCount) {
this.snapshotDocCount = snapshotDocCount;
return this;
}
public Builder setModelSizeStats(ModelSizeStats.Builder modelSizeStats) {
this.modelSizeStats = modelSizeStats.build();
return this;
}
public Builder setModelSizeStats(ModelSizeStats modelSizeStats) {
this.modelSizeStats = modelSizeStats;
return this;
}
public Builder setLatestRecordTimeStamp(Date latestRecordTimeStamp) {
this.latestRecordTimeStamp = latestRecordTimeStamp;
return this;
}
public Builder setLatestResultTimeStamp(Date latestResultTimeStamp) {
this.latestResultTimeStamp = latestResultTimeStamp;
return this;
}
public Builder setQuantiles(Quantiles quantiles) {
this.quantiles = quantiles;
return this;
}
public Builder setRetain(boolean value) {
this.retain = value;
return this;
}
public ModelSnapshot build() {
return new ModelSnapshot(jobId, minVersion, timestamp, description, snapshotId, snapshotDocCount, modelSizeStats,
latestRecordTimeStamp, latestResultTimeStamp, quantiles, retain);
}
}
}

View File

@@ -0,0 +1,112 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Date;
import java.util.Objects;
/**
* Quantiles Result POJO
*/
public class Quantiles implements ToXContentObject {
/**
* Field Names
*/
public static final ParseField TIMESTAMP = new ParseField("timestamp");
public static final ParseField QUANTILE_STATE = new ParseField("quantile_state");
public static final ConstructingObjectParser<Quantiles, Void> PARSER =
new ConstructingObjectParser<>("quantiles", true, a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG);
PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE);
}
private final String jobId;
private final Date timestamp;
private final String quantileState;
public Quantiles(String jobId, Date timestamp, String quantileState) {
this.jobId = jobId;
this.timestamp = Objects.requireNonNull(timestamp);
this.quantileState = Objects.requireNonNull(quantileState);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Job.ID.getPreferredName(), jobId);
if (timestamp != null) {
builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime());
}
if (quantileState != null) {
builder.field(QUANTILE_STATE.getPreferredName(), quantileState);
}
builder.endObject();
return builder;
}
public String getJobId() {
return jobId;
}
public Date getTimestamp() {
return timestamp;
}
public String getQuantileState() {
return quantileState;
}
@Override
public int hashCode() {
return Objects.hash(jobId, timestamp, quantileState);
}
/**
* Compare all the fields.
*/
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
Quantiles that = (Quantiles) other;
return Objects.equals(this.jobId, that.jobId) && Objects.equals(this.timestamp, that.timestamp)
&& Objects.equals(this.quantileState, that.quantileState);
}
}

View File

@@ -0,0 +1,48 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
import java.util.Date;
final class TimeUtil {
/**
* Parse out a Date object given the current parser and field name.
*
* @param parser current XContentParser
* @param fieldName the field's preferred name (utilized in exception)
* @return parsed Date object
* @throws IOException from XContentParser
*/
static Date parseTimeField(XContentParser parser, String fieldName) throws IOException {
if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
return new Date(parser.longValue());
} else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(parser.text())).toInstant().toEpochMilli());
}
throw new IllegalArgumentException(
"unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]");
}
}
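For context (not part of this commit), a small sketch showing that parseTimeField accepts either epoch milliseconds or an ISO-8601 string for the same instant. It sits in the same package because the helper is package-private; the x-content parser setup (XContentType, NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION) is an assumption about the surrounding Elasticsearch API.

package org.elasticsearch.protocol.xpack.ml.job.process;

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.util.Date;

final class TimeUtilExample {
    public static void main(String[] args) throws Exception {
        // The same instant written as a number (epoch millis) and as an ISO-8601 string.
        for (String json : new String[] { "1533600000000", "\"2018-08-07T00:00:00Z\"" }) {
            try (XContentParser parser = XContentType.JSON.xContent().createParser(
                    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
                parser.nextToken(); // position the parser on the scalar value
                Date date = TimeUtil.parseTimeField(parser, "timestamp");
                System.out.println(date.getTime()); // prints 1533600000000 in both cases
            }
        }
    }
}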

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
+ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
@@ -88,7 +89,7 @@ public class AnomalyRecord implements ToXContentObject {
static {
- PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
@@ -159,7 +160,7 @@
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
- builder.field(Result.JOB_ID.getPreferredName(), jobId);
+ builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(PROBABILITY.getPreferredName(), probability);
builder.field(RECORD_SCORE.getPreferredName(), recordScore);

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
+ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
@@ -61,7 +62,7 @@ public class Bucket implements ToXContentObject {
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]));
static {
- PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
@@ -104,7 +105,7 @@ public class Bucket implements ToXContentObject {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
- builder.field(Result.JOB_ID.getPreferredName(), jobId);
+ builder.field(Job.ID.getPreferredName(), jobId);
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
+ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
@@ -54,7 +55,7 @@ public class BucketInfluencer implements ToXContentObject {
a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2]));
static {
- PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
@@ -93,7 +94,7 @@ public class BucketInfluencer implements ToXContentObject {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
- builder.field(Result.JOB_ID.getPreferredName(), jobId);
+ builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
if (influenceField != null) {
builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField);

View File

@@ -22,6 +22,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.ArrayList;
@@ -49,7 +50,7 @@ public class CategoryDefinition implements ToXContentObject {
new ConstructingObjectParser<>(TYPE.getPreferredName(), true, a -> new CategoryDefinition((String) a[0]));
static {
- PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID);
PARSER.declareString(CategoryDefinition::setTerms, TERMS);
PARSER.declareString(CategoryDefinition::setRegex, REGEX);
@@ -130,7 +131,7 @@ public class CategoryDefinition implements ToXContentObject {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
- builder.field(Result.JOB_ID.getPreferredName(), jobId);
+ builder.field(Job.ID.getPreferredName(), jobId);
builder.field(CATEGORY_ID.getPreferredName(), categoryId);
builder.field(TERMS.getPreferredName(), terms);
builder.field(REGEX.getPreferredName(), regex);

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;

import java.io.IOException;
import java.time.format.DateTimeFormatter;
@@ -57,7 +58,7 @@ public class Influencer implements ToXContentObject {
            a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4]));

    static {
-       PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+       PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE);
        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
@@ -98,7 +99,7 @@ public class Influencer implements ToXContentObject {
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
-       builder.field(Result.JOB_ID.getPreferredName(), jobId);
+       builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
        builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField);
        builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue);

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;

import java.io.IOException;
import java.time.format.DateTimeFormatter;
@@ -158,7 +159,7 @@ public class OverallBucket implements ToXContentObject {
                new ConstructingObjectParser<>("job_info", true, a -> new JobInfo((String) a[0], (double) a[1]));

        static {
-           PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+           PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
            PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_ANOMALY_SCORE);
        }
@@ -181,7 +182,7 @@ public class OverallBucket implements ToXContentObject {
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
-           builder.field(Result.JOB_ID.getPreferredName(), jobId);
+           builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(MAX_ANOMALY_SCORE.getPreferredName(), maxAnomalyScore);
            builder.endObject();
            return builder;

View File

@@ -28,7 +28,6 @@ public final class Result {
    /**
     * Serialisation fields
     */
-   public static final ParseField JOB_ID = new ParseField("job_id");
    public static final ParseField TYPE = new ParseField("result");
    public static final ParseField RESULT_TYPE = new ParseField("result_type");
    public static final ParseField TIMESTAMP = new ParseField("timestamp");

View File

@@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.joda.time.DateTime;
import java.util.Date;
import static org.hamcrest.Matchers.greaterThan;
public class DataCountsTests extends AbstractXContentTestCase<DataCounts> {
public static DataCounts createTestInstance(String jobId) {
return new DataCounts(jobId, randomIntBetween(1, 1_000_000),
randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000),
randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000),
randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000),
new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(),
new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(),
new DateTime(randomDateTimeZone()).toDate());
}
@Override
public DataCounts createTestInstance() {
return createTestInstance(randomAlphaOfLength(10));
}
@Override
protected DataCounts doParseInstance(XContentParser parser) {
return DataCounts.PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
public void testCountsEquals_GivenEqualCounts() {
DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
DataCounts counts2 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
assertTrue(counts1.equals(counts2));
assertTrue(counts2.equals(counts1));
}
public void testCountsHashCode_GivenEqualCounts() {
DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
DataCounts counts2 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
assertEquals(counts1.hashCode(), counts2.hashCode());
}
public void testCountsCopyConstructor() {
DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
DataCounts counts2 = new DataCounts(counts1);
assertEquals(counts1.hashCode(), counts2.hashCode());
}
public void testCountCreatedZero() throws Exception {
DataCounts counts = new DataCounts(randomAlphaOfLength(16));
assertAllFieldsEqualZero(counts);
}
public void testCountCopyCreatedFieldsNotZero() throws Exception {
DataCounts counts1 = createCounts(1, 200, 400, 3, 4, 5, 6, 7, 8, 9, 1479211200000L, 1479384000000L, 13, 14, 15);
assertAllFieldsGreaterThanZero(counts1);
DataCounts counts2 = new DataCounts(counts1);
assertAllFieldsGreaterThanZero(counts2);
}
private void assertAllFieldsEqualZero(DataCounts stats) throws Exception {
assertEquals(0L, stats.getProcessedRecordCount());
assertEquals(0L, stats.getProcessedFieldCount());
assertEquals(0L, stats.getInputBytes());
assertEquals(0L, stats.getInputFieldCount());
assertEquals(0L, stats.getInputRecordCount());
assertEquals(0L, stats.getInvalidDateCount());
assertEquals(0L, stats.getMissingFieldCount());
assertEquals(0L, stats.getOutOfOrderTimeStampCount());
}
private void assertAllFieldsGreaterThanZero(DataCounts stats) throws Exception {
assertThat(stats.getProcessedRecordCount(), greaterThan(0L));
assertThat(stats.getProcessedFieldCount(), greaterThan(0L));
assertThat(stats.getInputBytes(), greaterThan(0L));
assertThat(stats.getInputFieldCount(), greaterThan(0L));
assertThat(stats.getInputRecordCount(), greaterThan(0L));
assertThat(stats.getInputRecordCount(), greaterThan(0L));
assertThat(stats.getInvalidDateCount(), greaterThan(0L));
assertThat(stats.getMissingFieldCount(), greaterThan(0L));
assertThat(stats.getOutOfOrderTimeStampCount(), greaterThan(0L));
assertThat(stats.getLatestRecordTimeStamp().getTime(), greaterThan(0L));
}
private static DataCounts createCounts(
long processedRecordCount, long processedFieldCount, long inputBytes, long inputFieldCount,
long invalidDateCount, long missingFieldCount, long outOfOrderTimeStampCount,
long emptyBucketCount, long sparseBucketCount, long bucketCount,
long earliestRecordTime, long latestRecordTime, long lastDataTimeStamp, long latestEmptyBucketTimeStamp,
long latestSparseBucketTimeStamp) {
DataCounts counts = new DataCounts("foo", processedRecordCount, processedFieldCount, inputBytes,
inputFieldCount, invalidDateCount, missingFieldCount, outOfOrderTimeStampCount,
emptyBucketCount, sparseBucketCount, bucketCount,
new Date(earliestRecordTime), new Date(latestRecordTime),
new Date(lastDataTimeStamp), new Date(latestEmptyBucketTimeStamp), new Date(latestSparseBucketTimeStamp));
return counts;
}
}
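
For readers wiring these POJOs into client code, here is a minimal round-trip sketch (not part of this commit). It relies only on calls exercised by the test above (the single-argument DataCounts constructor, DataCounts.PARSER, equals) plus the standard Elasticsearch x-content helpers, which are assumed to be available on the classpath; the class and job id names are illustrative.

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.protocol.xpack.ml.job.process.DataCounts;

public class DataCountsRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // Single-argument constructor: all counts start at zero for the given job id.
        DataCounts original = new DataCounts("example-job");

        // Serialize the POJO to JSON with the standard x-content builder.
        XContentBuilder builder = XContentFactory.jsonBuilder();
        original.toXContent(builder, ToXContent.EMPTY_PARAMS);
        String json = Strings.toString(builder);

        // Re-parse with the lenient PARSER the test above exercises and compare.
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            DataCounts reparsed = DataCounts.PARSER.apply(parser, null);
            assert original.equals(reparsed);
        }
    }
}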

View File

@@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.elasticsearch.protocol.xpack.ml.job.process.ModelSizeStats.MemoryStatus;
import java.util.Date;
public class ModelSizeStatsTests extends AbstractXContentTestCase<ModelSizeStats> {
public void testDefaultConstructor() {
ModelSizeStats stats = new ModelSizeStats.Builder("foo").build();
assertEquals(0, stats.getModelBytes());
assertEquals(0, stats.getTotalByFieldCount());
assertEquals(0, stats.getTotalOverFieldCount());
assertEquals(0, stats.getTotalPartitionFieldCount());
assertEquals(0, stats.getBucketAllocationFailuresCount());
assertEquals(MemoryStatus.OK, stats.getMemoryStatus());
}
public void testSetMemoryStatus_GivenNull() {
ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
NullPointerException ex = expectThrows(NullPointerException.class, () -> stats.setMemoryStatus(null));
assertEquals("[memory_status] must not be null", ex.getMessage());
}
public void testSetMemoryStatus_GivenSoftLimit() {
ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
stats.setMemoryStatus(MemoryStatus.SOFT_LIMIT);
assertEquals(MemoryStatus.SOFT_LIMIT, stats.build().getMemoryStatus());
}
@Override
protected ModelSizeStats createTestInstance() {
return createRandomized();
}
public static ModelSizeStats createRandomized() {
ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
if (randomBoolean()) {
stats.setBucketAllocationFailuresCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setModelBytes(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setTotalByFieldCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setTotalOverFieldCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setTotalPartitionFieldCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setLogTime(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
}
if (randomBoolean()) {
stats.setTimestamp(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
}
if (randomBoolean()) {
stats.setMemoryStatus(randomFrom(MemoryStatus.values()));
}
return stats.build();
}
@Override
protected ModelSizeStats doParseInstance(XContentParser parser) {
return ModelSizeStats.PARSER.apply(parser, null).build();
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@@ -0,0 +1,186 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.Version;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.util.Date;
public class ModelSnapshotTests extends AbstractXContentTestCase<ModelSnapshot> {
private static final Date DEFAULT_TIMESTAMP = new Date();
private static final String DEFAULT_DESCRIPTION = "a snapshot";
private static final String DEFAULT_ID = "my_id";
private static final int DEFAULT_DOC_COUNT = 7;
private static final Date DEFAULT_LATEST_RESULT_TIMESTAMP = new Date(12345678901234L);
private static final Date DEFAULT_LATEST_RECORD_TIMESTAMP = new Date(12345678904321L);
private static final boolean DEFAULT_RETAIN = true;
public void testCopyBuilder() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = new ModelSnapshot.Builder(modelSnapshot1).build();
assertEquals(modelSnapshot1, modelSnapshot2);
}
public void testEquals_GivenSameObject() {
ModelSnapshot modelSnapshot = createFullyPopulated().build();
assertTrue(modelSnapshot.equals(modelSnapshot));
}
public void testEquals_GivenObjectOfDifferentClass() {
ModelSnapshot modelSnapshot = createFullyPopulated().build();
assertFalse(modelSnapshot.equals("a string"));
}
public void testEquals_GivenEqualModelSnapshots() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated().build();
assertEquals(modelSnapshot1, modelSnapshot2);
assertEquals(modelSnapshot2, modelSnapshot1);
assertEquals(modelSnapshot1.hashCode(), modelSnapshot2.hashCode());
}
public void testEquals_GivenDifferentTimestamp() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated().setTimestamp(
new Date(modelSnapshot1.getTimestamp().getTime() + 1)).build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
public void testEquals_GivenDifferentDescription() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated()
.setDescription(modelSnapshot1.getDescription() + " blah").build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
public void testEquals_GivenDifferentId() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated()
.setSnapshotId(modelSnapshot1.getSnapshotId() + "_2").build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
public void testEquals_GivenDifferentDocCount() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated()
.setSnapshotDocCount(modelSnapshot1.getSnapshotDocCount() + 1).build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
public void testEquals_GivenDifferentModelSizeStats() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSizeStats.Builder modelSizeStats = new ModelSizeStats.Builder("foo");
modelSizeStats.setModelBytes(42L);
ModelSnapshot modelSnapshot2 = createFullyPopulated().setModelSizeStats(modelSizeStats).build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
public void testEquals_GivenDifferentQuantiles() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated()
.setQuantiles(new Quantiles("foo", modelSnapshot1.getQuantiles().getTimestamp(),
"different state")).build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
public void testEquals_GivenDifferentLatestResultTimestamp() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated().setLatestResultTimeStamp(
new Date(modelSnapshot1.getLatestResultTimeStamp().getTime() + 1)).build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
public void testEquals_GivenDifferentLatestRecordTimestamp() {
ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
ModelSnapshot modelSnapshot2 = createFullyPopulated().setLatestRecordTimeStamp(
new Date(modelSnapshot1.getLatestRecordTimeStamp().getTime() + 1)).build();
assertFalse(modelSnapshot1.equals(modelSnapshot2));
assertFalse(modelSnapshot2.equals(modelSnapshot1));
}
private static ModelSnapshot.Builder createFullyPopulated() {
ModelSnapshot.Builder modelSnapshot = new ModelSnapshot.Builder();
modelSnapshot.setJobId("foo");
modelSnapshot.setMinVersion(Version.CURRENT);
modelSnapshot.setTimestamp(DEFAULT_TIMESTAMP);
modelSnapshot.setDescription(DEFAULT_DESCRIPTION);
modelSnapshot.setSnapshotId(DEFAULT_ID);
modelSnapshot.setSnapshotDocCount(DEFAULT_DOC_COUNT);
ModelSizeStats.Builder modelSizeStatsBuilder = new ModelSizeStats.Builder("foo");
modelSizeStatsBuilder.setLogTime(null);
modelSnapshot.setModelSizeStats(modelSizeStatsBuilder);
modelSnapshot.setLatestResultTimeStamp(DEFAULT_LATEST_RESULT_TIMESTAMP);
modelSnapshot.setLatestRecordTimeStamp(DEFAULT_LATEST_RECORD_TIMESTAMP);
modelSnapshot.setQuantiles(new Quantiles("foo", DEFAULT_TIMESTAMP, "state"));
modelSnapshot.setRetain(DEFAULT_RETAIN);
return modelSnapshot;
}
@Override
protected ModelSnapshot createTestInstance() {
return createRandomized();
}
public static ModelSnapshot createRandomized() {
ModelSnapshot.Builder modelSnapshot = new ModelSnapshot.Builder(randomAlphaOfLengthBetween(1, 20));
modelSnapshot.setMinVersion(Version.CURRENT);
modelSnapshot.setTimestamp(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
modelSnapshot.setDescription(randomAlphaOfLengthBetween(1, 20));
modelSnapshot.setSnapshotId(randomAlphaOfLengthBetween(1, 20));
modelSnapshot.setSnapshotDocCount(randomInt());
modelSnapshot.setModelSizeStats(ModelSizeStatsTests.createRandomized());
modelSnapshot.setLatestResultTimeStamp(
new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
modelSnapshot.setLatestRecordTimeStamp(
new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
modelSnapshot.setQuantiles(QuantilesTests.createRandomized());
modelSnapshot.setRetain(randomBoolean());
return modelSnapshot.build();
}
@Override
protected ModelSnapshot doParseInstance(XContentParser parser){
return ModelSnapshot.PARSER.apply(parser, null).build();
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}
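
As a usage note, the snapshot, size-stats and quantiles POJOs compose naturally. Below is a minimal sketch (not part of this commit; job id, snapshot id and byte values are illustrative only) that uses only builder and constructor calls already exercised by the tests above.

import java.util.Date;

import org.elasticsearch.protocol.xpack.ml.job.process.ModelSizeStats;
import org.elasticsearch.protocol.xpack.ml.job.process.ModelSnapshot;
import org.elasticsearch.protocol.xpack.ml.job.process.Quantiles;

public class ModelSnapshotSketch {
    public static void main(String[] args) {
        // Memory accounting for the model, keyed by job id.
        ModelSizeStats.Builder sizeStats = new ModelSizeStats.Builder("example-job");
        sizeStats.setModelBytes(10_000L);
        sizeStats.setMemoryStatus(ModelSizeStats.MemoryStatus.OK);

        // A snapshot bundles the size stats and the persisted quantiles state.
        ModelSnapshot.Builder snapshotBuilder = new ModelSnapshot.Builder("example-job");
        snapshotBuilder.setSnapshotId("snap-1");
        snapshotBuilder.setSnapshotDocCount(1);
        snapshotBuilder.setTimestamp(new Date());
        snapshotBuilder.setModelSizeStats(sizeStats);
        snapshotBuilder.setQuantiles(new Quantiles("example-job", new Date(), "quantile state"));
        snapshotBuilder.setRetain(true);
        ModelSnapshot snapshot = snapshotBuilder.build();

        System.out.println("built snapshot " + snapshot.getSnapshotId());
    }
}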

View File

@@ -0,0 +1,91 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.protocol.xpack.ml.job.process;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.util.Date;
public class QuantilesTests extends AbstractXContentTestCase<Quantiles> {
public void testEquals_GivenSameObject() {
Quantiles quantiles = new Quantiles("foo", new Date(0L), "foo");
assertTrue(quantiles.equals(quantiles));
}
public void testEquals_GivenDifferentClassObject() {
Quantiles quantiles = new Quantiles("foo", new Date(0L), "foo");
assertFalse(quantiles.equals("not a quantiles object"));
}
public void testEquals_GivenEqualQuantilesObject() {
Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "foo");
Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "foo");
assertTrue(quantiles1.equals(quantiles2));
assertTrue(quantiles2.equals(quantiles1));
}
public void testEquals_GivenDifferentState() {
Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "bar1");
Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "bar2");
assertFalse(quantiles1.equals(quantiles2));
assertFalse(quantiles2.equals(quantiles1));
}
public void testHashCode_GivenEqualObject() {
Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "foo");
Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "foo");
assertEquals(quantiles1.hashCode(), quantiles2.hashCode());
}
@Override
protected Quantiles createTestInstance() {
return createRandomized();
}
public static Quantiles createRandomized() {
return new Quantiles(randomAlphaOfLengthBetween(1, 20),
new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()),
randomAlphaOfLengthBetween(0, 1000));
}
@Override
protected Quantiles doParseInstance(XContentParser parser) {
return Quantiles.PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}