[ML] Parsing objects from internal indices should be lenient (elastic/x-pack-elasticsearch#4256)

All ML objects stored in internal indices are currently parsed
strictly, which means unknown fields cause parsing failures. In
turn, we cannot add new fields to any of those objects (e.g.
bucket, record, calendar, etc.) because doing so would not be
backwards compatible.

This commit changes that by parsing leniently when reading those
objects from the internal indices. Note that we still parse the
objects we read from the C++ process strictly, which is valuable
as it guarantees we detect when a field is renamed on one side
but not the other.
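
For illustration, here is a minimal, self-contained sketch of the
pattern the commit applies to each of these classes (the Example class
and its single field are hypothetical stand-ins, not any real ML class):
one factory method builds both parsers, and the only difference is the
ignoreUnknownFields flag passed to ObjectParser.

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;

class Example {

    private static final ParseField ID = new ParseField("id");

    // Strict parser: an unknown field aborts parsing. Still used for REST
    // requests and for objects read from the C++ process.
    static final ObjectParser<Example, Void> STRICT_PARSER = createParser(false);

    // Lenient parser: unknown fields are skipped. Used when reading objects
    // back from the internal indices, so a document written by a later
    // version that added fields still parses.
    static final ObjectParser<Example, Void> LENIENT_PARSER = createParser(true);

    private static ObjectParser<Example, Void> createParser(boolean ignoreUnknownFields) {
        ObjectParser<Example, Void> parser = new ObjectParser<>("example", ignoreUnknownFields, Example::new);
        parser.declareString(Example::setId, ID);
        return parser;
    }

    private String id;

    void setId(String id) {
        this.id = id;
    }
}

Parsing a document with an unexpected field (e.g. {"id":"foo","extra":42})
fails under STRICT_PARSER with an exception naming the unknown field, while
LENIENT_PARSER simply discards it; the new testStrictParser/testLenientParser
cases in the diff below assert exactly this behaviour.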

Also note that even though this change goes in as of 6.3, we
cannot introduce new fields until 7.0.

relates elastic/x-pack-elasticsearch#4232

Original commit: elastic/x-pack-elasticsearch@3f95d3c7b9
Author: Dimitris Athanasiou, 2018-03-29 17:32:57 +01:00 (committed by GitHub)
parent 1776905a2b
commit 2aeff7713c
48 changed files with 704 additions and 345 deletions


@@ -56,7 +56,7 @@ public class PostCalendarEventsAction extends Action<PostCalendarEventsAction.Re
private static final ObjectParser<List<ScheduledEvent.Builder>, Void> PARSER = new ObjectParser<>(NAME, ArrayList::new);
static {
PARSER.declareObjectArray(List::addAll, (p, c) -> ScheduledEvent.PARSER.apply(p, null), ScheduledEvent.RESULTS_FIELD);
PARSER.declareObjectArray(List::addAll, (p, c) -> ScheduledEvent.STRICT_PARSER.apply(p, null), ScheduledEvent.RESULTS_FIELD);
}
public static Request parseRequest(String calendarId, XContentParser parser) throws IOException {


@@ -49,7 +49,7 @@ public class PutCalendarAction extends Action<PutCalendarAction.Request, PutCale
public static class Request extends ActionRequest implements ToXContentObject {
public static Request parseRequest(String calendarId, XContentParser parser) {
Calendar.Builder builder = Calendar.PARSER.apply(parser, null);
Calendar.Builder builder = Calendar.STRICT_PARSER.apply(parser, null);
if (builder.getId() == null) {
builder.setId(calendarId);
} else if (!Strings.isNullOrEmpty(calendarId) && !calendarId.equals(builder.getId())) {


@@ -47,7 +47,7 @@ public class PutFilterAction extends Action<PutFilterAction.Request, PutFilterAc
public static class Request extends ActionRequest implements ToXContentObject {
public static Request parseRequest(String filterId, XContentParser parser) {
MlFilter.Builder filter = MlFilter.PARSER.apply(parser, null);
MlFilter.Builder filter = MlFilter.STRICT_PARSER.apply(parser, null);
if (filter.getId() == null) {
filter.setId(filterId);
} else if (!Strings.isNullOrEmpty(filterId) && !filterId.equals(filter.getId())) {


@@ -39,13 +39,18 @@ public class Calendar implements ToXContentObject, Writeable {
// For QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("calendars");
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(ID.getPreferredName(), Builder::new);
public static final ObjectParser<Builder, Void> STRICT_PARSER = createParser(false);
public static final ObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(Builder::setId, ID);
PARSER.declareStringArray(Builder::setJobIds, JOB_IDS);
PARSER.declareString((builder, s) -> {}, TYPE);
PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION);
private static ObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
ObjectParser<Builder, Void> parser = new ObjectParser<>(ID.getPreferredName(), ignoreUnknownFields, Builder::new);
parser.declareString(Builder::setId, ID);
parser.declareStringArray(Builder::setJobIds, JOB_IDS);
parser.declareString((builder, s) -> {}, TYPE);
parser.declareStringOrNull(Builder::setDescription, DESCRIPTION);
return parser;
}
public static String documentId(String calendarId) {


@@ -47,12 +47,14 @@ public class ScheduledEvent implements ToXContentObject, Writeable {
public static final String SCHEDULED_EVENT_TYPE = "scheduled_event";
public static final String DOCUMENT_ID_PREFIX = "event_";
public static final ObjectParser<ScheduledEvent.Builder, Void> PARSER =
new ObjectParser<>("scheduled_event", Builder::new);
public static final ObjectParser<ScheduledEvent.Builder, Void> STRICT_PARSER = createParser(false);
public static final ObjectParser<ScheduledEvent.Builder, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ScheduledEvent.Builder::description, DESCRIPTION);
PARSER.declareField(ScheduledEvent.Builder::startTime, p -> {
private static ObjectParser<ScheduledEvent.Builder, Void> createParser(boolean ignoreUnknownFields) {
ObjectParser<ScheduledEvent.Builder, Void> parser = new ObjectParser<>("scheduled_event", ignoreUnknownFields, Builder::new);
parser.declareString(ScheduledEvent.Builder::description, DESCRIPTION);
parser.declareField(ScheduledEvent.Builder::startTime, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(p.longValue()), ZoneOffset.UTC);
} else if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
@@ -61,7 +63,7 @@ public class ScheduledEvent implements ToXContentObject, Writeable {
throw new IllegalArgumentException(
"unexpected token [" + p.currentToken() + "] for [" + START_TIME.getPreferredName() + "]");
}, START_TIME, ObjectParser.ValueType.VALUE);
PARSER.declareField(ScheduledEvent.Builder::endTime, p -> {
parser.declareField(ScheduledEvent.Builder::endTime, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(p.longValue()), ZoneOffset.UTC);
} else if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
@@ -71,8 +73,10 @@ public class ScheduledEvent implements ToXContentObject, Writeable {
"unexpected token [" + p.currentToken() + "] for [" + END_TIME.getPreferredName() + "]");
}, END_TIME, ObjectParser.ValueType.VALUE);
PARSER.declareString(ScheduledEvent.Builder::calendarId, Calendar.ID);
PARSER.declareString((builder, s) -> {}, TYPE);
parser.declareString(ScheduledEvent.Builder::calendarId, Calendar.ID);
parser.declareString((builder, s) -> {}, TYPE);
return parser;
}
public static String documentId(String eventId) {


@@ -35,12 +35,17 @@ public class MlFilter implements ToXContentObject, Writeable {
// For QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("filters");
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(TYPE.getPreferredName(), Builder::new);
public static final ObjectParser<Builder, Void> STRICT_PARSER = createParser(false);
public static final ObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString((builder, s) -> {}, TYPE);
PARSER.declareString(Builder::setId, ID);
PARSER.declareStringArray(Builder::setItems, ITEMS);
private static ObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
ObjectParser<Builder, Void> parser = new ObjectParser<>(TYPE.getPreferredName(), ignoreUnknownFields, Builder::new);
parser.declareString((builder, s) -> {}, TYPE);
parser.declareString(Builder::setId, ID);
parser.declareStringArray(Builder::setItems, ITEMS);
return parser;
}
private final String id;


@@ -73,10 +73,10 @@ public class DataCounts implements ToXContentObject, Writeable {
public static final ParseField TYPE = new ParseField("data_counts");
public static final ConstructingObjectParser<DataCounts, Void> PARSER =
new ConstructingObjectParser<>("data_counts", a -> new DataCounts((String) a[0], (long) a[1], (long) a[2], (long) a[3],
(long) a[4], (long) a[5], (long) a[6], (long) a[7], (long) a[8], (long) a[9], (long) a[10],
(Date) a[11], (Date) a[12], (Date) a[13], (Date) a[14], (Date) a[15]));
public static final ConstructingObjectParser<DataCounts, Void> PARSER = new ConstructingObjectParser<>("data_counts", true,
a -> new DataCounts((String) a[0], (long) a[1], (long) a[2], (long) a[3], (long) a[4], (long) a[5], (long) a[6],
(long) a[7], (long) a[8], (long) a[9], (long) a[10], (Date) a[11], (Date) a[12], (Date) a[13], (Date) a[14],
(Date) a[15]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);


@@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -46,18 +47,21 @@ public class ModelSizeStats implements ToXContentObject, Writeable {
public static final ParseField LOG_TIME_FIELD = new ParseField("log_time");
public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp");
public static final ConstructingObjectParser<Builder, Void> PARSER = new ConstructingObjectParser<>(
RESULT_TYPE_FIELD.getPreferredName(), a -> new Builder((String) a[0]));
public static final ConstructingObjectParser<Builder, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareString((modelSizeStat, s) -> {}, Result.RESULT_TYPE);
PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD);
PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD);
PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD);
PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD);
PARSER.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD);
PARSER.declareField(Builder::setLogTime, p -> {
private static ConstructingObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<Builder, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_FIELD.getPreferredName(),
ignoreUnknownFields, a -> new Builder((String) a[0]));
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareString((modelSizeStat, s) -> {}, Result.RESULT_TYPE);
parser.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD);
parser.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD);
parser.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD);
parser.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD);
parser.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD);
parser.declareField(Builder::setLogTime, p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -66,7 +70,7 @@ public class ModelSizeStats implements ToXContentObject, Writeable {
throw new IllegalArgumentException(
"unexpected token [" + p.currentToken() + "] for [" + LOG_TIME_FIELD.getPreferredName() + "]");
}, LOG_TIME_FIELD, ValueType.VALUE);
PARSER.declareField(Builder::setTimestamp, p -> {
parser.declareField(Builder::setTimestamp, p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -75,7 +79,9 @@ public class ModelSizeStats implements ToXContentObject, Writeable {
throw new IllegalArgumentException(
"unexpected token [" + p.currentToken() + "] for [" + TIMESTAMP_FIELD.getPreferredName() + "]");
}, TIMESTAMP_FIELD, ValueType.VALUE);
PARSER.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING);
parser.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING);
return parser;
}
/**


@@ -56,24 +56,29 @@ public class ModelSnapshot implements ToXContentObject, Writeable {
*/
public static final ParseField TYPE = new ParseField("model_snapshot");
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(TYPE.getPreferredName(), Builder::new);
public static final ObjectParser<Builder, Void> STRICT_PARSER = createParser(false);
public static final ObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(Builder::setJobId, Job.ID);
PARSER.declareString(Builder::setMinVersion, MIN_VERSION);
PARSER.declareField(Builder::setTimestamp, p -> {
private static ObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
ObjectParser<Builder, Void> parser = new ObjectParser<>(TYPE.getPreferredName(), ignoreUnknownFields, Builder::new);
parser.declareString(Builder::setJobId, Job.ID);
parser.declareString(Builder::setMinVersion, MIN_VERSION);
parser.declareField(Builder::setTimestamp, p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
return new Date(TimeUtils.dateStringToEpoch(p.text()));
}
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + TIMESTAMP.getPreferredName() + "]");
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
+ TIMESTAMP.getPreferredName() + "]");
}, TIMESTAMP, ValueType.VALUE);
PARSER.declareString(Builder::setDescription, DESCRIPTION);
PARSER.declareString(Builder::setSnapshotId, ModelSnapshotField.SNAPSHOT_ID);
PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT);
PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER, ModelSizeStats.RESULT_TYPE_FIELD);
PARSER.declareField(Builder::setLatestRecordTimeStamp, p -> {
parser.declareString(Builder::setDescription, DESCRIPTION);
parser.declareString(Builder::setSnapshotId, ModelSnapshotField.SNAPSHOT_ID);
parser.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT);
parser.declareObject(Builder::setModelSizeStats, ignoreUnknownFields ? ModelSizeStats.LENIENT_PARSER : ModelSizeStats.STRICT_PARSER,
ModelSizeStats.RESULT_TYPE_FIELD);
parser.declareField(Builder::setLatestRecordTimeStamp, p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -82,7 +87,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable {
throw new IllegalArgumentException(
"unexpected token [" + p.currentToken() + "] for [" + LATEST_RECORD_TIME.getPreferredName() + "]");
}, LATEST_RECORD_TIME, ValueType.VALUE);
PARSER.declareField(Builder::setLatestResultTimeStamp, p -> {
parser.declareField(Builder::setLatestResultTimeStamp, p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -91,8 +96,10 @@ public class ModelSnapshot implements ToXContentObject, Writeable {
throw new IllegalArgumentException(
"unexpected token [" + p.currentToken() + "] for [" + LATEST_RESULT_TIME.getPreferredName() + "]");
}, LATEST_RESULT_TIME, ValueType.VALUE);
PARSER.declareObject(Builder::setQuantiles, Quantiles.PARSER, QUANTILES);
PARSER.declareBoolean(Builder::setRetain, RETAIN);
parser.declareObject(Builder::setQuantiles, ignoreUnknownFields ? Quantiles.LENIENT_PARSER : Quantiles.STRICT_PARSER, QUANTILES);
parser.declareBoolean(Builder::setRetain, RETAIN);
return parser;
}
@@ -340,7 +347,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable {
try (InputStream stream = bytesReference.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(bytesReference))
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
return PARSER.apply(parser, null).build();
return LENIENT_PARSER.apply(parser, null).build();
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse modelSnapshot", e);
}


@@ -35,13 +35,18 @@ public class Quantiles implements ToXContentObject, Writeable {
*/
public static final ParseField TYPE = new ParseField("quantiles");
public static final ConstructingObjectParser<Quantiles, Void> PARSER = new ConstructingObjectParser<>(
TYPE.getPreferredName(), a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]));
public static final ConstructingObjectParser<Quantiles, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<Quantiles, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG);
PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE);
private static ConstructingObjectParser<Quantiles, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<Quantiles, Void> parser = new ConstructingObjectParser<>(TYPE.getPreferredName(), ignoreUnknownFields,
a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]));
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG);
parser.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE);
return parser;
}
public static String documentId(String jobId) {


@@ -45,23 +45,30 @@ public class AnomalyCause implements ToXContentObject, Writeable {
*/
public static final ParseField FIELD_NAME = new ParseField("field_name");
public static final ObjectParser<AnomalyCause, Void> PARSER = new ObjectParser<>(ANOMALY_CAUSE.getPreferredName(),
public static final ObjectParser<AnomalyCause, Void> STRICT_PARSER = createParser(false);
public static final ObjectParser<AnomalyCause, Void> LENIENT_PARSER = createParser(true);
private static ObjectParser<AnomalyCause, Void> createParser(boolean ignoreUnknownFields) {
ObjectParser<AnomalyCause, Void> parser = new ObjectParser<>(ANOMALY_CAUSE.getPreferredName(), ignoreUnknownFields,
AnomalyCause::new);
static {
PARSER.declareDouble(AnomalyCause::setProbability, PROBABILITY);
PARSER.declareString(AnomalyCause::setByFieldName, BY_FIELD_NAME);
PARSER.declareString(AnomalyCause::setByFieldValue, BY_FIELD_VALUE);
PARSER.declareString(AnomalyCause::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE);
PARSER.declareString(AnomalyCause::setPartitionFieldName, PARTITION_FIELD_NAME);
PARSER.declareString(AnomalyCause::setPartitionFieldValue, PARTITION_FIELD_VALUE);
PARSER.declareString(AnomalyCause::setFunction, FUNCTION);
PARSER.declareString(AnomalyCause::setFunctionDescription, FUNCTION_DESCRIPTION);
PARSER.declareDoubleArray(AnomalyCause::setTypical, TYPICAL);
PARSER.declareDoubleArray(AnomalyCause::setActual, ACTUAL);
PARSER.declareString(AnomalyCause::setFieldName, FIELD_NAME);
PARSER.declareString(AnomalyCause::setOverFieldName, OVER_FIELD_NAME);
PARSER.declareString(AnomalyCause::setOverFieldValue, OVER_FIELD_VALUE);
PARSER.declareObjectArray(AnomalyCause::setInfluencers, Influence.PARSER, INFLUENCERS);
parser.declareDouble(AnomalyCause::setProbability, PROBABILITY);
parser.declareString(AnomalyCause::setByFieldName, BY_FIELD_NAME);
parser.declareString(AnomalyCause::setByFieldValue, BY_FIELD_VALUE);
parser.declareString(AnomalyCause::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE);
parser.declareString(AnomalyCause::setPartitionFieldName, PARTITION_FIELD_NAME);
parser.declareString(AnomalyCause::setPartitionFieldValue, PARTITION_FIELD_VALUE);
parser.declareString(AnomalyCause::setFunction, FUNCTION);
parser.declareString(AnomalyCause::setFunctionDescription, FUNCTION_DESCRIPTION);
parser.declareDoubleArray(AnomalyCause::setTypical, TYPICAL);
parser.declareDoubleArray(AnomalyCause::setActual, ACTUAL);
parser.declareString(AnomalyCause::setFieldName, FIELD_NAME);
parser.declareString(AnomalyCause::setOverFieldName, OVER_FIELD_NAME);
parser.declareString(AnomalyCause::setOverFieldValue, OVER_FIELD_VALUE);
parser.declareObjectArray(AnomalyCause::setInfluencers, ignoreUnknownFields ? Influence.LENIENT_PARSER : Influence.STRICT_PARSER,
INFLUENCERS);
return parser;
}
private double probability;


@@ -44,7 +44,6 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
/**
* Result fields (all detector types)
*/
public static final ParseField SEQUENCE_NUM = new ParseField("sequence_num");
public static final ParseField PROBABILITY = new ParseField("probability");
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
@@ -79,13 +78,18 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
public static final ParseField RECORD_SCORE = new ParseField("record_score");
public static final ParseField INITIAL_RECORD_SCORE = new ParseField("initial_record_score");
public static final ConstructingObjectParser<AnomalyRecord, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true,
public static final ConstructingObjectParser<AnomalyRecord, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<AnomalyRecord, Void> LENIENT_PARSER = createParser(true);
private static ConstructingObjectParser<AnomalyRecord, Void> createParser(boolean ignoreUnknownFields) {
// As a record contains fields named after the data fields, the parser for the record should always ignore unknown fields.
// However, it makes sense to offer strict/lenient parsing for other members, e.g. influences, anomaly causes, etc.
ConstructingObjectParser<AnomalyRecord, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true,
a -> new AnomalyRecord((String) a[0], (Date) a[1], (long) a[2]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -94,29 +98,31 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
+ Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
PARSER.declareInt(AnomalyRecord::setDetectorIndex, Detector.DETECTOR_INDEX);
PARSER.declareBoolean(AnomalyRecord::setInterim, Result.IS_INTERIM);
PARSER.declareString(AnomalyRecord::setByFieldName, BY_FIELD_NAME);
PARSER.declareString(AnomalyRecord::setByFieldValue, BY_FIELD_VALUE);
PARSER.declareString(AnomalyRecord::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE);
PARSER.declareString(AnomalyRecord::setPartitionFieldName, PARTITION_FIELD_NAME);
PARSER.declareString(AnomalyRecord::setPartitionFieldValue, PARTITION_FIELD_VALUE);
PARSER.declareString(AnomalyRecord::setFunction, FUNCTION);
PARSER.declareString(AnomalyRecord::setFunctionDescription, FUNCTION_DESCRIPTION);
PARSER.declareDoubleArray(AnomalyRecord::setTypical, TYPICAL);
PARSER.declareDoubleArray(AnomalyRecord::setActual, ACTUAL);
PARSER.declareString(AnomalyRecord::setFieldName, FIELD_NAME);
PARSER.declareString(AnomalyRecord::setOverFieldName, OVER_FIELD_NAME);
PARSER.declareString(AnomalyRecord::setOverFieldValue, OVER_FIELD_VALUE);
PARSER.declareObjectArray(AnomalyRecord::setCauses, AnomalyCause.PARSER, CAUSES);
PARSER.declareObjectArray(AnomalyRecord::setInfluencers, Influence.PARSER, INFLUENCERS);
// For bwc with 5.4
PARSER.declareInt((anomalyRecord, sequenceNum) -> {}, SEQUENCE_NUM);
parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
parser.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
parser.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
parser.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
parser.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
parser.declareInt(AnomalyRecord::setDetectorIndex, Detector.DETECTOR_INDEX);
parser.declareBoolean(AnomalyRecord::setInterim, Result.IS_INTERIM);
parser.declareString(AnomalyRecord::setByFieldName, BY_FIELD_NAME);
parser.declareString(AnomalyRecord::setByFieldValue, BY_FIELD_VALUE);
parser.declareString(AnomalyRecord::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE);
parser.declareString(AnomalyRecord::setPartitionFieldName, PARTITION_FIELD_NAME);
parser.declareString(AnomalyRecord::setPartitionFieldValue, PARTITION_FIELD_VALUE);
parser.declareString(AnomalyRecord::setFunction, FUNCTION);
parser.declareString(AnomalyRecord::setFunctionDescription, FUNCTION_DESCRIPTION);
parser.declareDoubleArray(AnomalyRecord::setTypical, TYPICAL);
parser.declareDoubleArray(AnomalyRecord::setActual, ACTUAL);
parser.declareString(AnomalyRecord::setFieldName, FIELD_NAME);
parser.declareString(AnomalyRecord::setOverFieldName, OVER_FIELD_NAME);
parser.declareString(AnomalyRecord::setOverFieldValue, OVER_FIELD_VALUE);
parser.declareObjectArray(AnomalyRecord::setCauses, ignoreUnknownFields ? AnomalyCause.LENIENT_PARSER : AnomalyCause.STRICT_PARSER,
CAUSES);
parser.declareObjectArray(AnomalyRecord::setInfluencers, ignoreUnknownFields ? Influence.LENIENT_PARSER : Influence.STRICT_PARSER,
INFLUENCERS);
return parser;
}
private final String jobId;


@@ -46,9 +46,6 @@ public class Bucket implements ToXContentObject, Writeable {
public static final ParseField PARTITION_SCORES = new ParseField("partition_scores");
public static final ParseField SCHEDULED_EVENTS = new ParseField("scheduled_events");
// Only exists for backwards compatibility; no longer added to mappings
private static final ParseField RECORD_COUNT = new ParseField("record_count");
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("buckets");
@@ -58,12 +55,15 @@ public class Bucket implements ToXContentObject, Writeable {
public static final String RESULT_TYPE_VALUE = "bucket";
public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE);
public static final ConstructingObjectParser<Bucket, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]));
public static final ConstructingObjectParser<Bucket, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<Bucket, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
private static ConstructingObjectParser<Bucket, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<Bucket, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_VALUE, ignoreUnknownFields,
a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]));
parser.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -72,19 +72,22 @@ public class Bucket implements ToXContentObject, Writeable {
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
+ Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE);
PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
PARSER.declareBoolean(Bucket::setInterim, Result.IS_INTERIM);
PARSER.declareLong(Bucket::setEventCount, EVENT_COUNT);
PARSER.declareObjectArray(Bucket::setRecords, AnomalyRecord.PARSER, RECORDS);
PARSER.declareObjectArray(Bucket::setBucketInfluencers, BucketInfluencer.PARSER, BUCKET_INFLUENCERS);
PARSER.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS);
PARSER.declareObjectArray(Bucket::setPartitionScores, PartitionScore.PARSER, PARTITION_SCORES);
PARSER.declareString((bucket, s) -> {}, Result.RESULT_TYPE);
// For bwc with 5.4
PARSER.declareInt((bucket, recordCount) -> {}, RECORD_COUNT);
PARSER.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS);
parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
parser.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE);
parser.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
parser.declareBoolean(Bucket::setInterim, Result.IS_INTERIM);
parser.declareLong(Bucket::setEventCount, EVENT_COUNT);
parser.declareObjectArray(Bucket::setRecords, ignoreUnknownFields ? AnomalyRecord.LENIENT_PARSER : AnomalyRecord.STRICT_PARSER,
RECORDS);
parser.declareObjectArray(Bucket::setBucketInfluencers, ignoreUnknownFields ?
BucketInfluencer.LENIENT_PARSER : BucketInfluencer.STRICT_PARSER, BUCKET_INFLUENCERS);
parser.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS);
parser.declareObjectArray(Bucket::setPartitionScores, ignoreUnknownFields ?
PartitionScore.LENIENT_PARSER : PartitionScore.STRICT_PARSER, PARTITION_SCORES);
parser.declareString((bucket, s) -> {}, Result.RESULT_TYPE);
parser.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS);
return parser;
}
private final String jobId;


@@ -39,20 +39,21 @@ public class BucketInfluencer implements ToXContentObject, Writeable {
public static final ParseField RAW_ANOMALY_SCORE = new ParseField("raw_anomaly_score");
public static final ParseField PROBABILITY = new ParseField("probability");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField SEQUENCE_NUM = new ParseField("sequence_num");
/**
* The influencer field name used for time influencers
*/
public static final String BUCKET_TIME = "bucket_time";
public static final ConstructingObjectParser<BucketInfluencer, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_FIELD.getPreferredName(), a -> new BucketInfluencer((String) a[0],
(Date) a[1], (long) a[2]));
public static final ConstructingObjectParser<BucketInfluencer, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<BucketInfluencer, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
private static ConstructingObjectParser<BucketInfluencer, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<BucketInfluencer, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_FIELD.getPreferredName(),
ignoreUnknownFields, a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2]));
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -61,16 +62,16 @@ public class BucketInfluencer implements ToXContentObject, Writeable {
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
+ Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE);
PARSER.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME);
PARSER.declareDouble(BucketInfluencer::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
PARSER.declareDouble(BucketInfluencer::setAnomalyScore, ANOMALY_SCORE);
PARSER.declareDouble(BucketInfluencer::setRawAnomalyScore, RAW_ANOMALY_SCORE);
PARSER.declareDouble(BucketInfluencer::setProbability, PROBABILITY);
PARSER.declareBoolean(BucketInfluencer::setIsInterim, Result.IS_INTERIM);
// For bwc with 5.4
PARSER.declareInt((bucketInfluencer, sequenceNum) -> {}, SEQUENCE_NUM);
parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
parser.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE);
parser.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME);
parser.declareDouble(BucketInfluencer::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
parser.declareDouble(BucketInfluencer::setAnomalyScore, ANOMALY_SCORE);
parser.declareDouble(BucketInfluencer::setRawAnomalyScore, RAW_ANOMALY_SCORE);
parser.declareDouble(BucketInfluencer::setProbability, PROBABILITY);
parser.declareBoolean(BucketInfluencer::setIsInterim, Result.IS_INTERIM);
return parser;
}
private final String jobId;


@@ -38,16 +38,21 @@ public class CategoryDefinition implements ToXContentObject, Writeable {
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("categories");
public static final ConstructingObjectParser<CategoryDefinition, Void> PARSER =
new ConstructingObjectParser<>(TYPE.getPreferredName(), a -> new CategoryDefinition((String) a[0]));
public static final ConstructingObjectParser<CategoryDefinition, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<CategoryDefinition, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID);
PARSER.declareString(CategoryDefinition::setTerms, TERMS);
PARSER.declareString(CategoryDefinition::setRegex, REGEX);
PARSER.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH);
PARSER.declareStringArray(CategoryDefinition::setExamples, EXAMPLES);
private static ConstructingObjectParser<CategoryDefinition, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<CategoryDefinition, Void> parser = new ConstructingObjectParser<>(TYPE.getPreferredName(),
ignoreUnknownFields, a -> new CategoryDefinition((String) a[0]));
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID);
parser.declareString(CategoryDefinition::setTerms, TERMS);
parser.declareString(CategoryDefinition::setRegex, REGEX);
parser.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH);
parser.declareStringArray(CategoryDefinition::setExamples, EXAMPLES);
return parser;
}
private final String jobId;


@@ -43,14 +43,15 @@ public class Forecast implements ToXContentObject, Writeable {
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
public static final ConstructingObjectParser<Forecast, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a ->
new Forecast((String) a[0], (String) a[1], (Date) a[2], (long) a[3], (int) a[4]));
public static final ConstructingObjectParser<Forecast, Void> STRICT_PARSER = createParser(false);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
private static ConstructingObjectParser<Forecast, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<Forecast, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_VALUE, ignoreUnknownFields,
a -> new Forecast((String) a[0], (String) a[1], (Date) a[2], (long) a[3], (int) a[4]));
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID);
parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -59,17 +60,19 @@ public class Forecast implements ToXContentObject, Writeable {
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
+ Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX);
PARSER.declareString((modelForecast, s) -> {}, Result.RESULT_TYPE);
PARSER.declareString(Forecast::setPartitionFieldName, PARTITION_FIELD_NAME);
PARSER.declareString(Forecast::setPartitionFieldValue, PARTITION_FIELD_VALUE);
PARSER.declareString(Forecast::setByFieldName, BY_FIELD_NAME);
PARSER.declareString(Forecast::setByFieldValue, BY_FIELD_VALUE);
PARSER.declareString(Forecast::setModelFeature, MODEL_FEATURE);
PARSER.declareDouble(Forecast::setForecastLower, FORECAST_LOWER);
PARSER.declareDouble(Forecast::setForecastUpper, FORECAST_UPPER);
PARSER.declareDouble(Forecast::setForecastPrediction, FORECAST_PREDICTION);
parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
parser.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX);
parser.declareString((modelForecast, s) -> {}, Result.RESULT_TYPE);
parser.declareString(Forecast::setPartitionFieldName, PARTITION_FIELD_NAME);
parser.declareString(Forecast::setPartitionFieldValue, PARTITION_FIELD_VALUE);
parser.declareString(Forecast::setByFieldName, BY_FIELD_NAME);
parser.declareString(Forecast::setByFieldValue, BY_FIELD_VALUE);
parser.declareString(Forecast::setModelFeature, MODEL_FEATURE);
parser.declareDouble(Forecast::setForecastLower, FORECAST_LOWER);
parser.declareDouble(Forecast::setForecastUpper, FORECAST_UPPER);
parser.declareDouble(Forecast::setForecastPrediction, FORECAST_PREDICTION);
return parser;
}
private final String jobId;


@@ -47,30 +47,35 @@ public class ForecastRequestStats implements ToXContentObject, Writeable {
public static final ParseField STATUS = new ParseField("forecast_status");
public static final ParseField MEMORY_USAGE = new ParseField("forecast_memory_bytes");
public static final ConstructingObjectParser<ForecastRequestStats, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a -> new ForecastRequestStats((String) a[0], (String) a[1]));
public static final ConstructingObjectParser<ForecastRequestStats, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<ForecastRequestStats, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID);
private static ConstructingObjectParser<ForecastRequestStats, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<ForecastRequestStats, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_VALUE, ignoreUnknownFields,
a -> new ForecastRequestStats((String) a[0], (String) a[1]));
PARSER.declareString((modelForecastRequestStats, s) -> {}, Result.RESULT_TYPE);
PARSER.declareLong(ForecastRequestStats::setRecordCount, PROCESSED_RECORD_COUNT);
PARSER.declareStringArray(ForecastRequestStats::setMessages, MESSAGES);
PARSER.declareField(ForecastRequestStats::setTimeStamp,
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID);
parser.declareString((modelForecastRequestStats, s) -> {}, Result.RESULT_TYPE);
parser.declareLong(ForecastRequestStats::setRecordCount, PROCESSED_RECORD_COUNT);
parser.declareStringArray(ForecastRequestStats::setMessages, MESSAGES);
parser.declareField(ForecastRequestStats::setTimeStamp,
p -> Instant.ofEpochMilli(p.longValue()), Result.TIMESTAMP, ValueType.LONG);
PARSER.declareField(ForecastRequestStats::setStartTime,
parser.declareField(ForecastRequestStats::setStartTime,
p -> Instant.ofEpochMilli(p.longValue()), START_TIME, ValueType.LONG);
PARSER.declareField(ForecastRequestStats::setEndTime,
parser.declareField(ForecastRequestStats::setEndTime,
p -> Instant.ofEpochMilli(p.longValue()), END_TIME, ValueType.LONG);
PARSER.declareField(ForecastRequestStats::setCreateTime,
parser.declareField(ForecastRequestStats::setCreateTime,
p -> Instant.ofEpochMilli(p.longValue()), CREATE_TIME, ValueType.LONG);
PARSER.declareField(ForecastRequestStats::setExpiryTime,
parser.declareField(ForecastRequestStats::setExpiryTime,
p -> Instant.ofEpochMilli(p.longValue()), EXPIRY_TIME, ValueType.LONG);
PARSER.declareDouble(ForecastRequestStats::setProgress, PROGRESS);
PARSER.declareLong(ForecastRequestStats::setProcessingTime, PROCESSING_TIME_MS);
PARSER.declareField(ForecastRequestStats::setStatus, p -> ForecastRequestStatus.fromString(p.text()), STATUS, ValueType.STRING);
PARSER.declareLong(ForecastRequestStats::setMemoryUsage, MEMORY_USAGE);
parser.declareDouble(ForecastRequestStats::setProgress, PROGRESS);
parser.declareLong(ForecastRequestStats::setProcessingTime, PROCESSING_TIME_MS);
parser.declareField(ForecastRequestStats::setStatus, p -> ForecastRequestStatus.fromString(p.text()), STATUS, ValueType.STRING);
parser.declareLong(ForecastRequestStats::setMemoryUsage, MEMORY_USAGE);
return parser;
}
public enum ForecastRequestStatus implements Writeable {


@@ -30,15 +30,18 @@ public class Influence implements ToXContentObject, Writeable {
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
public static final ParseField INFLUENCER_FIELD_VALUES = new ParseField("influencer_field_values");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<Influence, Void> PARSER = new ConstructingObjectParser<>(
INFLUENCER.getPreferredName(), a -> new Influence((String) a[0], (List<String>) a[1]));
public static final ConstructingObjectParser<Influence, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<Influence, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUES);
private static ConstructingObjectParser<Influence, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<Influence, Void> parser = new ConstructingObjectParser<>(INFLUENCER.getPreferredName(),
ignoreUnknownFields, a -> new Influence((String) a[0], (List<String>) a[1]));
parser.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
parser.declareStringArray(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUES);
return parser;
}
private String field;
private List<String> fieldValues;


@@ -34,7 +34,6 @@ public class Influencer implements ToXContentObject, Writeable {
* Field names
*/
public static final ParseField PROBABILITY = new ParseField("probability");
public static final ParseField SEQUENCE_NUM = new ParseField("sequence_num");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencer_field_value");
@@ -44,15 +43,16 @@ public class Influencer implements ToXContentObject, Writeable {
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("influencers");
public static final ConstructingObjectParser<Influencer, Void> PARSER = new ConstructingObjectParser<>(
RESULT_TYPE_FIELD.getPreferredName(), true, a -> new Influencer((String) a[0], (String) a[1], (String) a[2],
(Date) a[3], (long) a[4]));
// Influencers contain data fields, thus we always parse them leniently
public static final ConstructingObjectParser<Influencer, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
RESULT_TYPE_FIELD.getPreferredName(), true,
a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
LENIENT_PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
LENIENT_PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
LENIENT_PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE);
LENIENT_PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -61,14 +61,12 @@ public class Influencer implements ToXContentObject, Writeable {
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
+ Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE);
PARSER.declareDouble(Influencer::setProbability, PROBABILITY);
PARSER.declareDouble(Influencer::setInfluencerScore, INFLUENCER_SCORE);
PARSER.declareDouble(Influencer::setInitialInfluencerScore, INITIAL_INFLUENCER_SCORE);
PARSER.declareBoolean(Influencer::setInterim, Result.IS_INTERIM);
// For bwc with 5.4
PARSER.declareInt((influencer, sequenceNum) -> {}, SEQUENCE_NUM);
LENIENT_PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
LENIENT_PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE);
LENIENT_PARSER.declareDouble(Influencer::setProbability, PROBABILITY);
LENIENT_PARSER.declareDouble(Influencer::setInfluencerScore, INFLUENCER_SCORE);
LENIENT_PARSER.declareDouble(Influencer::setInitialInfluencerScore, INITIAL_INFLUENCER_SCORE);
LENIENT_PARSER.declareBoolean(Influencer::setInterim, Result.IS_INTERIM);
}
private final String jobId;


@@ -46,13 +46,15 @@ public class ModelPlot implements ToXContentObject, Writeable {
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
public static final ConstructingObjectParser<ModelPlot, Void> PARSER =
new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a ->
new ModelPlot((String) a[0], (Date) a[1], (long) a[2], (int) a[3]));
public static final ConstructingObjectParser<ModelPlot, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<ModelPlot, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
private static ConstructingObjectParser<ModelPlot, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<ModelPlot, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_VALUE, ignoreUnknownFields,
a -> new ModelPlot((String) a[0], (Date) a[1], (long) a[2], (int) a[3]));
parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
} else if (p.currentToken() == Token.VALUE_STRING) {
@@ -61,20 +63,22 @@ public class ModelPlot implements ToXContentObject, Writeable {
throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
+ Result.TIMESTAMP.getPreferredName() + "]");
}, Result.TIMESTAMP, ValueType.VALUE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX);
PARSER.declareString((modelPlot, s) -> {}, Result.RESULT_TYPE);
PARSER.declareString(ModelPlot::setPartitionFieldName, PARTITION_FIELD_NAME);
PARSER.declareString(ModelPlot::setPartitionFieldValue, PARTITION_FIELD_VALUE);
PARSER.declareString(ModelPlot::setOverFieldName, OVER_FIELD_NAME);
PARSER.declareString(ModelPlot::setOverFieldValue, OVER_FIELD_VALUE);
PARSER.declareString(ModelPlot::setByFieldName, BY_FIELD_NAME);
PARSER.declareString(ModelPlot::setByFieldValue, BY_FIELD_VALUE);
PARSER.declareString(ModelPlot::setModelFeature, MODEL_FEATURE);
PARSER.declareDouble(ModelPlot::setModelLower, MODEL_LOWER);
PARSER.declareDouble(ModelPlot::setModelUpper, MODEL_UPPER);
PARSER.declareDouble(ModelPlot::setModelMedian, MODEL_MEDIAN);
PARSER.declareDouble(ModelPlot::setActual, ACTUAL);
parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
parser.declareInt(ConstructingObjectParser.constructorArg(), DETECTOR_INDEX);
parser.declareString((modelPlot, s) -> {}, Result.RESULT_TYPE);
parser.declareString(ModelPlot::setPartitionFieldName, PARTITION_FIELD_NAME);
parser.declareString(ModelPlot::setPartitionFieldValue, PARTITION_FIELD_VALUE);
parser.declareString(ModelPlot::setOverFieldName, OVER_FIELD_NAME);
parser.declareString(ModelPlot::setOverFieldValue, OVER_FIELD_VALUE);
parser.declareString(ModelPlot::setByFieldName, BY_FIELD_NAME);
parser.declareString(ModelPlot::setByFieldValue, BY_FIELD_VALUE);
parser.declareString(ModelPlot::setModelFeature, MODEL_FEATURE);
parser.declareDouble(ModelPlot::setModelLower, MODEL_LOWER);
parser.declareDouble(ModelPlot::setModelUpper, MODEL_UPPER);
parser.declareDouble(ModelPlot::setModelMedian, MODEL_MEDIAN);
parser.declareDouble(ModelPlot::setActual, ACTUAL);
return parser;
}
private final String jobId;


@@ -25,16 +25,20 @@ public class PartitionScore implements ToXContentObject, Writeable {
private double recordScore;
private double probability;
public static final ConstructingObjectParser<PartitionScore, Void> PARSER = new ConstructingObjectParser<>(
PARTITION_SCORE.getPreferredName(), a -> new PartitionScore((String) a[0], (String) a[1], (Double) a[2], (Double) a[3],
(Double) a[4]));
public static final ConstructingObjectParser<PartitionScore, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<PartitionScore, Void> LENIENT_PARSER = createParser(true);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_NAME);
PARSER.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_VALUE);
PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.INITIAL_RECORD_SCORE);
PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.RECORD_SCORE);
PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.PROBABILITY);
private static ConstructingObjectParser<PartitionScore, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<PartitionScore, Void> parser = new ConstructingObjectParser<>(PARTITION_SCORE.getPreferredName(),
ignoreUnknownFields, a -> new PartitionScore((String) a[0], (String) a[1], (Double) a[2], (Double) a[3], (Double) a[4]));
parser.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_NAME);
parser.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_VALUE);
parser.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.INITIAL_RECORD_SCORE);
parser.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.RECORD_SCORE);
parser.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.PROBABILITY);
return parser;
}
public PartitionScore(String fieldName, String fieldValue, double initialRecordScore, double recordScore, double probability) {


@@ -30,8 +30,7 @@ public class AuditMessage implements ToXContentObject, Writeable {
public static final ParseField TIMESTAMP = new ParseField("timestamp");
public static final ParseField NODE_NAME = new ParseField("node_name");
public static final ObjectParser<AuditMessage, Void> PARSER = new ObjectParser<>(TYPE.getPreferredName(),
AuditMessage::new);
public static final ObjectParser<AuditMessage, Void> PARSER = new ObjectParser<>(TYPE.getPreferredName(), true, AuditMessage::new);
static {
PARSER.declareString(AuditMessage::setJobId, Job.ID);


@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.calendars;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.config.JobTests;
@@ -15,6 +16,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class CalendarTests extends AbstractSerializingTestCase<Calendar> {
@@ -48,7 +50,7 @@ public class CalendarTests extends AbstractSerializingTestCase<Calendar> {
@Override
protected Calendar doParseInstance(XContentParser parser) throws IOException {
return Calendar.PARSER.apply(parser, null).build();
return Calendar.STRICT_PARSER.apply(parser, null).build();
}
public void testNullId() {
@@ -59,4 +61,21 @@ public class CalendarTests extends AbstractSerializingTestCase<Calendar> {
public void testDocumentId() {
assertThat(Calendar.documentId("foo"), equalTo("calendar_foo"));
}
public void testStrictParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> Calendar.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
Calendar.LENIENT_PARSER.apply(parser, null);
}
}
}


@@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.config.Connective;
import org.elasticsearch.xpack.core.ml.job.config.DetectionRule;
@@ -47,7 +48,7 @@ public class ScheduledEventTests extends AbstractSerializingTestCase<ScheduledEv
@Override
protected ScheduledEvent doParseInstance(XContentParser parser) throws IOException {
return ScheduledEvent.PARSER.apply(parser, null).build();
return ScheduledEvent.STRICT_PARSER.apply(parser, null).build();
}
public void testToDetectionRule() {
@@ -107,4 +108,21 @@ public class ScheduledEventTests extends AbstractSerializingTestCase<ScheduledEv
e = expectThrows(ElasticsearchStatusException.class, builder::build);
assertThat(e.getMessage(), containsString("must come before end time"));
}
public void testStrictParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> ScheduledEvent.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
ScheduledEvent.LENIENT_PARSER.apply(parser, null);
}
}
}


@@ -7,12 +7,16 @@ package org.elasticsearch.xpack.core.ml.job.config;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class MlFilterTests extends AbstractSerializingTestCase<MlFilter> {
@ -38,7 +42,7 @@ public class MlFilterTests extends AbstractSerializingTestCase<MlFilter> {
@Override
protected MlFilter doParseInstance(XContentParser parser) {
return MlFilter.PARSER.apply(parser, null).build();
return MlFilter.STRICT_PARSER.apply(parser, null).build();
}
public void testNullId() {
@ -55,4 +59,21 @@ public class MlFilterTests extends AbstractSerializingTestCase<MlFilter> {
public void testDocumentId() {
assertThat(MlFilter.documentId("foo"), equalTo("filter_foo"));
}
public void testStrictParser() throws IOException {
String json = "{\"filter_id\":\"filter_1\", \"items\": [], \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> MlFilter.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"filter_id\":\"filter_1\", \"items\": [], \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
MlFilter.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -8,12 +8,15 @@ package org.elasticsearch.xpack.core.ml.job.process.autodetect.state;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats.MemoryStatus;
import java.io.IOException;
import java.util.Date;
import static org.hamcrest.Matchers.containsString;
public class ModelSizeStatsTests extends AbstractSerializingTestCase<ModelSizeStats> {
public void testDefaultConstructor() {
@ -83,11 +86,28 @@ public class ModelSizeStatsTests extends AbstractSerializingTestCase<ModelSizeSt
@Override
protected ModelSizeStats doParseInstance(XContentParser parser) {
return ModelSizeStats.PARSER.apply(parser, null).build();
return ModelSizeStats.STRICT_PARSER.apply(parser, null).build();
}
public void testId() {
ModelSizeStats stats = new ModelSizeStats.Builder("job-foo").setLogTime(new Date(100)).build();
assertEquals("job-foo_model_size_stats_100", stats.getId());
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> ModelSizeStats.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
ModelSizeStats.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -8,12 +8,20 @@ package org.elasticsearch.xpack.core.ml.job.process.autodetect.state;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.Date;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class ModelSnapshotTests extends AbstractSerializingTestCase<ModelSnapshot> {
@ -171,7 +179,7 @@ public class ModelSnapshotTests extends AbstractSerializingTestCase<ModelSnapsho
@Override
protected ModelSnapshot doParseInstance(XContentParser parser) {
return ModelSnapshot.PARSER.apply(parser, null).build();
return ModelSnapshot.STRICT_PARSER.apply(parser, null).build();
}
public void testDocumentId() {
@ -194,4 +202,21 @@ public class ModelSnapshotTests extends AbstractSerializingTestCase<ModelSnapsho
assertThat(snapshot.stateDocumentIds(),
equalTo(Arrays.asList("foo_model_state_123456789#1", "foo_model_state_123456789#2", "foo_model_state_123456789#3")));
}
public void testStrictParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> ModelSnapshot.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
ModelSnapshot.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -8,10 +8,14 @@ package org.elasticsearch.xpack.core.ml.job.process.autodetect.state;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.util.Date;
import static org.hamcrest.Matchers.containsString;
public class QuantilesTests extends AbstractSerializingTestCase<Quantiles> {
public void testEquals_GivenSameObject() {
@ -78,6 +82,23 @@ public class QuantilesTests extends AbstractSerializingTestCase<Quantiles> {
@Override
protected Quantiles doParseInstance(XContentParser parser) {
return Quantiles.PARSER.apply(parser, null);
return Quantiles.STRICT_PARSER.apply(parser, null);
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123456789, \"quantile_state\":\"...\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> Quantiles.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123456789, \"quantile_state\":\"...\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
Quantiles.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -7,13 +7,15 @@ package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyCause;
import org.elasticsearch.xpack.core.ml.job.results.Influence;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
public class AnomalyCauseTests extends AbstractSerializingTestCase<AnomalyCause> {
@Override
@ -91,7 +93,23 @@ public class AnomalyCauseTests extends AbstractSerializingTestCase<AnomalyCause>
@Override
protected AnomalyCause doParseInstance(XContentParser parser) {
return AnomalyCause.PARSER.apply(parser, null);
return AnomalyCause.STRICT_PARSER.apply(parser, null);
}
public void testStrictParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> AnomalyCause.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
AnomalyCause.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -5,14 +5,13 @@
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
@ -24,6 +23,8 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.hamcrest.Matchers.containsString;
public class AnomalyRecordTests extends AbstractSerializingTestCase<AnomalyRecord> {
@Override
@ -86,7 +87,7 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase<AnomalyRecor
@Override
protected AnomalyRecord doParseInstance(XContentParser parser) {
return AnomalyRecord.PARSER.apply(parser, null);
return AnomalyRecord.STRICT_PARSER.apply(parser, null);
}
@SuppressWarnings("unchecked")
@ -189,15 +190,27 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase<AnomalyRecor
assertEquals("test-job_record_1000_60_0_" + valuesHash + "_" + length, record.getId());
}
public void testParsingv54WithSequenceNumField() throws IOException {
AnomalyRecord record = createTestInstance();
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
builder.field(AnomalyRecord.SEQUENCE_NUM.getPreferredName(), 1);
record.innerToXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
XContentParser parser = createParser(builder);
AnomalyRecord serialised = doParseInstance(parser);
assertEquals(record, serialised);
public void testStrictParser_IsLenientOnTopLevelFields() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
AnomalyRecord.STRICT_PARSER.apply(parser, null);
}
}
public void testStrictParser_IsStrictOnNestedFields() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600, \"foo\":\"bar\"," +
" \"causes\":[{\"cause_foo\":\"bar\"}]}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
ParsingException e = expectThrows(ParsingException.class, () -> AnomalyRecord.STRICT_PARSER.apply(parser, null));
assertThat(e.getCause().getMessage(), containsString("[anomaly_cause] unknown field [cause_foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600, \"foo\":\"bar\"," +
" \"causes\":[{\"cause_foo\":\"bar\"}]}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
AnomalyRecord.LENIENT_PARSER.apply(parser, null);
}
}
}
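
Note: the asymmetry tested above, where the strict parser tolerates unknown top-level fields but rejects unknown fields inside causes, presumably keeps records written by older versions readable (the removed 5.4 test parsed a since-deleted sequence_num field). A hypothetical sketch of the mechanism (Record and Cause are illustrative types, not classes from this commit):

// The top-level parser is built with ignoreUnknownFields=true, so it skips
// fields like "foo" itself, yet it stays strict about nested objects by
// delegating them to a parser built with ignoreUnknownFields=false.
private static final ObjectParser<Cause, Void> STRICT_CAUSE_PARSER =
        new ObjectParser<>("cause", false, Cause::new);
private static final ObjectParser<Record, Void> TOP_LEVEL_PARSER =
        new ObjectParser<>("record", true, Record::new);
static {
    // Unknown fields inside each cause still fail the parse, because the
    // nested parser rejects them.
    TOP_LEVEL_PARSER.declareObjectArray(Record::setCauses,
            (p, c) -> STRICT_CAUSE_PARSER.apply(p, null), new ParseField("causes"));
}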

View File

@ -6,16 +6,15 @@
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.util.Date;
import static org.hamcrest.Matchers.containsString;
public class BucketInfluencerTests extends AbstractSerializingTestCase<BucketInfluencer> {
@Override
@ -50,7 +49,7 @@ public class BucketInfluencerTests extends AbstractSerializingTestCase<BucketInf
@Override
protected BucketInfluencer doParseInstance(XContentParser parser) {
return BucketInfluencer.PARSER.apply(parser, null);
return BucketInfluencer.STRICT_PARSER.apply(parser, null);
}
public void testEquals_GivenNull() {
@ -144,16 +143,20 @@ public class BucketInfluencerTests extends AbstractSerializingTestCase<BucketInf
assertEquals("job-foo_bucket_influencer_1000_300_field-with-influence", influencer.getId());
}
public void testParsingv54WithSequenceNumField() throws IOException {
BucketInfluencer bucketInfluencer = createTestInstance();
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
builder.field(BucketInfluencer.SEQUENCE_NUM.getPreferredName(), 1);
bucketInfluencer.innerToXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
XContentParser parser = createParser(builder);
BucketInfluencer serialised = doParseInstance(parser);
assertEquals(bucketInfluencer, serialised);
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> BucketInfluencer.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
BucketInfluencer.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -7,12 +7,10 @@ package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
@ -42,7 +40,7 @@ public class InfluencerTests extends AbstractSerializingTestCase<Influencer> {
@Override
protected Influencer doParseInstance(XContentParser parser) {
return Influencer.PARSER.apply(parser, null);
return Influencer.LENIENT_PARSER.apply(parser, null);
}
public void testToXContentIncludesNameValueField() throws IOException {
@ -70,15 +68,11 @@ public class InfluencerTests extends AbstractSerializingTestCase<Influencer> {
assertEquals("job-foo_influencer_1000_300_host_" + valueHash + "_" + influencerFieldValue.length(), influencer.getId());
}
public void testParsingv54WithSequenceNumField() throws IOException {
Influencer influencer = createTestInstance();
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
builder.field(Influencer.SEQUENCE_NUM.getPreferredName(), 1);
influencer.innerToXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
XContentParser parser = createParser(builder);
Influencer serialised = doParseInstance(parser);
assertEquals(influencer, serialised);
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600," +
"\"influencer_field_name\":\"foo_1\", \"influencer_field_value\": \"foo_2\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
Influencer.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -87,7 +87,7 @@ public class TransportGetFiltersAction extends HandledTransportAction<GetFilters
XContentParser parser =
XContentFactory.xContent(getDocResponse.getSourceAsBytes())
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
MlFilter filter = MlFilter.PARSER.apply(parser, null).build();
MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build();
responseBody = new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD);
GetFiltersAction.Response filterResponse = new GetFiltersAction.Response(responseBody);
@ -128,7 +128,7 @@ public class TransportGetFiltersAction extends HandledTransportAction<GetFilters
try (InputStream stream = docSource.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(docSource)).createParser(
NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
docs.add(MlFilter.PARSER.apply(parser, null).build());
docs.add(MlFilter.LENIENT_PARSER.apply(parser, null).build());
} catch (IOException e) {
this.onFailure(e);
}

View File

@ -32,7 +32,7 @@ class BatchedBucketsIterator extends BatchedResultsIterator<Bucket> {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) {
Bucket bucket = Bucket.PARSER.apply(parser, null);
Bucket bucket = Bucket.LENIENT_PARSER.apply(parser, null);
return new Result<>(hit.getIndex(), bucket);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse bucket", e);

View File

@ -31,7 +31,7 @@ class BatchedInfluencersIterator extends BatchedResultsIterator<Influencer> {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) {
Influencer influencer = Influencer.PARSER.apply(parser, null);
Influencer influencer = Influencer.LENIENT_PARSER.apply(parser, null);
return new Result<>(hit.getIndex(), influencer);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parser influencer", e);

View File

@ -32,7 +32,7 @@ class BatchedRecordsIterator extends BatchedResultsIterator<AnomalyRecord> {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)){
AnomalyRecord record = AnomalyRecord.PARSER.apply(parser, null);
AnomalyRecord record = AnomalyRecord.LENIENT_PARSER.apply(parser, null);
return new Result<>(hit.getIndex(), record);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse record", e);

View File

@ -82,8 +82,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
@ -100,6 +98,8 @@ import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils;
import org.elasticsearch.xpack.core.security.support.Exceptions;
import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams;
import java.io.IOException;
import java.io.InputStream;
@ -459,15 +459,15 @@ public class JobProvider {
if (DataCounts.documentId(jobId).equals(hitId)) {
paramsBuilder.setDataCounts(parseSearchHit(hit, DataCounts.PARSER, errorHandler));
} else if (hitId.startsWith(ModelSizeStats.documentIdPrefix(jobId))) {
ModelSizeStats.Builder modelSizeStats = parseSearchHit(hit, ModelSizeStats.PARSER, errorHandler);
ModelSizeStats.Builder modelSizeStats = parseSearchHit(hit, ModelSizeStats.LENIENT_PARSER, errorHandler);
paramsBuilder.setModelSizeStats(modelSizeStats == null ? null : modelSizeStats.build());
} else if (hitId.startsWith(ModelSnapshot.documentIdPrefix(jobId))) {
ModelSnapshot.Builder modelSnapshot = parseSearchHit(hit, ModelSnapshot.PARSER, errorHandler);
ModelSnapshot.Builder modelSnapshot = parseSearchHit(hit, ModelSnapshot.LENIENT_PARSER, errorHandler);
paramsBuilder.setModelSnapshot(modelSnapshot == null ? null : modelSnapshot.build());
} else if (Quantiles.documentId(jobId).equals(hit.getId())) {
paramsBuilder.setQuantiles(parseSearchHit(hit, Quantiles.PARSER, errorHandler));
paramsBuilder.setQuantiles(parseSearchHit(hit, Quantiles.LENIENT_PARSER, errorHandler));
} else if (hitId.startsWith(MlFilter.DOCUMENT_ID_PREFIX)) {
paramsBuilder.addFilter(parseSearchHit(hit, MlFilter.PARSER, errorHandler).build());
paramsBuilder.addFilter(parseSearchHit(hit, MlFilter.LENIENT_PARSER, errorHandler).build());
} else {
errorHandler.accept(new IllegalStateException("Unexpected Id [" + hitId + "]"));
}
@ -530,7 +530,7 @@ public class JobProvider {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source))
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
Bucket bucket = Bucket.PARSER.apply(parser, null);
Bucket bucket = Bucket.LENIENT_PARSER.apply(parser, null);
results.add(bucket);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse bucket", e);
@ -662,7 +662,7 @@ public class JobProvider {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source))
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
CategoryDefinition categoryDefinition = CategoryDefinition.PARSER.apply(parser, null);
CategoryDefinition categoryDefinition = CategoryDefinition.LENIENT_PARSER.apply(parser, null);
results.add(categoryDefinition);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse category definition", e);
@ -697,7 +697,7 @@ public class JobProvider {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source))
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
results.add(AnomalyRecord.PARSER.apply(parser, null));
results.add(AnomalyRecord.LENIENT_PARSER.apply(parser, null));
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse records", e);
}
@ -746,7 +746,7 @@ public class JobProvider {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source))
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
influencers.add(Influencer.PARSER.apply(parser, null));
influencers.add(Influencer.LENIENT_PARSER.apply(parser, null));
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse influencer", e);
}
@ -779,7 +779,7 @@ public class JobProvider {
}
String resultsIndex = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);
SearchRequestBuilder search = createDocIdSearch(resultsIndex, ModelSnapshot.documentId(jobId, modelSnapshotId));
searchSingleResult(jobId, ModelSnapshot.TYPE.getPreferredName(), search, ModelSnapshot.PARSER,
searchSingleResult(jobId, ModelSnapshot.TYPE.getPreferredName(), search, ModelSnapshot.LENIENT_PARSER,
result -> handler.accept(result.result == null ? null : new Result<ModelSnapshot>(result.index, result.result.build())),
errorHandler, () -> null);
}
@ -891,7 +891,7 @@ public class JobProvider {
try (InputStream stream = source.streamInput();
XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source))
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
ModelPlot modelPlot = ModelPlot.PARSER.apply(parser, null);
ModelPlot modelPlot = ModelPlot.LENIENT_PARSER.apply(parser, null);
results.add(modelPlot);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse modelPlot", e);
@ -909,7 +909,7 @@ public class JobProvider {
String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);
searchSingleResult(jobId, ModelSizeStats.RESULT_TYPE_VALUE, createLatestModelSizeStatsSearch(indexName),
ModelSizeStats.PARSER,
ModelSizeStats.LENIENT_PARSER,
result -> handler.accept(result.result.build()), errorHandler,
() -> new ModelSizeStats.Builder(jobId));
}
@ -1076,7 +1076,7 @@ public class JobProvider {
List<ScheduledEvent> events = new ArrayList<>();
SearchHit[] hits = response.getHits().getHits();
for (SearchHit hit : hits) {
ScheduledEvent.Builder event = parseSearchHit(hit, ScheduledEvent.PARSER, handler::onFailure);
ScheduledEvent.Builder event = parseSearchHit(hit, ScheduledEvent.LENIENT_PARSER, handler::onFailure);
event.eventId(hit.getId());
events.add(event.build());
}
@ -1094,7 +1094,7 @@ public class JobProvider {
GetRequest getRequest = new GetRequest(indexName, ElasticsearchMappings.DOC_TYPE,
ForecastRequestStats.documentId(jobId, forecastId));
getResult(jobId, ForecastRequestStats.RESULTS_FIELD.getPreferredName(), getRequest, ForecastRequestStats.PARSER,
getResult(jobId, ForecastRequestStats.RESULTS_FIELD.getPreferredName(), getRequest, ForecastRequestStats.LENIENT_PARSER,
result -> handler.accept(result.result), errorHandler, () -> null);
}
@ -1159,7 +1159,7 @@ public class JobProvider {
List<Calendar> calendars = new ArrayList<>();
SearchHit[] hits = response.getHits().getHits();
for (SearchHit hit : hits) {
calendars.add(parseSearchHit(hit, Calendar.PARSER, listener::onFailure).build());
calendars.add(parseSearchHit(hit, Calendar.LENIENT_PARSER, listener::onFailure).build());
}
listener.onResponse(new QueryPage<Calendar>(calendars, response.getHits().getTotalHits(),
@ -1228,7 +1228,7 @@ public class JobProvider {
XContentFactory.xContent(XContentHelper.xContentType(docSource))
.createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) {
Calendar calendar = Calendar.PARSER.apply(parser, null).build();
Calendar calendar = Calendar.LENIENT_PARSER.apply(parser, null).build();
listener.onResponse(calendar);
}
} else {
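
Note: the parseSearchHit calls above pair each lenient parser with an error handler. A hypothetical sketch of such a helper, assuming only the signatures visible in this diff (the real JobProvider method may differ):

// Parses a hit's source with the supplied parser (ObjectParser implements
// BiFunction, so the STRICT_PARSER/LENIENT_PARSER constants fit directly)
// and routes any parse failure to the error handler.
private <T> T parseSearchHit(SearchHit hit, BiFunction<XContentParser, Void, T> objectParser,
                             Consumer<Exception> errorHandler) {
    BytesReference source = hit.getSourceRef();
    try (InputStream stream = source.streamInput();
         XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source))
                 .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
        return objectParser.apply(parser, null);
    } catch (IOException e) {
        errorHandler.accept(new ElasticsearchParseException("failed to parse " + hit.getId(), e));
        return null;
    }
}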

View File

@ -42,18 +42,19 @@ public class AutodetectResult implements ToXContentObject, Writeable {
(Forecast) a[7], (ForecastRequestStats) a[8], (CategoryDefinition) a[9], (FlushAcknowledgement) a[10]));
static {
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Bucket.PARSER, Bucket.RESULT_TYPE_FIELD);
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), AnomalyRecord.PARSER, AnomalyRecord.RESULTS_FIELD);
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), Influencer.PARSER, Influencer.RESULTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Quantiles.PARSER, Quantiles.TYPE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSnapshot.PARSER, ModelSnapshot.TYPE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER,
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Bucket.STRICT_PARSER, Bucket.RESULT_TYPE_FIELD);
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), AnomalyRecord.STRICT_PARSER,
AnomalyRecord.RESULTS_FIELD);
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), Influencer.LENIENT_PARSER, Influencer.RESULTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Quantiles.STRICT_PARSER, Quantiles.TYPE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSnapshot.STRICT_PARSER, ModelSnapshot.TYPE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.STRICT_PARSER,
ModelSizeStats.RESULT_TYPE_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelPlot.PARSER, ModelPlot.RESULTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Forecast.PARSER, Forecast.RESULTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastRequestStats.PARSER,
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelPlot.STRICT_PARSER, ModelPlot.RESULTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Forecast.STRICT_PARSER, Forecast.RESULTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastRequestStats.STRICT_PARSER,
ForecastRequestStats.RESULTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), CategoryDefinition.PARSER, CategoryDefinition.TYPE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), CategoryDefinition.STRICT_PARSER, CategoryDefinition.TYPE);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), FlushAcknowledgement.PARSER, FlushAcknowledgement.TYPE);
}

View File

@ -121,7 +121,7 @@ public class ExpiredForecastsRemover implements MlDataRemover {
try (InputStream stream = hit.getSourceRef().streamInput();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(
NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
ForecastRequestStats forecastRequestStats = ForecastRequestStats.PARSER.apply(parser, null);
ForecastRequestStats forecastRequestStats = ForecastRequestStats.LENIENT_PARSER.apply(parser, null);
if (forecastRequestStats.getExpiryTime().toEpochMilli() < cutoffEpochMs) {
forecastsToDelete.add(forecastRequestStats);
}

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecordTests;
@ -14,6 +15,7 @@ import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.core.ml.job.results.PartitionScore;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -21,6 +23,7 @@ import java.util.Date;
import java.util.List;
import java.util.stream.IntStream;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class BucketTests extends AbstractSerializingTestCase<Bucket> {
@ -95,7 +98,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
@Override
protected Bucket doParseInstance(XContentParser parser) {
return Bucket.PARSER.apply(parser, null);
return Bucket.STRICT_PARSER.apply(parser, null);
}
public void testEquals_GivenDifferentClass() {
@ -297,4 +300,21 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
assertThat(copy, equalTo(bucket));
}
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> Bucket.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\": 123544456, \"bucket_span\": 3600, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
Bucket.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -7,11 +7,15 @@ package org.elasticsearch.xpack.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition;
import java.io.IOException;
import java.util.Arrays;
import static org.hamcrest.Matchers.containsString;
public class CategoryDefinitionTests extends AbstractSerializingTestCase<CategoryDefinition> {
public CategoryDefinition createTestInstance(String jobId) {
@ -36,7 +40,7 @@ public class CategoryDefinitionTests extends AbstractSerializingTestCase<Categor
@Override
protected CategoryDefinition doParseInstance(XContentParser parser) {
return CategoryDefinition.PARSER.apply(parser, null);
return CategoryDefinition.STRICT_PARSER.apply(parser, null);
}
public void testEquals_GivenSameObject() {
@ -121,4 +125,21 @@ public class CategoryDefinitionTests extends AbstractSerializingTestCase<Categor
category.addExample("bar");
return category;
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> CategoryDefinition.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
CategoryDefinition.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -7,14 +7,18 @@ package org.elasticsearch.xpack.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats;
import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats.ForecastRequestStatus;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
public class ForecastRequestStatsTests extends AbstractSerializingTestCase<ForecastRequestStats> {
@Override
@ -74,6 +78,23 @@ public class ForecastRequestStatsTests extends AbstractSerializingTestCase<Forec
@Override
protected ForecastRequestStats doParseInstance(XContentParser parser) {
return ForecastRequestStats.PARSER.apply(parser, null);
return ForecastRequestStats.STRICT_PARSER.apply(parser, null);
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"forecast_id\":\"forecast_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> ForecastRequestStats.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"forecast_id\":\"forecast_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
ForecastRequestStats.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -7,12 +7,16 @@ package org.elasticsearch.xpack.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.Forecast;
import java.io.IOException;
import java.util.Date;
import java.util.Objects;
import static org.hamcrest.Matchers.containsString;
public class ForecastTests extends AbstractSerializingTestCase<Forecast> {
@Override
@ -60,7 +64,7 @@ public class ForecastTests extends AbstractSerializingTestCase<Forecast> {
@Override
protected Forecast doParseInstance(XContentParser parser) {
return Forecast.PARSER.apply(parser, null);
return Forecast.STRICT_PARSER.apply(parser, null);
}
public void testId() {
@ -86,4 +90,15 @@ public class ForecastTests extends AbstractSerializingTestCase<Forecast> {
valuesHash = Objects.hash(byFieldValue, partitionFieldValue);
assertEquals("job-foo_model_forecast_222_100_60_2_" + valuesHash + "_" + length, forecast.getId());
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"forecast_id\":\"forecast_1\", \"timestamp\":12354667, \"bucket_span\": 3600," +
"\"detector_index\":3, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> Forecast.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
}

View File

@ -7,12 +7,16 @@ package org.elasticsearch.xpack.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.Influence;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
public class InfluenceTests extends AbstractSerializingTestCase<Influence> {
@Override
@ -32,7 +36,23 @@ public class InfluenceTests extends AbstractSerializingTestCase<Influence> {
@Override
protected Influence doParseInstance(XContentParser parser) {
return Influence.PARSER.apply(parser, null);
return Influence.STRICT_PARSER.apply(parser, null);
}
public void testStrictParser() throws IOException {
String json = "{\"influencer_field_name\":\"influencer_1\", \"influencer_field_values\":[], \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> Influence.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"influencer_field_name\":\"influencer_1\", \"influencer_field_values\":[], \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
Influence.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -68,7 +68,7 @@ public class ModelPlotTests extends AbstractSerializingTestCase<ModelPlot> {
@Override
protected ModelPlot doParseInstance(XContentParser parser) {
return ModelPlot.PARSER.apply(parser, null);
return ModelPlot.STRICT_PARSER.apply(parser, null);
}
public void testEquals_GivenSameObject() {
@ -243,6 +243,23 @@ public class ModelPlotTests extends AbstractSerializingTestCase<ModelPlot> {
assertEquals("job-foo_model_plot_100_60_33_" + valuesHash + "_" + length, plot.getId());
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\":12354667, \"bucket_span\": 3600, \"detector_index\":3, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> ModelPlot.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"timestamp\":12354667, \"bucket_span\": 3600, \"detector_index\":3, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
ModelPlot.LENIENT_PARSER.apply(parser, null);
}
}
private ModelPlot createFullyPopulated() {
ModelPlot modelPlot = new ModelPlot("foo", new Date(12345678L), 360L, 22);
modelPlot.setByFieldName("by");
@ -256,5 +273,4 @@ public class ModelPlotTests extends AbstractSerializingTestCase<ModelPlot> {
modelPlot.setActual(100.0);
return modelPlot;
}
}

View File

@ -7,9 +7,14 @@ package org.elasticsearch.xpack.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.PartitionScore;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
public class PartitionScoreTests extends AbstractSerializingTestCase<PartitionScore> {
@Override
@ -25,7 +30,25 @@ public class PartitionScoreTests extends AbstractSerializingTestCase<PartitionSc
@Override
protected PartitionScore doParseInstance(XContentParser parser) {
return PartitionScore.PARSER.apply(parser, null);
return PartitionScore.STRICT_PARSER.apply(parser, null);
}
public void testStrictParser() throws IOException {
String json = "{\"partition_field_name\":\"field_1\", \"partition_field_value\":\"x\", \"initial_record_score\": 3," +
" \"record_score\": 3, \"probability\": 0.001, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> PartitionScore.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"partition_field_name\":\"field_1\", \"partition_field_value\":\"x\", \"initial_record_score\": 3," +
" \"record_score\": 3, \"probability\": 0.001, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
PartitionScore.LENIENT_PARSER.apply(parser, null);
}
}
}

View File

@ -25,6 +25,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.persistent.PersistentTaskParams;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.persistent.PersistentTasksNodeService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
@ -81,9 +84,6 @@ import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition;
import org.elasticsearch.xpack.core.ml.job.results.Forecast;
import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats;
import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.persistent.PersistentTaskParams;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.persistent.PersistentTasksNodeService;
import org.elasticsearch.xpack.core.security.SecurityField;
import org.elasticsearch.xpack.core.security.authc.TokenMetaData;
@ -359,7 +359,7 @@ abstract class MlNativeAutodetectIntegTestCase extends ESIntegTestCase {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(
NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
hits.getHits()[0].getSourceRef().streamInput());
return ForecastRequestStats.PARSER.apply(parser, null);
return ForecastRequestStats.STRICT_PARSER.apply(parser, null);
} catch (IOException e) {
throw new IllegalStateException(e);
}
@ -378,7 +378,7 @@ abstract class MlNativeAutodetectIntegTestCase extends ESIntegTestCase {
try {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(
NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, hit.getSourceRef().streamInput());
forecastStats.add(ForecastRequestStats.PARSER.apply(parser, null));
forecastStats.add(ForecastRequestStats.STRICT_PARSER.apply(parser, null));
} catch (IOException e) {
throw new IllegalStateException(e);
}
@ -413,7 +413,7 @@ abstract class MlNativeAutodetectIntegTestCase extends ESIntegTestCase {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(
NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
hit.getSourceRef().streamInput());
forecasts.add(Forecast.PARSER.apply(parser, null));
forecasts.add(Forecast.STRICT_PARSER.apply(parser, null));
} catch (IOException e) {
throw new IllegalStateException(e);
}

View File

@ -165,7 +165,7 @@ public class RevertModelSnapshotIT extends MlNativeAutodetectIntegTestCase {
try {
XContentParser parser = JsonXContent.jsonXContent
.createParser(null, LoggingDeprecationHandler.INSTANCE, hits.getAt(0).getSourceAsString());
return Quantiles.PARSER.apply(parser, null);
return Quantiles.LENIENT_PARSER.apply(parser, null);
} catch (IOException e) {
throw new IllegalStateException(e);
}