[7.x] [ML] Add new geo_results.(actual_point|typical_point) fields for `lat_long` results (#47050) (#48958)

* [ML] Add new geo_results.(actual_point|typical_point) fields for `lat_long` results (#47050)

[ML] Add new geo_results.(actual_point|typical_point) fields for `lat_long` results (#47050)

Related PR: https://github.com/elastic/ml-cpp/pull/809

* adjusting bwc version
This commit is contained in:
Benjamin Trent 2019-11-11 15:43:03 -05:00 committed by GitHub
parent 8acbd0aa2a
commit 46ab1db54f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 421 additions and 10 deletions

View File

@ -18,7 +18,10 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -256,6 +259,28 @@ public class AnomalyCause implements ToXContentObject {
this.influencers = Collections.unmodifiableList(influencers);
}
/**
 * Returns the typical value interpreted as a {@link GeoPoint}, or {@code null}
 * when the detector function is not {@code lat_long} or the typical value does
 * not contain exactly two coordinates.
 */
@Nullable
public GeoPoint getTypicalGeoPoint() {
    // Coordinates are only meaningful for lat_long detectors, where the
    // typical list holds the (lat, lon) pair passed to GeoPoint.
    boolean isLatLong = DetectorFunction.LAT_LONG.getFullName().equals(function);
    if (isLatLong && typical != null && typical.size() == 2) {
        return new GeoPoint(typical.get(0), typical.get(1));
    }
    return null;
}
/**
 * Returns the actual value interpreted as a {@link GeoPoint}, or {@code null}
 * when the detector function is not {@code lat_long} or the actual value does
 * not contain exactly two coordinates.
 */
@Nullable
public GeoPoint getActualGeoPoint() {
    // Coordinates are only meaningful for lat_long detectors, where the
    // actual list holds the (lat, lon) pair passed to GeoPoint.
    boolean isLatLong = DetectorFunction.LAT_LONG.getFullName().equals(function);
    if (isLatLong && actual != null && actual.size() == 2) {
        return new GeoPoint(actual.get(0), actual.get(1));
    }
    return null;
}
@Override
public int hashCode() {
return Objects.hash(probability, actual, typical, byFieldName, byFieldValue, correlatedByFieldValue, fieldName, function,

View File

@ -19,8 +19,11 @@
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -388,6 +391,28 @@ public class AnomalyRecord implements ToXContentObject {
this.influences = Collections.unmodifiableList(influencers);
}
/**
 * Returns the typical value interpreted as a {@link GeoPoint}, or {@code null}
 * when the detector function is not {@code lat_long} or the typical value does
 * not contain exactly two coordinates.
 */
@Nullable
public GeoPoint getTypicalGeoPoint() {
    // Only lat_long detectors encode a coordinate pair in the typical values.
    boolean isLatLong = DetectorFunction.LAT_LONG.getFullName().equals(function);
    if (isLatLong && typical != null && typical.size() == 2) {
        return new GeoPoint(typical.get(0), typical.get(1));
    }
    return null;
}
/**
 * Returns the actual value interpreted as a {@link GeoPoint}, or {@code null}
 * when the detector function is not {@code lat_long} or the actual value does
 * not contain exactly two coordinates.
 */
@Nullable
public GeoPoint getActualGeoPoint() {
    // Only lat_long detectors encode a coordinate pair in the actual values.
    boolean isLatLong = DetectorFunction.LAT_LONG.getFullName().equals(function);
    if (isLatLong && actual != null && actual.size() == 2) {
        return new GeoPoint(actual.get(0), actual.get(1));
    }
    return null;
}
@Override
public int hashCode() {
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore,

View File

@ -18,12 +18,20 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
public class AnomalyCauseTests extends AbstractXContentTestCase<AnomalyCause> {
@Override
@ -103,4 +111,40 @@ public class AnomalyCauseTests extends AbstractXContentTestCase<AnomalyCause> {
protected boolean supportsUnknownFields() {
return true;
}
public void testActualAsGeoPoint() {
    // No function set: actual cannot be interpreted as a geo point.
    AnomalyCause cause = new AnomalyCause();
    assertThat(cause.getActualGeoPoint(), is(nullValue()));
    // lat_long function but no actual value yet.
    cause.setFunction(DetectorFunction.LAT_LONG.getFullName());
    assertThat(cause.getActualGeoPoint(), is(nullValue()));
    // A single value is not a coordinate pair.
    cause.setActual(Collections.singletonList(80.0));
    assertThat(cause.getActualGeoPoint(), is(nullValue()));
    // Exactly two values map to a (lat, lon) point.
    cause.setActual(Arrays.asList(90.0, 80.0));
    assertThat(cause.getActualGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));
    // More than two values cannot be a single point.
    cause.setActual(Arrays.asList(10.0, 100.0, 90.0));
    assertThat(cause.getActualGeoPoint(), is(nullValue()));
}
public void testTypicalAsGeoPoint() {
    // No function set: typical cannot be interpreted as a geo point.
    AnomalyCause cause = new AnomalyCause();
    assertThat(cause.getTypicalGeoPoint(), is(nullValue()));
    // lat_long function but no typical value yet.
    cause.setFunction(DetectorFunction.LAT_LONG.getFullName());
    assertThat(cause.getTypicalGeoPoint(), is(nullValue()));
    // A single value is not a coordinate pair.
    cause.setTypical(Collections.singletonList(80.0));
    assertThat(cause.getTypicalGeoPoint(), is(nullValue()));
    // Exactly two values map to a (lat, lon) point.
    cause.setTypical(Arrays.asList(90.0, 80.0));
    assertThat(cause.getTypicalGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));
    // More than two values cannot be a single point.
    cause.setTypical(Arrays.asList(10.0, 100.0, 90.0));
    assertThat(cause.getTypicalGeoPoint(), is(nullValue()));
}
}

View File

@ -18,14 +18,21 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
public class AnomalyRecordTests extends AbstractXContentTestCase<AnomalyRecord> {
@Override
@ -93,4 +100,40 @@ public class AnomalyRecordTests extends AbstractXContentTestCase<AnomalyRecord>
protected boolean supportsUnknownFields() {
return true;
}
public void testActualAsGeoPoint() {
    // No function set: actual cannot be interpreted as a geo point.
    AnomalyRecord rec = new AnomalyRecord(randomAlphaOfLength(10), new Date(), randomNonNegativeLong());
    assertThat(rec.getActualGeoPoint(), is(nullValue()));
    // lat_long function but no actual value yet.
    rec.setFunction(DetectorFunction.LAT_LONG.getFullName());
    assertThat(rec.getActualGeoPoint(), is(nullValue()));
    // A single value is not a coordinate pair.
    rec.setActual(Collections.singletonList(80.0));
    assertThat(rec.getActualGeoPoint(), is(nullValue()));
    // Exactly two values map to a (lat, lon) point.
    rec.setActual(Arrays.asList(90.0, 80.0));
    assertThat(rec.getActualGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));
    // More than two values cannot be a single point.
    rec.setActual(Arrays.asList(10.0, 100.0, 90.0));
    assertThat(rec.getActualGeoPoint(), is(nullValue()));
}
public void testTypicalAsGeoPoint() {
    // No function set: typical cannot be interpreted as a geo point.
    AnomalyRecord rec = new AnomalyRecord(randomAlphaOfLength(10), new Date(), randomNonNegativeLong());
    assertThat(rec.getTypicalGeoPoint(), is(nullValue()));
    // lat_long function but no typical value yet.
    rec.setFunction(DetectorFunction.LAT_LONG.getFullName());
    assertThat(rec.getTypicalGeoPoint(), is(nullValue()));
    // A single value is not a coordinate pair.
    rec.setTypical(Collections.singletonList(80.0));
    assertThat(rec.getTypicalGeoPoint(), is(nullValue()));
    // Exactly two values map to a (lat, lon) point.
    rec.setTypical(Arrays.asList(90.0, 80.0));
    assertThat(rec.getTypicalGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));
    // More than two values cannot be a single point.
    rec.setTypical(Arrays.asList(10.0, 100.0, 90.0));
    assertThat(rec.getTypicalGeoPoint(), is(nullValue()));
}
}

View File

@ -303,8 +303,9 @@ A record object has the following properties:
part of the core analytical modeling, these low-level anomaly records are
aggregated for their parent over field record. The causes resource contains
similar elements to the record resource, namely `actual`, `typical`,
`*_field_name` and `*_field_value`. Probability and scores are not applicable
to causes.
`geo_results.actual_point`, `geo_results.typical_point`,
`*_field_name` and `*_field_value`.
Probability and scores are not applicable to causes.
`detector_index`::
(number) A unique identifier for the detector.
@ -383,6 +384,16 @@ A record object has the following properties:
`typical`::
(array) The typical value for the bucket, according to analytical modeling.
`geo_results.actual_point`::
(string) The actual value for the bucket formatted as a `geo_point`.
If the detector function is `lat_long`, this is a comma-delimited string
of the latitude and longitude.
`geo_results.typical_point`::
(string) The typical value for the bucket formatted as a `geo_point`.
If the detector function is `lat_long`, this is a comma-delimited string
of the latitude and longitude.
NOTE: Additional record properties are added, depending on the fields being
analyzed. For example, if it's analyzing `hostname` as a _by field_, then a field
`hostname` is added to the result document. This information enables you to

View File

@ -55,6 +55,7 @@ import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition;
import org.elasticsearch.xpack.core.ml.job.results.Forecast;
import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats;
import org.elasticsearch.xpack.core.ml.job.results.GeoResults;
import org.elasticsearch.xpack.core.ml.job.results.Influence;
import org.elasticsearch.xpack.core.ml.job.results.Influencer;
import org.elasticsearch.xpack.core.ml.job.results.ModelPlot;
@ -131,6 +132,7 @@ public class ElasticsearchMappings {
public static final String BOOLEAN = "boolean";
public static final String DATE = "date";
public static final String DOUBLE = "double";
public static final String GEO_POINT = "geo_point";
public static final String INTEGER = "integer";
public static final String KEYWORD = "keyword";
public static final String LONG = "long";
@ -885,6 +887,16 @@ public class ElasticsearchMappings {
.field(TYPE, KEYWORD)
.field(COPY_TO, ALL_FIELD_VALUES)
.endObject()
.startObject(AnomalyCause.GEO_RESULTS.getPreferredName())
.startObject(PROPERTIES)
.startObject(GeoResults.ACTUAL_POINT.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.startObject(GeoResults.TYPICAL_POINT.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.startObject(AnomalyRecord.INFLUENCERS.getPreferredName())
@ -899,6 +911,16 @@ public class ElasticsearchMappings {
.field(COPY_TO, ALL_FIELD_VALUES)
.endObject()
.endObject()
.endObject()
.startObject(AnomalyRecord.GEO_RESULTS.getPreferredName())
.startObject(PROPERTIES)
.startObject(GeoResults.ACTUAL_POINT.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.startObject(GeoResults.TYPICAL_POINT.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.endObject()
.endObject();
}

View File

@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@ -39,6 +41,7 @@ public class AnomalyCause implements ToXContentObject, Writeable {
public static final ParseField TYPICAL = new ParseField("typical");
public static final ParseField ACTUAL = new ParseField("actual");
public static final ParseField INFLUENCERS = new ParseField("influencers");
public static final ParseField GEO_RESULTS = new ParseField("geo_results");
/**
* Metric Results
@ -67,6 +70,9 @@ public class AnomalyCause implements ToXContentObject, Writeable {
parser.declareString(AnomalyCause::setOverFieldValue, OVER_FIELD_VALUE);
parser.declareObjectArray(AnomalyCause::setInfluencers, ignoreUnknownFields ? Influence.LENIENT_PARSER : Influence.STRICT_PARSER,
INFLUENCERS);
parser.declareObject(AnomalyCause::setGeoResults,
ignoreUnknownFields ? GeoResults.LENIENT_PARSER : GeoResults.STRICT_PARSER,
GEO_RESULTS);
return parser;
}
@ -81,6 +87,7 @@ public class AnomalyCause implements ToXContentObject, Writeable {
private String functionDescription;
private List<Double> typical;
private List<Double> actual;
private GeoResults geoResults;
private String fieldName;
@ -114,6 +121,9 @@ public class AnomalyCause implements ToXContentObject, Writeable {
if (in.readBoolean()) {
influencers = in.readList(Influence::new);
}
if (in.getVersion().onOrAfter(Version.V_7_6_0)) {
geoResults = in.readOptionalWriteable(GeoResults::new);
}
}
@Override
@ -144,6 +154,9 @@ public class AnomalyCause implements ToXContentObject, Writeable {
if (hasInfluencers) {
out.writeList(influencers);
}
if (out.getVersion().onOrAfter(Version.V_7_6_0)) {
out.writeOptionalWriteable(geoResults);
}
}
@Override
@ -189,11 +202,13 @@ public class AnomalyCause implements ToXContentObject, Writeable {
if (influencers != null) {
builder.field(INFLUENCERS.getPreferredName(), influencers);
}
if (geoResults != null) {
builder.field(GEO_RESULTS.getPreferredName(), geoResults);
}
builder.endObject();
return builder;
}
public double getProbability() {
return probability;
}
@ -307,6 +322,14 @@ public class AnomalyCause implements ToXContentObject, Writeable {
this.influencers = influencers;
}
/**
 * @return the geo point results produced for a {@code lat_long} detector
 *         function, or {@code null} if none were set
 */
public GeoResults getGeoResults() {
return geoResults;
}
/**
 * Sets the geo point results for this cause; may be {@code null}.
 */
public void setGeoResults(GeoResults geoResults) {
this.geoResults = geoResults;
}
@Override
public int hashCode() {
return Objects.hash(probability,
@ -322,7 +345,8 @@ public class AnomalyCause implements ToXContentObject, Writeable {
overFieldValue,
partitionFieldName,
partitionFieldValue,
influencers);
influencers,
geoResults);
}
@Override
@ -350,8 +374,13 @@ public class AnomalyCause implements ToXContentObject, Writeable {
Objects.equals(this.partitionFieldValue, that.partitionFieldValue) &&
Objects.equals(this.overFieldName, that.overFieldName) &&
Objects.equals(this.overFieldValue, that.overFieldValue) &&
Objects.equals(this.geoResults, that.geoResults) &&
Objects.equals(this.influencers, that.influencers);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@ -56,6 +57,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
public static final ParseField ACTUAL = new ParseField("actual");
public static final ParseField INFLUENCERS = new ParseField("influencers");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField GEO_RESULTS = new ParseField("geo_results");
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("records");
@ -115,6 +117,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
CAUSES);
parser.declareObjectArray(AnomalyRecord::setInfluencers, ignoreUnknownFields ? Influence.LENIENT_PARSER : Influence.STRICT_PARSER,
INFLUENCERS);
parser.declareObject(AnomalyRecord::setGeoResults,
ignoreUnknownFields ? GeoResults.LENIENT_PARSER : GeoResults.STRICT_PARSER,
GEO_RESULTS);
return parser;
}
@ -133,6 +138,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
private List<Double> typical;
private List<Double> actual;
private boolean isInterim;
private GeoResults geoResults;
private String fieldName;
@ -190,6 +196,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
if (in.readBoolean()) {
influences = in.readList(Influence::new);
}
if (in.getVersion().onOrAfter(Version.V_7_6_0)) {
geoResults = in.readOptionalWriteable(GeoResults::new);
}
}
@Override
@ -235,6 +244,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
if (hasInfluencers) {
out.writeList(influences);
}
if (out.getVersion().onOrAfter(Version.V_7_6_0)) {
out.writeOptionalWriteable(geoResults);
}
}
@Override
@ -300,6 +312,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
if (influences != null) {
builder.field(INFLUENCERS.getPreferredName(), influences);
}
if (geoResults != null) {
builder.field(GEO_RESULTS.getPreferredName(), geoResults);
}
Map<String, LinkedHashSet<String>> inputFields = inputFieldMap();
for (String fieldName : inputFields.keySet()) {
@ -529,6 +544,13 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
this.influences = influencers;
}
/**
 * @return the geo point results produced for a {@code lat_long} detector
 *         function, or {@code null} if none were set
 */
public GeoResults getGeoResults() {
return geoResults;
}
/**
 * Sets the geo point results for this record; may be {@code null}.
 */
public void setGeoResults(GeoResults geoResults) {
this.geoResults = geoResults;
}
@Override
public int hashCode() {
@ -536,10 +558,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
initialRecordScore, typical, actual,function, functionDescription, fieldName,
byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName,
partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim,
causes, influences, jobId);
causes, influences, jobId, geoResults);
}
@Override
public boolean equals(Object other) {
if (this == other) {
@ -574,6 +595,12 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
&& Objects.equals(this.timestamp, that.timestamp)
&& Objects.equals(this.isInterim, that.isInterim)
&& Objects.equals(this.causes, that.causes)
&& Objects.equals(this.geoResults, that.geoResults)
&& Objects.equals(this.influences, that.influences);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
}

View File

@ -0,0 +1,101 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
 * Holds the {@code geo_results} produced for the {@code lat_long} detector
 * function: the actual and typical locations, each carried as a
 * comma-delimited "lat,lon" string suitable for indexing as a
 * {@code geo_point}. Either value may be absent.
 */
public class GeoResults implements ToXContentObject, Writeable {

    public static final ParseField GEO_RESULTS = new ParseField("geo_results");
    public static final ParseField TYPICAL_POINT = new ParseField("typical_point");
    public static final ParseField ACTUAL_POINT = new ParseField("actual_point");

    public static final ObjectParser<GeoResults, Void> STRICT_PARSER = createParser(false);
    public static final ObjectParser<GeoResults, Void> LENIENT_PARSER = createParser(true);

    private static ObjectParser<GeoResults, Void> createParser(boolean ignoreUnknownFields) {
        ObjectParser<GeoResults, Void> objectParser =
            new ObjectParser<>(GEO_RESULTS.getPreferredName(), ignoreUnknownFields, GeoResults::new);
        objectParser.declareString(GeoResults::setActualPoint, ACTUAL_POINT);
        objectParser.declareString(GeoResults::setTypicalPoint, TYPICAL_POINT);
        return objectParser;
    }

    // Both fields are nullable; absent values are skipped on the wire and in XContent.
    private String actualPoint;
    private String typicalPoint;

    public GeoResults() {}

    public GeoResults(StreamInput in) throws IOException {
        // Read order mirrors writeTo: actual first, then typical.
        this.actualPoint = in.readOptionalString();
        this.typicalPoint = in.readOptionalString();
    }

    public String getActualPoint() {
        return actualPoint;
    }

    public void setActualPoint(String actualPoint) {
        this.actualPoint = actualPoint;
    }

    public String getTypicalPoint() {
        return typicalPoint;
    }

    public void setTypicalPoint(String typicalPoint) {
        this.typicalPoint = typicalPoint;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalString(actualPoint);
        out.writeOptionalString(typicalPoint);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        // Typical is emitted before actual to keep the existing document layout.
        if (typicalPoint != null) {
            builder.field(TYPICAL_POINT.getPreferredName(), typicalPoint);
        }
        if (actualPoint != null) {
            builder.field(ACTUAL_POINT.getPreferredName(), actualPoint);
        }
        return builder.endObject();
    }

    @Override
    public int hashCode() {
        return Objects.hash(typicalPoint, actualPoint);
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        GeoResults that = (GeoResults) other;
        return Objects.equals(this.typicalPoint, that.typicalPoint)
            && Objects.equals(this.actualPoint, that.actualPoint);
    }
}

View File

@ -74,6 +74,7 @@ public final class ReservedFieldNames {
AnomalyCause.FUNCTION_DESCRIPTION.getPreferredName(),
AnomalyCause.TYPICAL.getPreferredName(),
AnomalyCause.ACTUAL.getPreferredName(),
AnomalyCause.GEO_RESULTS.getPreferredName(),
AnomalyCause.INFLUENCERS.getPreferredName(),
AnomalyCause.FIELD_NAME.getPreferredName(),
@ -88,6 +89,7 @@ public final class ReservedFieldNames {
AnomalyRecord.FUNCTION_DESCRIPTION.getPreferredName(),
AnomalyRecord.TYPICAL.getPreferredName(),
AnomalyRecord.ACTUAL.getPreferredName(),
AnomalyRecord.GEO_RESULTS.getPreferredName(),
AnomalyRecord.INFLUENCERS.getPreferredName(),
AnomalyRecord.FIELD_NAME.getPreferredName(),
AnomalyRecord.OVER_FIELD_NAME.getPreferredName(),
@ -97,6 +99,9 @@ public final class ReservedFieldNames {
AnomalyRecord.INITIAL_RECORD_SCORE.getPreferredName(),
AnomalyRecord.BUCKET_SPAN.getPreferredName(),
GeoResults.TYPICAL_POINT.getPreferredName(),
GeoResults.ACTUAL_POINT.getPreferredName(),
Bucket.ANOMALY_SCORE.getPreferredName(),
Bucket.BUCKET_INFLUENCERS.getPreferredName(),
Bucket.BUCKET_SPAN.getPreferredName(),

View File

@ -89,7 +89,7 @@ public class ElasticsearchMappingsTests extends ESTestCase {
GetResult._TYPE
);
public void testResultsMapppingReservedFields() throws Exception {
public void testResultsMappingReservedFields() throws Exception {
Set<String> overridden = new HashSet<>(KEYWORDS);
// These are not reserved because they're data types, not field names
@ -109,7 +109,7 @@ public class ElasticsearchMappingsTests extends ESTestCase {
compareFields(expected, ReservedFieldNames.RESERVED_RESULT_FIELD_NAMES);
}
public void testConfigMapppingReservedFields() throws Exception {
public void testConfigMappingReservedFields() throws Exception {
Set<String> overridden = new HashSet<>(KEYWORDS);
// These are not reserved because they're data types, not field names

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -59,7 +60,8 @@ public class AnomalyCauseTests extends AbstractSerializingTestCase<AnomalyCause>
anomalyCause.setPartitionFieldValue(randomAlphaOfLengthBetween(1, 20));
}
if (randomBoolean()) {
anomalyCause.setFunction(randomAlphaOfLengthBetween(1, 20));
anomalyCause.setFunction(DetectorFunction.LAT_LONG.getFullName());
anomalyCause.setGeoResults(GeoResultsTests.createTestGeoResults());
}
if (randomBoolean()) {
anomalyCause.setFunctionDescription(randomAlphaOfLengthBetween(1, 20));

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -58,7 +59,12 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase<AnomalyRecor
anomalyRecord.setOverFieldName(randomAlphaOfLength(12));
anomalyRecord.setOverFieldValue(randomAlphaOfLength(12));
}
anomalyRecord.setFunction(randomAlphaOfLengthBetween(5, 20));
if (randomBoolean()) {
anomalyRecord.setFunction(DetectorFunction.LAT_LONG.getFullName());
anomalyRecord.setGeoResults(GeoResultsTests.createTestGeoResults());
} else {
anomalyRecord.setFunction(randomAlphaOfLengthBetween(5, 25));
}
anomalyRecord.setFunctionDescription(randomAlphaOfLengthBetween(5, 20));
if (randomBoolean()) {
anomalyRecord.setCorrelatedByFieldValue(randomAlphaOfLength(16));

View File

@ -0,0 +1,71 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.junit.Before;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
public class GeoResultsTests extends AbstractSerializingTestCase<GeoResults> {

    private boolean lenient;

    @Before
    public void setLenient() {
        lenient = randomBoolean();
    }

    /**
     * Builds a random {@link GeoResults}: each point is independently either
     * absent or a "lat,lon" string with latitude in [-90, 90] and longitude in
     * [-180, 180]. (Previously longitude was also drawn from [-90, 90], so
     * half of the valid longitude range was never exercised.)
     */
    static GeoResults createTestGeoResults() {
        GeoResults geoResults = new GeoResults();
        if (randomBoolean()) {
            geoResults.setActualPoint(randomDoubleBetween(-90.0, 90.0, true) + "," +
                randomDoubleBetween(-180.0, 180.0, true));
        }
        if (randomBoolean()) {
            geoResults.setTypicalPoint(randomDoubleBetween(-90.0, 90.0, true) + "," +
                randomDoubleBetween(-180.0, 180.0, true));
        }
        return geoResults;
    }

    @Override
    protected GeoResults createTestInstance() {
        return createTestGeoResults();
    }

    @Override
    protected Reader<GeoResults> instanceReader() {
        return GeoResults::new;
    }

    @Override
    protected GeoResults doParseInstance(XContentParser parser) {
        return lenient ? GeoResults.LENIENT_PARSER.apply(parser, null) : GeoResults.STRICT_PARSER.apply(parser, null);
    }

    // The strict parser must reject fields it does not know about.
    public void testStrictParser() throws IOException {
        String json = "{\"foo\":\"bar\"}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> GeoResults.STRICT_PARSER.apply(parser, null));
            assertThat(e.getMessage(), containsString("unknown field [foo]"));
        }
    }

    // The lenient parser silently ignores unknown fields.
    public void testLenientParser() throws IOException {
        String json = "{\"foo\":\"bar\"}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
            GeoResults.LENIENT_PARSER.apply(parser, null);
        }
    }
}