Make start and end optional params in results endpoints
Original commit: elastic/x-pack-elasticsearch@0ab5da04f9
commit 2898e3c421
parent 9d2ce12624
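In brief: the GetBucketsAction, GetInfluencersAction and GetRecordsAction request classes stop requiring start and end. The constructors keep only jobId, start/end become nullable fields with plain setters, the parsers use declareStringOrNull, the transport serialization switches to optional strings, and the REST specs drop "required": true, so a results query can now cover a job's whole history or be narrowed to a time range. A minimal, self-contained sketch of the resulting request shape (a simplified stand-in, not the actual x-pack classes):

    // Sketch only: a simplified stand-in for the real Request classes,
    // showing start/end as optional (nullable) fields set via setters.
    public class ResultsRequestSketch {

        static class Request {
            private final String jobId;   // still mandatory
            private String start;         // optional since this commit
            private String end;           // optional since this commit

            Request(String jobId) {
                this.jobId = java.util.Objects.requireNonNull(jobId, "jobId");
            }

            void setStart(String start) { this.start = start; }   // null allowed
            void setEnd(String end) { this.end = end; }           // null allowed

            @Override
            public String toString() {
                return "Request{jobId=" + jobId + ", start=" + start + ", end=" + end + "}";
            }
        }

        public static void main(String[] args) {
            // All results for the job: no time range required any more.
            Request all = new Request("farequote");

            // Narrowed by a time range, as before.
            Request ranged = new Request("farequote");
            ranged.setStart("2016-06-01T00:00:00Z");
            ranged.setEnd("2016-06-01T01:00:00Z");

            System.out.println(all);
            System.out.println(ranged);
        }
    }

The new YAML REST tests at the end of this diff exercise both forms: a query with no time range returns every bucket/record/influencer for the job, and a query with start/end filters results to that window.
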
GetBucketsAction.java

@@ -83,8 +83,8 @@ public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucket
         PARSER.declareString(Request::setPartitionValue, PARTITION_VALUE);
-        PARSER.declareBoolean(Request::setExpand, EXPAND);
-        PARSER.declareBoolean(Request::setIncludeInterim, INCLUDE_INTERIM);
-        PARSER.declareString(Request::setStart, START);
-        PARSER.declareString(Request::setEnd, END);
+        PARSER.declareStringOrNull(Request::setStart, START);
+        PARSER.declareStringOrNull(Request::setEnd, END);
+        PARSER.declareBoolean(Request::setExpand, EXPAND);
+        PARSER.declareBoolean(Request::setIncludeInterim, INCLUDE_INTERIM);
         PARSER.declareObject(Request::setPageParams, PageParams.PARSER, PageParams.PAGE);

@@ -161,7 +161,7 @@ public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucket
         }

         public void setStart(String start) {
-            this.start = ExceptionsHelper.requireNonNull(start, START.getPreferredName());
+            this.start = start;
         }

         public String getEnd() {

@@ -169,7 +169,7 @@ public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucket
         }

         public void setEnd(String end) {
-            this.end = ExceptionsHelper.requireNonNull(end, END.getPreferredName());
+            this.end = end;
         }

         public PageParams getPageParams() {

@@ -198,12 +198,7 @@ public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucket

         @Override
         public ActionRequestValidationException validate() {
-            ActionRequestValidationException validationException = null;
-            if ((timestamp == null || timestamp.isEmpty())
-                    && (start == null || start.isEmpty() || end == null || end.isEmpty())) {
-                validationException = addValidationError("Either [timestamp] or [start, end] parameters must be set.", validationException);
-            }
-            return validationException;
+            return null;
         }

         @Override

@@ -248,12 +243,8 @@ public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucket
             if (partitionValue != null) {
                 builder.field(PARTITION_VALUE.getPreferredName(), partitionValue);
             }
-            if (start != null) {
-                builder.field(START.getPreferredName(), start);
-            }
-            if (end != null) {
-                builder.field(END.getPreferredName(), end);
-            }
+            builder.field(START.getPreferredName(), start);
+            builder.field(END.getPreferredName(), end);
             if (pageParams != null) {
                 builder.field(PageParams.PAGE.getPreferredName(), pageParams);
             }

GetInfluencersAction.java

@@ -36,6 +36,7 @@ import org.elasticsearch.xpack.prelert.job.persistence.QueryPage;
 import org.elasticsearch.xpack.prelert.job.results.Influencer;
 import org.elasticsearch.xpack.prelert.job.results.PageParams;
 import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.Objects;

@@ -72,8 +73,8 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI

     static {
         PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
-        PARSER.declareString((request, start) -> request.start = start, START);
-        PARSER.declareString((request, end) -> request.end = end, END);
+        PARSER.declareStringOrNull(Request::setStart, START);
+        PARSER.declareStringOrNull(Request::setEnd, END);
         PARSER.declareBoolean(Request::setIncludeInterim, INCLUDE_INTERIM);
         PARSER.declareObject(Request::setPageParams, PageParams.PARSER, PageParams.PAGE);
         PARSER.declareDouble(Request::setAnomalyScore, ANOMALY_SCORE);

@@ -81,18 +82,12 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI
         PARSER.declareBoolean(Request::setDecending, DESCENDING_SORT);
     }

-    public static Request parseRequest(String jobId, String start, String end, XContentParser parser,
+    public static Request parseRequest(String jobId, XContentParser parser,
             ParseFieldMatcherSupplier parseFieldMatcherSupplier) {
         Request request = PARSER.apply(parser, parseFieldMatcherSupplier);
         if (jobId != null) {
             request.jobId = jobId;
         }
-        if (start != null) {
-            request.start = start;
-        }
-        if (end != null) {
-            request.end = end;
-        }
         return request;
     }

@@ -108,10 +103,8 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI
         Request() {
         }

-        public Request(String jobId, String start, String end) {
+        public Request(String jobId) {
             this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
-            this.start = ExceptionsHelper.requireNonNull(start, START.getPreferredName());
-            this.end = ExceptionsHelper.requireNonNull(end, END.getPreferredName());
         }

         public String getJobId() {

@@ -122,10 +115,18 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI
             return start;
         }

+        public void setStart(String start) {
+            this.start = start;
+        }
+
         public String getEnd() {
             return end;
         }

+        public void setEnd(String end) {
+            this.end = end;
+        }
+
         public boolean isDecending() {
             return decending;
         }

@@ -177,8 +178,8 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI
         jobId = in.readString();
         includeInterim = in.readBoolean();
         pageParams = new PageParams(in);
-        start = in.readString();
-        end = in.readString();
+        start = in.readOptionalString();
+        end = in.readOptionalString();
         sort = in.readOptionalString();
         decending = in.readBoolean();
         anomalyScoreFilter = in.readDouble();

@@ -190,8 +191,8 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI
         out.writeString(jobId);
         out.writeBoolean(includeInterim);
         pageParams.writeTo(out);
-        out.writeString(start);
-        out.writeString(end);
+        out.writeOptionalString(start);
+        out.writeOptionalString(end);
         out.writeOptionalString(sort);
         out.writeBoolean(decending);
         out.writeDouble(anomalyScoreFilter);

GetRecordsAction.java

@@ -75,8 +75,8 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord

     static {
         PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
-        PARSER.declareString((request, start) -> request.start = start, START);
-        PARSER.declareString((request, end) -> request.end = end, END);
+        PARSER.declareStringOrNull(Request::setStart, START);
+        PARSER.declareStringOrNull(Request::setEnd, END);
         PARSER.declareString(Request::setPartitionValue, PARTITION_VALUE);
         PARSER.declareString(Request::setSort, SORT);
         PARSER.declareBoolean(Request::setDecending, DESCENDING);

@@ -108,10 +108,8 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord
         Request() {
         }

-        public Request(String jobId, String start, String end) {
+        public Request(String jobId) {
             this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
-            this.start = ExceptionsHelper.requireNonNull(start, START.getPreferredName());
-            this.end = ExceptionsHelper.requireNonNull(end, END.getPreferredName());
         }

         public String getJobId() {

@@ -122,10 +120,18 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord
             return start;
         }

+        public void setStart(String start) {
+            this.start = start;
+        }
+
         public String getEnd() {
             return end;
         }

+        public void setEnd(String end) {
+            this.end = end;
+        }
+
         public boolean isDecending() {
             return decending;
         }

@@ -192,8 +198,8 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord
         jobId = in.readString();
         includeInterim = in.readBoolean();
         pageParams = new PageParams(in);
-        start = in.readString();
-        end = in.readString();
+        start = in.readOptionalString();
+        end = in.readOptionalString();
         sort = in.readOptionalString();
         decending = in.readBoolean();
         anomalyScoreFilter = in.readDouble();

@@ -207,8 +213,8 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord
         out.writeString(jobId);
         out.writeBoolean(includeInterim);
         pageParams.writeTo(out);
-        out.writeString(start);
-        out.writeString(end);
+        out.writeOptionalString(start);
+        out.writeOptionalString(end);
         out.writeOptionalString(sort);
         out.writeBoolean(decending);
         out.writeDouble(anomalyScoreFilter);

@@ -220,12 +226,8 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
             builder.field(Job.ID.getPreferredName(), jobId);
-            if (start != null) {
-                builder.field(START.getPreferredName(), start);
-            }
-            if (end != null) {
-                builder.field(END.getPreferredName(), end);
-            }
+            builder.field(START.getPreferredName(), start);
+            builder.field(END.getPreferredName(), end);
             builder.field(SORT.getPreferredName(), sort);
             builder.field(DESCENDING.getPreferredName(), decending);
             builder.field(ANOMALY_SCORE_FILTER.getPreferredName(), anomalyScoreFilter);

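Across all three request classes the wire format for start and end changes from writeString/readString to writeOptionalString/readOptionalString, so a null value can cross the transport layer. Conceptually, an optional string is encoded as a presence flag followed by the value; a small self-contained sketch of that idea (it mirrors the concept behind writeOptionalString/readOptionalString, not their exact byte format):

    import java.io.*;

    // Sketch of "optional string" wire encoding: a boolean presence flag,
    // then the value only when present.
    public class OptionalStringWireSketch {

        static void writeOptionalString(DataOutputStream out, String value) throws IOException {
            out.writeBoolean(value != null);
            if (value != null) {
                out.writeUTF(value);
            }
        }

        static String readOptionalString(DataInputStream in) throws IOException {
            return in.readBoolean() ? in.readUTF() : null;
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            writeOptionalString(out, "2016-06-01T00:00:00Z"); // start present
            writeOptionalString(out, null);                   // end omitted

            DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            System.out.println("start = " + readOptionalString(in)); // 2016-06-01T00:00:00Z
            System.out.println("end   = " + readOptionalString(in)); // null
        }
    }
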
BucketsQueryBuilder.java

@@ -198,7 +198,7 @@ public final class BucketsQueryBuilder
                 Objects.equals(expand, other.expand) &&
                 Objects.equals(includeInterim, other.includeInterim) &&
-                Objects.equals(epochStart, other.epochStart) &&
+                Objects.equals(epochStart, other.epochStart) &&
                 Objects.equals(epochEnd, other.epochEnd) &&
                 Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
                 Objects.equals(normalizedProbability, other.normalizedProbability) &&
                 Objects.equals(partitionValue, other.partitionValue) &&

ElasticsearchMappings.java

@@ -217,6 +217,7 @@ public class ElasticsearchMappings
                 .endObject();

         addAnomalyRecordFieldsToMapping(builder);
+        addInfluencerFieldsToMapping(builder);

         if (termFieldNames != null) {
             ElasticsearchDotNotationReverser reverser = new ElasticsearchDotNotationReverser();

@@ -244,7 +245,7 @@ public class ElasticsearchMappings
      * @return builder
      * @throws IOException On write error
      */
-    private static XContentBuilder addAnomalyRecordFieldsToMapping(XContentBuilder builder)
+    private static XContentBuilder addAnomalyRecordFieldsToMapping(XContentBuilder builder)
             throws IOException {
         builder.startObject(AnomalyRecord.DETECTOR_INDEX.getPreferredName())
                 .field(TYPE, INTEGER).field(INCLUDE_IN_ALL, false)

@@ -351,6 +352,26 @@ public class ElasticsearchMappings
         return builder;
     }

+    private static XContentBuilder addInfluencerFieldsToMapping(XContentBuilder builder) throws IOException {
+        builder.startObject(Influencer.INFLUENCER_FIELD_NAME.getPreferredName())
+                .field(TYPE, KEYWORD).field(INCLUDE_IN_ALL, false)
+                .endObject()
+                .startObject(Influencer.INFLUENCER_FIELD_VALUE.getPreferredName())
+                .field(TYPE, KEYWORD).field(INCLUDE_IN_ALL, false)
+                .endObject()
+                .startObject(Influencer.ANOMALY_SCORE.getPreferredName())
+                .field(TYPE, DOUBLE).field(INCLUDE_IN_ALL, false)
+                .endObject()
+                .startObject(Influencer.INITIAL_ANOMALY_SCORE.getPreferredName())
+                .field(TYPE, DOUBLE).field(INCLUDE_IN_ALL, false)
+                .endObject()
+                .startObject(Influencer.PROBABILITY.getPreferredName())
+                .field(TYPE, DOUBLE).field(INCLUDE_IN_ALL, false)
+                .endObject();
+
+        return builder;
+    }
+
     public static XContentBuilder dataCountsMapping() throws IOException {
         return jsonBuilder()
                 .startObject()

InfluencersQueryBuilder.java

@@ -158,7 +158,7 @@ public final class InfluencersQueryBuilder
                 Objects.equals(size, other.size) &&
                 Objects.equals(includeInterim, other.includeInterim) &&
-                Objects.equals(epochStart, other.epochStart) &&
+                Objects.equals(epochStart, other.epochStart) &&
                 Objects.equals(epochEnd, other.epochEnd) &&
                 Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
                 Objects.equals(sortField, other.sortField) &&
                 this.sortDescending == other.sortDescending;

ReservedFieldNames.java

@@ -80,6 +80,7 @@ public final class ReservedFieldNames
             AnomalyRecord.BUCKET_SPAN.getPreferredName(),

             Bucket.ANOMALY_SCORE.getPreferredName(),
             Bucket.BUCKET_INFLUENCERS.getPreferredName(),
             Bucket.BUCKET_SPAN.getPreferredName(),
             Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName(),
             Bucket.IS_INTERIM.getPreferredName(),

RestGetInfluencersAction.java

@@ -47,9 +47,11 @@ public class RestGetInfluencersAction extends BaseRestHandler {
         final GetInfluencersAction.Request request;
         if (bodyBytes != null && bodyBytes.length() > 0) {
             XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
-            request = GetInfluencersAction.Request.parseRequest(jobId, start, end, parser, () -> parseFieldMatcher);
+            request = GetInfluencersAction.Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
         } else {
-            request = new GetInfluencersAction.Request(jobId, start, end);
+            request = new GetInfluencersAction.Request(jobId);
+            request.setStart(start);
+            request.setEnd(end);
             request.setIncludeInterim(restRequest.paramAsBoolean(GetInfluencersAction.Request.INCLUDE_INTERIM.getPreferredName(), false));
             request.setPageParams(new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM),
                     restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));

@@ -57,7 +59,7 @@ public class RestGetInfluencersAction extends BaseRestHandler {
                     Double.parseDouble(restRequest.param(GetInfluencersAction.Request.ANOMALY_SCORE.getPreferredName(), "0.0")));
             request.setSort(restRequest.param(GetInfluencersAction.Request.SORT_FIELD.getPreferredName(),
                     Influencer.ANOMALY_SCORE.getPreferredName()));
-            request.setDecending(restRequest.paramAsBoolean(GetInfluencersAction.Request.DESCENDING_SORT.getPreferredName(), false));
+            request.setDecending(restRequest.paramAsBoolean(GetInfluencersAction.Request.DESCENDING_SORT.getPreferredName(), true));
         }

         return channel -> transportAction.execute(request, new RestToXContentListener<GetInfluencersAction.Response>(channel));

RestGetBucketsAction.java

@@ -63,7 +63,7 @@ public class RestGetBucketsAction extends BaseRestHandler {
             request.setTimestamp(timestamp);
             request.setExpand(restRequest.paramAsBoolean(GetBucketsAction.Request.EXPAND.getPreferredName(), false));
             request.setIncludeInterim(restRequest.paramAsBoolean(GetBucketsAction.Request.INCLUDE_INTERIM.getPreferredName(), false));
-        } else if (start != null && !start.isEmpty() && end != null && !end.isEmpty()) {
+        } else {
             // Multiple buckets
             request.setStart(start);
             request.setEnd(end);

@@ -77,8 +77,6 @@ public class RestGetBucketsAction extends BaseRestHandler {
             if (restRequest.hasParam(GetBucketsAction.Request.PARTITION_VALUE.getPreferredName())) {
                 request.setPartitionValue(restRequest.param(GetBucketsAction.Request.PARTITION_VALUE.getPreferredName()));
             }
-        } else {
-            throw new IllegalArgumentException("Either [timestamp] or [start, end] parameters must be set.");
         }

         // Common options

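With the check gone from GetBucketsAction.Request.validate() and the IllegalArgumentException removed here, a request with neither timestamp nor time range now simply falls into the multi-bucket branch and returns every bucket for the job. A small self-contained sketch of that dispatch, with hypothetical names (the real handler works on RestRequest and GetBucketsAction.Request):

    // Sketch only: single-bucket vs. multi-bucket decision after this change.
    // BucketQuery and describeQuery are hypothetical illustration names.
    public class BucketQueryDispatchSketch {

        record BucketQuery(String timestamp, String start, String end) {}

        static String describeQuery(BucketQuery q) {
            if (q.timestamp() != null && !q.timestamp().isEmpty()) {
                // Single bucket lookup, as before.
                return "single bucket at " + q.timestamp();
            }
            // Multiple buckets; start and end may each be null, i.e. unbounded.
            return "buckets from " + (q.start() == null ? "unbounded" : q.start())
                    + " to " + (q.end() == null ? "unbounded" : q.end());
        }

        public static void main(String[] args) {
            System.out.println(describeQuery(new BucketQuery("2016-06-01T00:00:00Z", null, null)));
            System.out.println(describeQuery(new BucketQuery(null, "2016-06-01T00:00:00Z", "2016-06-01T01:00:00Z")));
            System.out.println(describeQuery(new BucketQuery(null, null, null))); // now valid: all buckets
        }
    }
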
RestGetRecordsAction.java

@@ -34,9 +34,9 @@ public class RestGetRecordsAction extends BaseRestHandler {

     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
-        GetRecordsAction.Request request = new GetRecordsAction.Request(restRequest.param(Job.ID.getPreferredName()),
-                restRequest.param(GetRecordsAction.Request.START.getPreferredName()),
-                restRequest.param(GetRecordsAction.Request.END.getPreferredName()));
+        GetRecordsAction.Request request = new GetRecordsAction.Request(restRequest.param(Job.ID.getPreferredName()));
+        request.setStart(restRequest.param(GetRecordsAction.Request.START.getPreferredName()));
+        request.setEnd(restRequest.param(GetRecordsAction.Request.END.getPreferredName()));
         request.setIncludeInterim(restRequest.paramAsBoolean(GetRecordsAction.Request.INCLUDE_INTERIM.getPreferredName(), false));
         request.setPageParams(new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM),
                 restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));

@@ -44,7 +44,7 @@ public class RestGetRecordsAction extends BaseRestHandler {
                 Double.parseDouble(restRequest.param(GetRecordsAction.Request.ANOMALY_SCORE_FILTER.getPreferredName(), "0.0")));
         request.setSort(restRequest.param(GetRecordsAction.Request.SORT.getPreferredName(),
                 AnomalyRecord.NORMALIZED_PROBABILITY.getPreferredName()));
-        request.setDecending(restRequest.paramAsBoolean(GetRecordsAction.Request.DESCENDING.getPreferredName(), false));
+        request.setDecending(restRequest.paramAsBoolean(GetRecordsAction.Request.DESCENDING.getPreferredName(), true));
         request.setMaxNormalizedProbability(
                 Double.parseDouble(restRequest.param(GetRecordsAction.Request.MAX_NORMALIZED_PROBABILITY.getPreferredName(), "0.0")));
         String partitionValue = restRequest.param(GetRecordsAction.Request.PARTITION_VALUE.getPreferredName());

GetInfluencersActionRequestTests.java

@@ -15,13 +15,20 @@ public class GetInfluencersActionRequestTests extends AbstractStreamableXContent

     @Override
     protected Request parseInstance(XContentParser parser, ParseFieldMatcher matcher) {
-        return GetInfluencersAction.Request.parseRequest(null, null, null, parser, () -> matcher);
+        return GetInfluencersAction.Request.parseRequest(null, parser, () -> matcher);
     }

     @Override
     protected Request createTestInstance() {
-        Request request = new Request(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 20),
-                randomAsciiOfLengthBetween(1, 20));
+        Request request = new Request(randomAsciiOfLengthBetween(1, 20));
+        if (randomBoolean()) {
+            String start = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            request.setStart(start);
+        }
+        if (randomBoolean()) {
+            String end = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            request.setEnd(end);
+        }
         if (randomBoolean()) {
             request.setAnomalyScore(randomDouble());
         }

GetRecordsActionRequestTests.java

@@ -20,8 +20,15 @@ public class GetRecordsActionRequestTests extends AbstractStreamableXContentTest

     @Override
     protected Request createTestInstance() {
-        Request request = new Request(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 20),
-                randomAsciiOfLengthBetween(1, 20));
+        Request request = new Request(randomAsciiOfLengthBetween(1, 20));
+        if (randomBoolean()) {
+            String start = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            request.setStart(start);
+        }
+        if (randomBoolean()) {
+            String end = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            request.setEnd(end);
+        }
         if (randomBoolean()) {
             request.setPartitionValue(randomAsciiOfLengthBetween(1, 20));
         }

REST API spec: get_influencers

@@ -25,13 +25,11 @@
       },
       "start": {
         "type": "string",
-        "description": "start timestamp for the requested influencers",
-        "required": true
+        "description": "start timestamp for the requested influencers"
       },
       "end": {
         "type": "string",
-        "description": "end timestamp for the requested influencers",
-        "required": true
+        "description": "end timestamp for the requested influencers"
       },
       "anomalyScore": {
         "type": "double",

REST API spec: get_records

@@ -28,12 +28,10 @@
       },
       "start": {
         "type": "string",
-        "required": true,
         "description" : "Start time filter for records"
       },
       "end": {
         "type": "string",
-        "required": true,
         "description" : "End time filter for records"
       },
       "anomalyScore": {

YAML REST test: get_buckets

@@ -20,12 +20,19 @@ setup:
         id: "farequote_1464739200000_1"
         body: { "jobId": "farequote", "result_type": "bucket", "timestamp": "2016-06-01T00:00:00Z", "bucketSpan":1 }

+  - do:
+      index:
+        index: prelertresults-farequote
+        type: result
+        id: "farequote_1464739200000_2"
+        body: { "jobId": "farequote", "result_type": "bucket", "timestamp": "2016-08-01T00:00:00Z", "bucketSpan":1 }
+
   - do:
       indices.refresh:
         index: prelertresults-farequote

 ---
-"Test result buckets api":
+"Test result buckets api with time range":
   - do:
       xpack.prelert.get_buckets:
         job_id: "farequote"

@@ -37,6 +44,20 @@ setup:
   - match: { buckets.0.jobId: farequote}
   - match: { buckets.0.result_type: bucket}

+---
+"Test result buckets api":
+  - do:
+      xpack.prelert.get_buckets:
+        job_id: "farequote"
+
+  - match: { count: 2 }
+  - match: { buckets.0.timestamp: 1464739200000 }
+  - match: { buckets.0.jobId: farequote}
+  - match: { buckets.0.result_type: bucket}
+  - match: { buckets.1.timestamp: 1470009600000 }
+  - match: { buckets.1.jobId: farequote}
+  - match: { buckets.1.result_type: bucket}
+
 ---
 "Test result single bucket api":
   - do:

YAML REST test: get_influencers

@@ -25,9 +25,24 @@ setup:
           "timestamp": "2016-06-01T00:00:00Z",
           "influencerFieldName": "foo",
           "influencerFieldValue": "bar",
           "anomalyScore": 80.0,
           "result_type" : "influencer"
         }
+
+  - do:
+      index:
+        index: prelertresults-farequote
+        type: result
+        id: 2
+        body:
+          {
+            "jobId": "farequote",
+            "timestamp": "2016-06-02T00:00:00Z",
+            "influencerFieldName": "foo",
+            "influencerFieldValue": "zoo",
+            "anomalyScore": 50.0,
+            "result_type" : "influencer"
+          }
   - do:
       indices.refresh:
         index: prelertresults-farequote

@@ -37,9 +52,18 @@ setup:
   - do:
       xpack.prelert.get_influencers:
         job_id: "farequote"
-        start: "2016-05-01T00:00:00Z"
-        end: "2016-07-01T00:00:00Z"

+  - match: { count: 2 }
+  - match: { influencers.0.timestamp: 1464739200000 }
+  - match: { influencers.1.timestamp: 1464825600000 }
+
+---
+"Test result influencers api with time range":
+  - do:
+      xpack.prelert.get_influencers:
+        job_id: "farequote"
+        start: "2016-06-01T00:00:00Z"
+        end: "2016-06-01T01:00:00Z"
+
   - match: { count: 1 }
   - match: { influencers.0.timestamp: 1464739200000 }

YAML REST test: get_records

@@ -32,7 +32,28 @@ setup:
         index: prelertresults-farequote
         type: result
         id: 2
-        body: { "jobId": "farequote", "result_type": "record", "timestamp": "2016-06-01T00:00:00Z", "result_type": "record" }
+        body:
+          {
+            "jobId": "farequote",
+            "result_type": "record",
+            "timestamp": "2016-06-01T00:00:00Z",
+            "anomalyScore": 60.0,
+            "result_type": "record"
+          }
+
+  - do:
+      index:
+        index: prelertresults-farequote
+        type: result
+        id: 3
+        body:
+          {
+            "jobId": "farequote",
+            "result_type": "record",
+            "timestamp": "2016-06-02T00:00:00Z",
+            "anomalyScore": 80.0,
+            "result_type": "record"
+          }
+
   - do:
       indices.refresh:

@@ -40,11 +61,25 @@ setup:

 ---
 "Test result records api":
   - do:
       xpack.prelert.get_records:
         job_id: "farequote"
+
+  - match: { count: 2 }
+  - match: { records.0.timestamp: 1464825600000 }
+  - match: { records.0.jobId: farequote}
+  - match: { records.0.result_type: record}
+  - match: { records.1.timestamp: 1464739200000 }
+  - match: { records.1.jobId: farequote}
+  - match: { records.1.result_type: record}
+
+---
+"Test result records api with time range":
+  - do:
+      xpack.prelert.get_records:
+        job_id: "farequote"
         start: "2016-05-01T00:00:00Z"
-        end: "2016-07-01T00:00:00Z"
+        end: "2016-06-01T01:00:00Z"

   - match: { count: 1 }
   - match: { records.0.timestamp: 1464739200000 }