HLRC: ML Add preview datafeed api (#34284)

* HLRC: ML Add preview datafeed api

* Changing deprecation handling for parser

* Removing some duplication in docs, will address other APIs in another PR
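For context, a minimal usage sketch of the new API (the client setup and the datafeed id are illustrative assumptions, not part of this change; `client` is a configured RestHighLevelClient):

PreviewDatafeedRequest request = new PreviewDatafeedRequest("datafeed-1"); // hypothetical datafeed id

// Synchronous preview
PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT);
List<Map<String, Object>> records = response.getDataList(); // semi-parsed preview rows, may throw IOException

// Asynchronous preview
client.machineLearning().previewDatafeedAsync(request, RequestOptions.DEFAULT,
    new ActionListener<PreviewDatafeedResponse>() {
        @Override
        public void onResponse(PreviewDatafeedResponse previewResponse) {
            // consume the preview
        }

        @Override
        public void onFailure(Exception e) {
            // handle the failure
        }
    });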
Benjamin Trent, 2018-10-04 11:28:44 -07:00, committed by GitHub
parent dcfe64e0e4
commit 2dd058d607
11 changed files with 600 additions and 24 deletions


@@ -45,6 +45,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutJobRequest;
@@ -259,6 +260,17 @@ final class MLRequestConverters {
return request;
}
static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("datafeeds")
.addPathPart(previewDatafeedRequest.getDatafeedId())
.addPathPartAsIs("_preview")
.build();
return new Request(HttpGet.METHOD_NAME, endpoint);
}
static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")


@@ -52,6 +52,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@@ -649,6 +651,49 @@ public final class MachineLearningClient {
Collections.emptySet());
}
/**
* Previews the given Machine Learning Datafeed
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html">
* ML Preview Datafeed documentation</a>
*
* @param request The request to preview the datafeed
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link PreviewDatafeedResponse} object containing a {@link org.elasticsearch.common.bytes.BytesReference} of the data in
* JSON format
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PreviewDatafeedResponse previewDatafeed(PreviewDatafeedRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::previewDatafeed,
options,
PreviewDatafeedResponse::fromXContent,
Collections.emptySet());
}
/**
* Previews the given Machine Learning Datafeed asynchronously and notifies the listener on completion
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html">
* ML Preview Datafeed documentation</a>
*
* @param request The request to preview the datafeed
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void previewDatafeedAsync(PreviewDatafeedRequest request,
RequestOptions options,
ActionListener<PreviewDatafeedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::previewDatafeed,
options,
PreviewDatafeedResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
* <p>


@@ -0,0 +1,100 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* Request to preview a Machine Learning Datafeed
*/
public class PreviewDatafeedRequest extends ActionRequest implements ToXContentObject {
public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
"open_datafeed_request", true, a -> new PreviewDatafeedRequest((String) a[0]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID);
}
public static PreviewDatafeedRequest fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final String datafeedId;
/**
* Create a new request with the desired datafeedId
*
* @param datafeedId unique datafeedId, must not be null
*/
public PreviewDatafeedRequest(String datafeedId) {
this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null");
}
public String getDatafeedId() {
return datafeedId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public int hashCode() {
return Objects.hash(datafeedId);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
PreviewDatafeedRequest that = (PreviewDatafeedRequest) other;
return Objects.equals(datafeedId, that.datafeedId);
}
}
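As a side note, the request only carries the datafeed id: the converter shown earlier builds the GET endpoint from it, and toXContent renders it as a one-field object. A minimal sketch, assuming a hypothetical datafeed id:

PreviewDatafeedRequest request = new PreviewDatafeedRequest("datafeed-1");
String json = Strings.toString(request); // {"datafeed_id":"datafeed-1"}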


@@ -0,0 +1,113 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* Response containing a datafeed preview in JSON format
*/
public class PreviewDatafeedResponse extends ActionResponse implements ToXContentObject {
private BytesReference preview;
public static PreviewDatafeedResponse fromXContent(XContentParser parser) throws IOException {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
parser.nextToken();
builder.copyCurrentStructure(parser);
return new PreviewDatafeedResponse(BytesReference.bytes(builder));
}
}
public PreviewDatafeedResponse(BytesReference preview) {
this.preview = preview;
}
public BytesReference getPreview() {
return preview;
}
/**
* Parses the preview to a list of {@link Map} objects
* @return List of previewed data
* @throws IOException If there is a parsing issue with the {@link BytesReference}
* @throws java.lang.ClassCastException If casting the raw {@link Object} entries to a {@link Map} fails
*/
@SuppressWarnings("unchecked")
public List<Map<String, Object>> getDataList() throws IOException {
try (StreamInput streamInput = preview.streamInput();
XContentParser parser = XContentType.JSON.xContent()
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, streamInput)) {
XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.START_ARRAY) {
return parser.listOrderedMap().stream().map(obj -> (Map<String, Object>)obj).collect(Collectors.toList());
} else {
return Collections.singletonList(parser.mapOrdered());
}
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
try (InputStream stream = preview.streamInput()) {
builder.rawValue(stream, XContentType.JSON);
}
return builder;
}
@Override
public int hashCode() {
return Objects.hash(preview);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PreviewDatafeedResponse other = (PreviewDatafeedResponse) obj;
return Objects.equals(preview, other.preview);
}
@Override
public final String toString() {
return Strings.toString(this);
}
}
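For illustration, getDataList handles both shapes a preview can take, a JSON array or a single JSON object. A small sketch with made-up payloads (BytesArray is org.elasticsearch.common.bytes.BytesArray):

// A JSON array yields one map per element
PreviewDatafeedResponse arrayResponse = new PreviewDatafeedResponse(new BytesArray("[{\"total\":1},{\"total\":2}]"));
List<Map<String, Object>> rows = arrayResponse.getDataList(); // two maps

// A single JSON object yields a singleton list
PreviewDatafeedResponse objectResponse = new PreviewDatafeedResponse(new BytesArray("{\"total\":3}"));
List<Map<String, Object>> single = objectResponse.getDataList(); // one map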


@@ -41,6 +41,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutJobRequest;
@@ -293,6 +294,13 @@ public class MLRequestConvertersTests extends ESTestCase {
}
}
public void testPreviewDatafeed() {
PreviewDatafeedRequest datafeedRequest = new PreviewDatafeedRequest("datafeed_1");
Request request = MLRequestConverters.previewDatafeed(datafeedRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/datafeeds/" + datafeedRequest.getDatafeedId() + "/_preview", request.getEndpoint());
}
public void testDeleteForecast() {
String jobId = randomAlphaOfLength(10);
DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId);


@@ -49,6 +49,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@@ -76,8 +78,11 @@ import org.elasticsearch.rest.RestStatus;
import org.junit.After;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@@ -564,6 +569,56 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
}
}
public void testPreviewDatafeed() throws Exception {
String jobId = "test-preview-datafeed";
String indexName = "preview_data_1";
// Set up the index and docs
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long");
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
long now = (System.currentTimeMillis()/1000)*1000;
long thePast = now - 60000;
int i = 0;
List<Integer> totalTotals = new ArrayList<>(60);
while (thePast < now) {
Integer total = randomInt(1000);
IndexRequest doc = new IndexRequest();
doc.index(indexName);
doc.type("doc");
doc.id("id" + i);
doc.source("{\"total\":" + total + ",\"timestamp\":"+ thePast +"}", XContentType.JSON);
bulk.add(doc);
thePast += 1000;
i++;
totalTotals.add(total);
}
highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
// create the job and the datafeed
Job job = buildJob(jobId);
putJob(job);
openJob(job);
String datafeedId = jobId + "-feed";
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId)
.setIndices(indexName)
.setQueryDelay(TimeValue.timeValueSeconds(1))
.setTypes(Collections.singletonList("doc"))
.setFrequency(TimeValue.timeValueSeconds(1)).build();
machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
PreviewDatafeedResponse response = execute(new PreviewDatafeedRequest(datafeedId),
machineLearningClient::previewDatafeed,
machineLearningClient::previewDatafeedAsync);
Integer[] totals = response.getDataList().stream().map(map -> (Integer)map.get("total")).toArray(Integer[]::new);
assertThat(totalTotals, containsInAnyOrder(totals));
}
public void testDeleteForecast() throws Exception {
String jobId = "test-delete-forecast";


@@ -65,6 +65,8 @@ import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@@ -97,6 +99,7 @@ import org.elasticsearch.client.ml.job.results.Influencer;
import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
@@ -708,6 +711,66 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
public void testPreviewDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("preview-datafeed-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = job.getId() + "-feed";
String indexName = "preview_data_2";
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long");
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
.setTypes(Arrays.asList("doc"))
.setIndices(indexName)
.build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
{
//tag::preview-datafeed-request
PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId); // <1>
//end::preview-datafeed-request
//tag::preview-datafeed-execute
PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT);
//end::preview-datafeed-execute
//tag::preview-datafeed-response
BytesReference rawPreview = response.getPreview(); // <1>
List<Map<String, Object>> semiParsedPreview = response.getDataList(); // <2>
//end::preview-datafeed-response
assertTrue(semiParsedPreview.isEmpty());
}
{
PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId);
// tag::preview-datafeed-execute-listener
ActionListener<PreviewDatafeedResponse> listener = new ActionListener<PreviewDatafeedResponse>() {
@Override
public void onResponse(PreviewDatafeedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::preview-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::preview-datafeed-execute-async
client.machineLearning().previewDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::preview-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testStartDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();


@@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
public class PreviewDatafeedRequestTests extends AbstractXContentTestCase<PreviewDatafeedRequest> {
@Override
protected PreviewDatafeedRequest createTestInstance() {
return new PreviewDatafeedRequest(DatafeedConfigTests.randomValidDatafeedId());
}
@Override
protected PreviewDatafeedRequest doParseInstance(XContentParser parser) throws IOException {
return PreviewDatafeedRequest.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}


@@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.stream.Collectors;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.containsInAnyOrder;
public class PreviewDatafeedResponseTests extends ESTestCase {
protected PreviewDatafeedResponse createTestInstance() throws IOException {
//This is just to create a random object to stand in the place of random data
DatafeedConfig datafeedConfig = DatafeedConfigTests.createRandom();
BytesReference bytes = XContentHelper.toXContent(datafeedConfig, XContentType.JSON, false);
return new PreviewDatafeedResponse(bytes);
}
public void testGetDataList() throws IOException {
String rawData = "[\n" +
" {\n" +
" \"time\": 1454803200000,\n" +
" \"airline\": \"JZA\",\n" +
" \"doc_count\": 5,\n" +
" \"responsetime\": 990.4628295898438\n" +
" },\n" +
" {\n" +
" \"time\": 1454803200000,\n" +
" \"airline\": \"JBU\",\n" +
" \"doc_count\": 23,\n" +
" \"responsetime\": 877.5927124023438\n" +
" },\n" +
" {\n" +
" \"time\": 1454803200000,\n" +
" \"airline\": \"KLM\",\n" +
" \"doc_count\": 42,\n" +
" \"responsetime\": 1355.481201171875\n" +
" }\n" +
"]";
BytesReference bytes = new BytesArray(rawData);
PreviewDatafeedResponse response = new PreviewDatafeedResponse(bytes);
assertThat(response.getDataList()
.stream()
.map(map -> (String)map.get("airline"))
.collect(Collectors.toList()), containsInAnyOrder("JZA", "JBU", "KLM"));
rawData = "{\"key\":\"my_value\"}";
bytes = new BytesArray(rawData);
response = new PreviewDatafeedResponse(bytes);
assertThat(response.getDataList()
.stream()
.map(map -> (String)map.get("key"))
.collect(Collectors.toList()), containsInAnyOrder("my_value"));
}
// Because this is a raw BytesReference, the shuffling done via `AbstractXContentTestCase` is unacceptable and causes equality failures
public void testSerializationDeserialization() throws IOException {
for (int runs = 0; runs < 20; runs++) {
XContentType xContentType = XContentType.JSON;
PreviewDatafeedResponse testInstance = createTestInstance();
BytesReference originalXContent = XContentHelper.toXContent(testInstance, xContentType, false);
XContentParser parser = this.createParser(xContentType.xContent(), originalXContent);
PreviewDatafeedResponse parsed = PreviewDatafeedResponse.fromXContent(parser);
assertEquals(testInstance, parsed);
assertToXContentEquivalent(
XContentHelper.toXContent(testInstance, xContentType, false),
XContentHelper.toXContent(parsed, xContentType, false),
xContentType);
}
}
}


@@ -0,0 +1,34 @@
--
:api: preview-datafeed
:request: PreviewDatafeedRequest
:response: PreviewDatafeedResponse
--
[id="{upid}-{api}"]
=== Preview Datafeed API
The Preview Datafeed API provides the ability to preview a {ml} datafeed's data
in the cluster. It accepts a +{request}+ object and responds
with a +{response}+ object.
[id="{upid}-{api}-request"]
==== Preview Datafeed Request
A +{request}+ object is created referencing a non-null `datafeedId`.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new request referencing an existing `datafeedId`
[id="{upid}-{api}-response"]
==== Preview Datafeed Response
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> The raw +BytesReference+ of the data preview
<2> A +List<Map<String,Object>>+ that represents the previewed data
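For example, the semi-parsed preview can be iterated directly; this is a minimal sketch, and the `total` field is purely illustrative since the actual field names depend on the datafeed configuration.

["source","java"]
--------------------------------------------------
for (Map<String, Object> record : response.getDataList()) {
    Object total = record.get("total"); // illustrative field name
}
--------------------------------------------------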
include::../execution.asciidoc[]


@@ -219,33 +219,36 @@ include::licensing/get-license.asciidoc[]
include::licensing/delete-license.asciidoc[]
== Machine Learning APIs
:upid: {mainid}-x-pack-ml
:doc-tests-file: {doc-tests}/MlClientDocumentationIT.java
The Java High Level REST Client supports the following Machine Learning APIs:
* <<{upid}-put-job>>
* <<{upid}-get-job>>
* <<{upid}-delete-job>>
* <<{upid}-open-job>>
* <<{upid}-close-job>>
* <<{upid}-flush-job>>
* <<{upid}-update-job>>
* <<{upid}-get-job-stats>>
* <<{upid}-put-datafeed>>
* <<{upid}-get-datafeed>>
* <<{upid}-delete-datafeed>>
* <<{upid}-preview-datafeed>>
* <<{upid}-start-datafeed>>
* <<{upid}-stop-datafeed>>
* <<{upid}-forecast-job>>
* <<{upid}-delete-forecast>>
* <<{upid}-get-buckets>>
* <<{upid}-get-overall-buckets>>
* <<{upid}-get-records>>
* <<{upid}-post-data>>
* <<{upid}-get-influencers>>
* <<{upid}-get-categories>>
* <<{upid}-get-calendars>>
* <<{upid}-put-calendar>>
* <<{upid}-delete-calendar>>
include::ml/put-job.asciidoc[]
include::ml/get-job.asciidoc[]
@@ -257,6 +260,7 @@ include::ml/flush-job.asciidoc[]
include::ml/put-datafeed.asciidoc[]
include::ml/get-datafeed.asciidoc[]
include::ml/delete-datafeed.asciidoc[]
include::ml/preview-datafeed.asciidoc[]
include::ml/start-datafeed.asciidoc[]
include::ml/stop-datafeed.asciidoc[]
include::ml/get-job-stats.asciidoc[]