listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore,
+ options, GraphExploreResponse::fromXContext, listener, emptySet());
+ }
+
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
index 7178a9c7fc3..6c1cc205701 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
@@ -20,12 +20,15 @@
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.protocol.xpack.ml.CloseJobRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
+import org.elasticsearch.protocol.xpack.ml.GetJobRequest;
+import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
@@ -50,6 +53,23 @@ final class MLRequestConverters {
return request;
}
+ static Request getJob(GetJobRequest getJobRequest) {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(Strings.collectionToCommaDelimitedString(getJobRequest.getJobIds()))
+ .build();
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+ RequestConverters.Params params = new RequestConverters.Params(request);
+ if (getJobRequest.isAllowNoJobs() != null) {
+ params.putParam("allow_no_jobs", Boolean.toString(getJobRequest.isAllowNoJobs()));
+ }
+
+ return request;
+ }
+
static Request openJob(OpenJobRequest openJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@@ -59,11 +79,11 @@ final class MLRequestConverters {
.addPathPartAsIs("_open")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
- request.setJsonEntity(openJobRequest.toString());
+ request.setEntity(createEntity(openJobRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
- static Request closeJob(CloseJobRequest closeJobRequest) {
+ static Request closeJob(CloseJobRequest closeJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
@@ -72,18 +92,7 @@ final class MLRequestConverters {
.addPathPartAsIs("_close")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-
- RequestConverters.Params params = new RequestConverters.Params(request);
- if (closeJobRequest.isForce() != null) {
- params.putParam("force", Boolean.toString(closeJobRequest.isForce()));
- }
- if (closeJobRequest.isAllowNoJobs() != null) {
- params.putParam("allow_no_jobs", Boolean.toString(closeJobRequest.isAllowNoJobs()));
- }
- if (closeJobRequest.getTimeout() != null) {
- params.putParam("timeout", closeJobRequest.getTimeout().getStringRep());
- }
-
+ request.setEntity(createEntity(closeJobRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
@@ -101,4 +110,18 @@ final class MLRequestConverters {
return request;
}
+
+ static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(getBucketsRequest.getJobId())
+ .addPathPartAsIs("results")
+ .addPathPartAsIs("buckets")
+ .build();
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+ request.setEntity(createEntity(getBucketsRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
index 2073d613ac6..c4dcc1eaffc 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
@@ -23,6 +23,10 @@ import org.elasticsearch.protocol.xpack.ml.CloseJobRequest;
import org.elasticsearch.protocol.xpack.ml.CloseJobResponse;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse;
+import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest;
+import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse;
+import org.elasticsearch.protocol.xpack.ml.GetJobRequest;
+import org.elasticsearch.protocol.xpack.ml.GetJobResponse;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobResponse;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
@@ -52,7 +56,7 @@ public final class MachineLearningClient {
* For additional info
* see ML PUT job documentation
*
- * @param request the PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
+ * @param request The PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return PutJobResponse with enclosed {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} object
* @throws IOException when there is a serialization issue sending the request or receiving the response
@@ -71,7 +75,7 @@ public final class MachineLearningClient {
* For additional info
* see ML PUT job documentation
*
- * @param request the request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
+ * @param request The request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
@@ -84,13 +88,54 @@ public final class MachineLearningClient {
Collections.emptySet());
}
+ /**
+ * Gets configuration information for one or more Machine Learning jobs.
+ *
+ *
+ * For additional info
+ * see
+ *
+ * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return {@link GetJobResponse} response object containing
+ * the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} objects and the number of jobs found
+ * @throws IOException when there is a serialization issue sending the request or receiving the response
+ */
+ public GetJobResponse getJob(GetJobRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getJob,
+ options,
+ GetJobResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets configuration information for one or more Machine Learning jobs, asynchronously.
+ *
+ *
+ * For additional info
+ * see
+ *
+ * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified with {@link GetJobResponse} upon request completion
+ */
+ public void getJobAsync(GetJobRequest request, RequestOptions options, ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getJob,
+ options,
+ GetJobResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
/**
* Deletes the given Machine Learning Job
*
* For additional info
* see ML Delete Job documentation
*
- * @param request the request to delete the job
+ * @param request The request to delete the job
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return action acknowledgement
* @throws IOException when there is a serialization issue sending the request or receiving the response
@@ -109,7 +154,7 @@ public final class MachineLearningClient {
* For additional info
* see ML Delete Job documentation
*
- * @param request the request to delete the job
+ * @param request The request to delete the job
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
@@ -133,7 +178,7 @@ public final class MachineLearningClient {
* For additional info
* see
*
- * @param request request containing job_id and additional optional options
+ * @param request Request containing job_id and additional optional options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return response containing if the job was successfully opened or not.
* @throws IOException when there is a serialization issue sending the request or receiving the response
@@ -156,7 +201,7 @@ public final class MachineLearningClient {
* For additional info
* see
*
- * @param request request containing job_id and additional optional options
+ * @param request Request containing job_id and additional optional options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
@@ -174,7 +219,7 @@ public final class MachineLearningClient {
*
* A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
*
- * @param request request containing job_ids and additional options. See {@link CloseJobRequest}
+ * @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return response containing if the job was successfully closed or not.
* @throws IOException when there is a serialization issue sending the request or receiving the response
@@ -192,7 +237,7 @@ public final class MachineLearningClient {
*
* A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
*
- * @param request request containing job_ids and additional options. See {@link CloseJobRequest}
+ * @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
@@ -204,4 +249,40 @@ public final class MachineLearningClient {
listener,
Collections.emptySet());
}
+
+ /**
+ * Gets the buckets for a Machine Learning Job.
+ *
+ * For additional info
+ * see ML GET buckets documentation
+ *
+ * @param request The request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ */
+ public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getBuckets,
+ options,
+ GetBucketsResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved.
+ *
+ * For additional info
+ * see ML GET buckets documentation
+ *
+ * @param request The request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified upon request completion
+ */
+ public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getBuckets,
+ options,
+ GetBucketsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index a83415d60fd..6133cd003dc 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -118,6 +118,7 @@ import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
@@ -1128,6 +1129,13 @@ final class RequestConverters {
return request;
}
+ static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException {
+ String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+ request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index 51953f87540..fc37b9352d1 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -209,6 +209,7 @@ public class RestHighLevelClient implements Closeable {
private final TasksClient tasksClient = new TasksClient(this);
private final XPackClient xPackClient = new XPackClient(this);
private final WatcherClient watcherClient = new WatcherClient(this);
+ private final GraphClient graphClient = new GraphClient(this);
private final LicenseClient licenseClient = new LicenseClient(this);
private final MigrationClient migrationClient = new MigrationClient(this);
private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
@@ -325,6 +326,16 @@ public class RestHighLevelClient implements Closeable {
* Watcher APIs on elastic.co for more information.
*/
public WatcherClient watcher() { return watcherClient; }
+
+ /**
+ * Provides methods for accessing the Elastic Licensed Graph explore API that
+ * is shipped with the default distribution of Elasticsearch. All of
+ * these APIs will 404 if run against the OSS distribution of Elasticsearch.
+ *
+ * See the
+ * Graph API on elastic.co for more information.
+ */
+ public GraphClient graph() { return graphClient; }
/**
* Provides methods for accessing the Elastic Licensed Licensing APIs that
@@ -961,6 +972,11 @@ public class RestHighLevelClient implements Closeable {
FieldCapabilitiesResponse::fromXContent, listener, emptySet());
}
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final Resp performRequestAndParseEntity(Req request,
CheckedFunction requestConverter,
RequestOptions options,
@@ -970,15 +986,58 @@ public class RestHighLevelClient implements Closeable {
response -> parseEntity(response.getEntity(), entityParser), ignores);
}
+ /**
+ * Defines a helper method for performing a request and then parsing the returned entity using the provided entityParser.
+ */
+ protected final Resp performRequestAndParseEntity(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction entityParser,
+ Set ignores) throws IOException {
+ return performRequest(request, requestConverter, options,
+ response -> parseEntity(response.getEntity(), entityParser), ignores);
+ }
+
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final Resp performRequest(Req request,
- CheckedFunction requestConverter,
- RequestOptions options,
- CheckedFunction responseConverter,
- Set ignores) throws IOException {
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ Set ignores) throws IOException {
ActionRequestValidationException validationException = request.validate();
- if (validationException != null) {
+ if (validationException != null && validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
+ return internalPerformRequest(request, requestConverter, options, responseConverter, ignores);
+ }
+
+ /**
+ * Defines a helper method for performing a request.
+ */
+ protected final Resp performRequest(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ Set ignores) throws IOException {
+ ValidationException validationException = request.validate();
+ if (validationException != null && validationException.validationErrors().isEmpty() == false) {
+ throw validationException;
+ }
+ return internalPerformRequest(request, requestConverter, options, responseConverter, ignores);
+ }
+
+ /**
+ * Provides common functionality for performing a request.
+ */
+ private Resp internalPerformRequest(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ Set ignores) throws IOException {
Request req = requestConverter.apply(request);
req.setOptions(options);
Response response;
@@ -1006,25 +1065,75 @@ public class RestHighLevelClient implements Closeable {
}
}
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final void performRequestAsyncAndParseEntity(Req request,
- CheckedFunction requestConverter,
- RequestOptions options,
- CheckedFunction entityParser,
- ActionListener listener, Set ignores) {
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction entityParser,
+ ActionListener listener, Set ignores) {
performRequestAsync(request, requestConverter, options,
response -> parseEntity(response.getEntity(), entityParser), listener, ignores);
}
+ /**
+ * Defines a helper method for asynchronously performing a request.
+ */
+ protected final void performRequestAsyncAndParseEntity(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction entityParser,
+ ActionListener listener, Set ignores) {
+ performRequestAsync(request, requestConverter, options,
+ response -> parseEntity(response.getEntity(), entityParser), listener, ignores);
+ }
+
+
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final void performRequestAsync(Req request,
- CheckedFunction requestConverter,
- RequestOptions options,
- CheckedFunction responseConverter,
- ActionListener listener, Set ignores) {
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ ActionListener listener, Set ignores) {
ActionRequestValidationException validationException = request.validate();
- if (validationException != null) {
+ if (validationException != null && validationException.validationErrors().isEmpty() == false) {
listener.onFailure(validationException);
return;
}
+ internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores);
+ }
+
+ /**
+ * Defines a helper method for asynchronously performing a request.
+ */
+ protected final void performRequestAsync(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ ActionListener listener, Set ignores) {
+ ValidationException validationException = request.validate();
+ if (validationException != null && validationException.validationErrors().isEmpty() == false) {
+ listener.onFailure(validationException);
+ return;
+ }
+ internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores);
+ }
+
+ /**
+ * Provides common functionality for asynchronously performing a request.
+ */
+ private void internalPerformRequestAsync(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ ActionListener listener, Set ignores) {
Request req;
try {
req = requestConverter.apply(request);
@@ -1038,6 +1147,7 @@ public class RestHighLevelClient implements Closeable {
client.performRequestAsync(req, responseListener);
}
+
final ResponseListener wrapResponseListener(CheckedFunction responseConverter,
ActionListener actionListener, Set ignores) {
return new ResponseListener() {
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java
new file mode 100644
index 00000000000..2efff4d3663
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+/**
+ * Defines a validation layer for Requests.
+ */
+public interface Validatable {
+ ValidationException EMPTY_VALIDATION = new ValidationException() {
+ @Override
+ public void addValidationError(String error) {
+ throw new UnsupportedOperationException("Validation messages should not be added to the empty validation");
+ }
+ };
+
+ /**
+ * Perform validation. This method does not have to be overridden in the event that no validation needs to be done.
+ *
+ * @return potentially null in the event of older actions, an empty {@link ValidationException} in newer actions, or a
+ * {@link ValidationException} containing a list of all failed validations.
+ */
+ default ValidationException validate() {
+ return EMPTY_VALIDATION;
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java
new file mode 100644
index 00000000000..6b5d738d675
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Encapsulates an accumulation of validation errors
+ */
+public class ValidationException extends IllegalArgumentException {
+ private final List validationErrors = new ArrayList<>();
+
+ /**
+ * Add a new validation error to the accumulating validation errors
+ * @param error the error to add
+ */
+ public void addValidationError(String error) {
+ validationErrors.add(error);
+ }
+
+ /**
+ * Returns the validation errors accumulated
+ */
+ public final List validationErrors() {
+ return validationErrors;
+ }
+
+ @Override
+ public final String getMessage() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("Validation Failed: ");
+ int index = 0;
+ for (String error : validationErrors) {
+ sb.append(++index).append(": ").append(error).append(";");
+ }
+ return sb.toString();
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java
new file mode 100644
index 00000000000..4376b47d737
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.action.ShardOperationFailedException;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
+import org.elasticsearch.protocol.xpack.graph.Hop;
+import org.elasticsearch.protocol.xpack.graph.Vertex;
+import org.elasticsearch.protocol.xpack.graph.VertexRequest;
+import org.hamcrest.Matchers;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+public class GraphIT extends ESRestHighLevelClientTestCase {
+
+ @Before
+ public void indexDocuments() throws IOException {
+ // Create chain of doc IDs across indices 1->2->3
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1");
+ doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}");
+ client().performRequest(doc1);
+
+ Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/1");
+ doc2.setJsonEntity("{\"num\":[1,2], \"const\":\"foo\"}");
+ client().performRequest(doc2);
+
+ Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/type/2");
+ doc3.setJsonEntity("{\"num\":[2,3], \"const\":\"foo\"}");
+ client().performRequest(doc3);
+
+ Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
+ doc4.setJsonEntity("{\"num\":\"string\", \"const\":\"foo\"}");
+ client().performRequest(doc4);
+
+ Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
+ doc5.setJsonEntity("{\"num\":[2,4], \"const\":\"foo\"}");
+ client().performRequest(doc5);
+
+
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
+ }
+
+ public void testCleanExplore() throws Exception {
+ GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
+ graphExploreRequest.indices("index1", "index2");
+ graphExploreRequest.useSignificance(false);
+ int numHops = 3;
+ for (int i = 0; i < numHops; i++) {
+ QueryBuilder guidingQuery = null;
+ if (i == 0) {
+ guidingQuery = new TermQueryBuilder("const.keyword", "start");
+ } else if (randomBoolean()){
+ guidingQuery = new TermQueryBuilder("const.keyword", "foo");
+ }
+ Hop hop = graphExploreRequest.createNextHop(guidingQuery);
+ VertexRequest vr = hop.addVertexRequest("num");
+ vr.minDocCount(1);
+ }
+ Map expectedTermsAndDepths = new HashMap<>();
+ expectedTermsAndDepths.put("1", 0);
+ expectedTermsAndDepths.put("2", 1);
+ expectedTermsAndDepths.put("3", 2);
+
+ GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT);
+ Map actualTermsAndDepths = new HashMap<>();
+ Collection v = exploreResponse.getVertices();
+ for (Vertex vertex : v) {
+ actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth());
+ }
+ assertEquals(expectedTermsAndDepths, actualTermsAndDepths);
+ assertThat(exploreResponse.isTimedOut(), Matchers.is(false));
+ ShardOperationFailedException[] failures = exploreResponse.getShardFailures();
+ assertThat(failures.length, Matchers.equalTo(0));
+
+ }
+
+ public void testBadExplore() throws Exception {
+ // Explore indices where the lack of fielddata=true on one index leads to partial failures
+ GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
+ graphExploreRequest.indices("index1", "index2", "index_no_field_data");
+ graphExploreRequest.useSignificance(false);
+ int numHops = 3;
+ for (int i = 0; i < numHops; i++) {
+ QueryBuilder guidingQuery = null;
+ if (i == 0) {
+ guidingQuery = new TermQueryBuilder("const.keyword", "start");
+ } else if (randomBoolean()){
+ guidingQuery = new TermQueryBuilder("const.keyword", "foo");
+ }
+ Hop hop = graphExploreRequest.createNextHop(guidingQuery);
+ VertexRequest vr = hop.addVertexRequest("num");
+ vr.minDocCount(1);
+ }
+ Map expectedTermsAndDepths = new HashMap<>();
+ expectedTermsAndDepths.put("1", 0);
+ expectedTermsAndDepths.put("2", 1);
+ expectedTermsAndDepths.put("3", 2);
+
+ GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT);
+ Map actualTermsAndDepths = new HashMap<>();
+ Collection v = exploreResponse.getVertices();
+ for (Vertex vertex : v) {
+ actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth());
+ }
+ assertEquals(expectedTermsAndDepths, actualTermsAndDepths);
+ assertThat(exploreResponse.isTimedOut(), Matchers.is(false));
+ ShardOperationFailedException[] failures = exploreResponse.getShardFailures();
+ assertThat(failures.length, Matchers.equalTo(1));
+ assertTrue(failures[0].reason().contains("Fielddata is disabled"));
+
+ }
+
+
+}
\ No newline at end of file
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
index a313b99a54f..9065cda9cd6 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
@@ -20,17 +20,22 @@
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.protocol.xpack.ml.CloseJobRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
+import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest;
+import org.elasticsearch.protocol.xpack.ml.GetJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+import org.elasticsearch.protocol.xpack.ml.job.util.PageParams;
import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayOutputStream;
@@ -47,6 +52,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.putJob(putJobRequest);
+ assertEquals(HttpPut.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo"));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
Job parsedJob = Job.PARSER.apply(parser, null).build();
@@ -54,6 +60,23 @@ public class MLRequestConvertersTests extends ESTestCase {
}
}
+ public void testGetJob() {
+ GetJobRequest getJobRequest = new GetJobRequest();
+
+ Request request = MLRequestConverters.getJob(getJobRequest);
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors", request.getEndpoint());
+ assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+
+ getJobRequest = new GetJobRequest("job1", "jobs*");
+ getJobRequest.setAllowNoJobs(true);
+ request = MLRequestConverters.getJob(getJobRequest);
+
+ assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*", request.getEndpoint());
+ assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+ }
+
public void testOpenJob() throws Exception {
String jobId = "some-job-id";
OpenJobRequest openJobRequest = new OpenJobRequest(jobId);
@@ -62,21 +85,17 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.openJob(openJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint());
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- request.getEntity().writeTo(bos);
- assertEquals(bos.toString("UTF-8"), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}");
+ assertEquals(requestEntityToString(request), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}");
}
- public void testCloseJob() {
+ public void testCloseJob() throws Exception {
String jobId = "somejobid";
CloseJobRequest closeJobRequest = new CloseJobRequest(jobId);
Request request = MLRequestConverters.closeJob(closeJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_close", request.getEndpoint());
- assertFalse(request.getParameters().containsKey("force"));
- assertFalse(request.getParameters().containsKey("allow_no_jobs"));
- assertFalse(request.getParameters().containsKey("timeout"));
+ assertEquals("{\"job_id\":\"somejobid\"}", requestEntityToString(request));
closeJobRequest = new CloseJobRequest(jobId, "otherjobs*");
closeJobRequest.setForce(true);
@@ -85,9 +104,8 @@ public class MLRequestConvertersTests extends ESTestCase {
request = MLRequestConverters.closeJob(closeJobRequest);
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + ",otherjobs*/_close", request.getEndpoint());
- assertEquals(Boolean.toString(true), request.getParameters().get("force"));
- assertEquals(Boolean.toString(false), request.getParameters().get("allow_no_jobs"));
- assertEquals("10m", request.getParameters().get("timeout"));
+ assertEquals("{\"job_id\":\"somejobid,otherjobs*\",\"timeout\":\"10m\",\"force\":true,\"allow_no_jobs\":false}",
+ requestEntityToString(request));
}
public void testDeleteJob() {
@@ -104,6 +122,23 @@ public class MLRequestConvertersTests extends ESTestCase {
assertEquals(Boolean.toString(true), request.getParameters().get("force"));
}
+ public void testGetBuckets() throws IOException {
+ String jobId = randomAlphaOfLength(10);
+ GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId);
+ getBucketsRequest.setPageParams(new PageParams(100, 300));
+ getBucketsRequest.setAnomalyScore(75.0);
+ getBucketsRequest.setSort("anomaly_score");
+ getBucketsRequest.setDescending(true);
+
+ Request request = MLRequestConverters.getBuckets(getBucketsRequest);
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint());
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
+ GetBucketsRequest parsedRequest = GetBucketsRequest.PARSER.apply(parser, null);
+ assertThat(parsedRequest, equalTo(getBucketsRequest));
+ }
+ }
+
private static Job createValidJob(String jobId) {
AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
Detector.builder().setFunction("count").build()));
@@ -111,4 +146,10 @@ public class MLRequestConvertersTests extends ESTestCase {
jobBuilder.setAnalysisConfig(analysisConfig);
return jobBuilder.build();
}
+
+ private static String requestEntityToString(Request request) throws Exception {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ request.getEntity().writeTo(bos);
+ return bos.toString("UTF-8");
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java
new file mode 100644
index 00000000000..a4f83c347ad
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest;
+import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse;
+import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+import org.elasticsearch.protocol.xpack.ml.job.results.Bucket;
+import org.elasticsearch.protocol.xpack.ml.job.util.PageParams;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+
+public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
+
+ private static final String RESULTS_INDEX = ".ml-anomalies-shared";
+ private static final String DOC = "doc";
+
+ private static final String JOB_ID = "get-results-it-job";
+
+ // 2018-08-01T00:00:00Z
+ private static final long START_TIME_EPOCH_MS = 1533081600000L;
+
+ private BucketStats bucketStats = new BucketStats();
+
+ @Before
+ public void createJobAndIndexResults() throws IOException {
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ Job job = MachineLearningIT.buildJob(JOB_ID);
+ machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+
+ long time = START_TIME_EPOCH_MS;
+ long endTime = time + 3600000L * 24 * 10; // 10 days of hourly buckets
+ while (time < endTime) {
+ addBucketIndexRequest(time, false, bulkRequest);
+ addRecordIndexRequests(time, false, bulkRequest);
+ time += 3600000L;
+ }
+
+ // Also index an interim bucket
+ addBucketIndexRequest(time, true, bulkRequest);
+ addRecordIndexRequests(time, true, bulkRequest);
+
+ highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
+ }
+
+ private void addBucketIndexRequest(long timestamp, boolean isInterim, BulkRequest bulkRequest) {
+ IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC);
+ double bucketScore = randomDoubleBetween(0.0, 100.0, true);
+ bucketStats.report(bucketScore);
+ indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"bucket\", \"timestamp\": " + timestamp + "," +
+ "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"anomaly_score\": " + bucketScore +
+ ", \"bucket_influencers\":[{\"job_id\": \"" + JOB_ID + "\", \"result_type\":\"bucket_influencer\", " +
+ "\"influencer_field_name\": \"bucket_time\", \"timestamp\": " + timestamp + ", \"bucket_span\": 3600, " +
+ "\"is_interim\": " + isInterim + "}]}", XContentType.JSON);
+ bulkRequest.add(indexRequest);
+ }
+
+ private void addRecordIndexRequests(long timestamp, boolean isInterim, BulkRequest bulkRequest) {
+ if (randomBoolean()) {
+ return;
+ }
+ int recordCount = randomIntBetween(1, 3);
+ for (int i = 0; i < recordCount; ++i) {
+ IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC);
+ double recordScore = randomDoubleBetween(0.0, 100.0, true);
+ double p = randomDoubleBetween(0.0, 0.05, false);
+ indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"record\", \"timestamp\": " + timestamp + "," +
+ "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"record_score\": " + recordScore + ", \"probability\": "
+ + p + "}", XContentType.JSON);
+ bulkRequest.add(indexRequest);
+ }
+ }
+
+ @After
+ public void deleteJob() throws IOException {
+ new MlRestTestStateCleaner(logger, client()).clearMlMetadata();
+ }
+
+ public void testGetBuckets() throws IOException {
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(241L));
+ assertThat(response.buckets().size(), equalTo(100));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setTimestamp("1533081600000");
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(1L));
+ assertThat(response.buckets().size(), equalTo(1));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setAnomalyScore(75.0);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(bucketStats.criticalCount));
+ assertThat(response.buckets().size(), equalTo((int) Math.min(100, bucketStats.criticalCount)));
+ assertThat(response.buckets().stream().anyMatch(b -> b.getAnomalyScore() < 75.0), is(false));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setExcludeInterim(true);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(240L));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setStart("1533081600000");
+ request.setEnd("1533092400000");
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(3L));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3600000L));
+ assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 2 * 3600000L));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setPageParams(new PageParams(3, 3));
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.buckets().size(), equalTo(3));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3 * 3600000L));
+ assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 4 * 3600000L));
+ assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 5 * 3600000L));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setSort("anomaly_score");
+ request.setDescending(true);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ double previousScore = 100.0;
+ for (Bucket bucket : response.buckets()) {
+ assertThat(bucket.getAnomalyScore(), lessThanOrEqualTo(previousScore));
+ previousScore = bucket.getAnomalyScore();
+ }
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ // Make sure we get all buckets
+ request.setPageParams(new PageParams(0, 10000));
+ request.setExpand(true);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.buckets().stream().anyMatch(b -> b.getRecords().size() > 0), is(true));
+ }
+ }
+
+ private static class BucketStats {
+ // score < 50.0
+ private long minorCount;
+
+ // score < 75.0
+ private long majorCount;
+
+ // score >= 75.0
+ private long criticalCount;
+
+ private void report(double anomalyScore) {
+ if (anomalyScore < 50.0) {
+ minorCount++;
+ } else if (anomalyScore < 75.0) {
+ majorCount++;
+ } else {
+ criticalCount++;
+ }
+ }
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
index 6c4fa6e4514..cec5dd7ccf8 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
@@ -19,12 +19,13 @@
package org.elasticsearch.client;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.protocol.xpack.ml.CloseJobRequest;
import org.elasticsearch.protocol.xpack.ml.CloseJobResponse;
import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse;
+import org.elasticsearch.protocol.xpack.ml.GetJobRequest;
+import org.elasticsearch.protocol.xpack.ml.GetJobResponse;
import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
import org.elasticsearch.protocol.xpack.ml.OpenJobResponse;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
@@ -33,15 +34,25 @@ import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription;
import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+import org.junit.After;
+import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import static org.hamcrest.CoreMatchers.hasItems;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
-@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32993")
public class MachineLearningIT extends ESRestHighLevelClientTestCase {
+ @After
+ public void cleanUp() throws IOException {
+ new MlRestTestStateCleaner(logger, client()).clearMlMetadata();
+ }
+
public void testPutJob() throws Exception {
String jobId = randomValidJobId();
Job job = buildJob(jobId);
@@ -54,6 +65,41 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(createdJob.getJobType(), is(Job.ANOMALY_DETECTOR_JOB_TYPE));
}
+ public void testGetJob() throws Exception {
+ String jobId1 = randomValidJobId();
+ String jobId2 = randomValidJobId();
+
+ Job job1 = buildJob(jobId1);
+ Job job2 = buildJob(jobId2);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
+ machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
+
+ GetJobRequest request = new GetJobRequest(jobId1, jobId2);
+
+ // Test getting specific jobs
+ GetJobResponse response = execute(request, machineLearningClient::getJob, machineLearningClient::getJobAsync);
+
+ assertEquals(2, response.count());
+ assertThat(response.jobs(), hasSize(2));
+ assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), containsInAnyOrder(jobId1, jobId2));
+
+ // Test getting all jobs explicitly
+ request = GetJobRequest.getAllJobsRequest();
+ response = execute(request, machineLearningClient::getJob, machineLearningClient::getJobAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobs().size() >= 2L);
+ assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all jobs implicitly
+ response = execute(new GetJobRequest(), machineLearningClient::getJob, machineLearningClient::getJobAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobs().size() >= 2L);
+ assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+ }
+
public void testDeleteJob() throws Exception {
String jobId = randomValidJobId();
Job job = buildJob(jobId);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java
new file mode 100644
index 00000000000..7ad86576245
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.test.rest.ESRestTestCase;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This is temporarily duplicated from the server side.
+ * TODO: Replace with an implementation using the HLRC once
+ * the APIs for managing datafeeds are implemented.
+ */
+public class MlRestTestStateCleaner {
+
+ private final Logger logger;
+ private final RestClient adminClient;
+
+ public MlRestTestStateCleaner(Logger logger, RestClient adminClient) {
+ this.logger = logger;
+ this.adminClient = adminClient;
+ }
+
+ public void clearMlMetadata() throws IOException {
+ deleteAllDatafeeds();
+ deleteAllJobs();
+ // indices will be deleted by the ESRestTestCase class
+ }
+
+ @SuppressWarnings("unchecked")
+ private void deleteAllDatafeeds() throws IOException {
+ final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds");
+ datafeedsRequest.addParameter("filter_path", "datafeeds");
+ final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest);
+ final List