diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
index 30e79d1dce2..8a04c229de2 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
@@ -28,10 +28,12 @@ import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetJobRequest;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.client.ml.FlushJobRequest;
import java.io.IOException;
@@ -126,6 +128,36 @@ final class MLRequestConverters {
return request;
}
+ static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(flushJobRequest.getJobId())
+ .addPathPartAsIs("_flush")
+ .build();
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
+ static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
+ .addPathPartAsIs("_stats")
+ .build();
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+ RequestConverters.Params params = new RequestConverters.Params(request);
+ if (getJobStatsRequest.isAllowNoJobs() != null) {
+ params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.isAllowNoJobs()));
+ }
+ return request;
+ }
+
static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
index a972f760d2f..ac44f16b80b 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
@@ -19,6 +19,11 @@
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
+import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;
@@ -288,6 +293,101 @@ public final class MachineLearningClient {
Collections.emptySet());
}
+ /**
+ * Flushes internally buffered data for the given Machine Learning Job, ensuring all data sent to the job has been processed.
+ * This may cause new results to be calculated depending on the contents of the buffer.
+ *
+ * Both flush and close operations are similar,
+ * however the flush is more efficient if you are expecting to send more data for analysis.
+ *
+ * When flushing, the job remains open and is available to continue analyzing data.
+ * A close operation additionally prunes and persists the model state to disk and the
+ * job must be opened again before analyzing further data.
+ *
+ *
+ * For additional info
+ * see Flush ML job documentation
+ *
+ * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return A {@link FlushJobResponse} object containing the flush acknowledgement and additional data
+ * @throws IOException when there is a serialization issue sending the request or receiving the response
+ */
+ public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Flushes internally buffered data for the given Machine Learning Job asynchronously, ensuring all data sent to the job has been processed.
+ * This may cause new results to be calculated depending on the contents of the buffer.
+ *
+ * Both flush and close operations are similar,
+ * however the flush is more efficient if you are expecting to send more data for analysis.
+ *
+ * When flushing, the job remains open and is available to continue analyzing data.
+ * A close operation additionally prunes and persists the model state to disk and the
+ * job must be opened again before analyzing further data.
+ *
+ *
+ * For additional info
+ * see Flush ML job documentation
+ *
+ * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified upon request completion
+ */
+ public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets usage statistics for one or more Machine Learning jobs
+ *
+ *
+ * For additional info
+ * see Get Job stats docs
+ *
+ * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return {@link GetJobStatsResponse} response object containing
+ * the {@link JobStats} objects and the number of jobs found
+ * @throws IOException when there is a serialization issue sending the request or receiving the response
+ */
+ public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getJobStats,
+ options,
+ GetJobStatsResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets usage statistics for one or more Machine Learning jobs, asynchronously.
+ *
+ *
+ * For additional info
+ * see Get Job stats docs
+ *
+ * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
+ */
+ public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener<GetJobStatsResponse> listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getJobStats,
+ options,
+ GetJobStatsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
/**
* Gets the records for a Machine Learning Job.
*
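
For illustration only (not part of the patch): the two blocking methods added above can be exercised roughly as follows, assuming an already configured RestHighLevelClient named `client` and an existing, opened job with the hypothetical id "my-job".

import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.job.stats.JobStats;

public class MlFlushAndStatsExample {

    // "client" is assumed to be connected to a cluster with X-Pack ML enabled.
    static void flushAndReportStats(RestHighLevelClient client) throws IOException {
        // Flush the job's internal buffer and request interim results.
        FlushJobRequest flushRequest = new FlushJobRequest("my-job");
        flushRequest.setCalcInterim(true);
        FlushJobResponse flushResponse =
            client.machineLearning().flushJob(flushRequest, RequestOptions.DEFAULT);
        System.out.println("flushed: " + flushResponse.isFlushed());

        // Fetch usage statistics for the same job.
        GetJobStatsRequest statsRequest = new GetJobStatsRequest("my-job");
        GetJobStatsResponse statsResponse =
            client.machineLearning().getJobStats(statsRequest, RequestOptions.DEFAULT);
        for (JobStats stats : statsResponse.jobStats()) {
            System.out.println(stats.getJobId() + " is " + stats.getState());
        }
    }
}
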
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java
new file mode 100644
index 00000000000..067851d4526
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Request object to flush a given Machine Learning job.
+ */
+public class FlushJobRequest extends ActionRequest implements ToXContentObject {
+
+ public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
+ public static final ParseField START = new ParseField("start");
+ public static final ParseField END = new ParseField("end");
+ public static final ParseField ADVANCE_TIME = new ParseField("advance_time");
+ public static final ParseField SKIP_TIME = new ParseField("skip_time");
+
+ public static final ConstructingObjectParser<FlushJobRequest, Void> PARSER =
+ new ConstructingObjectParser<>("flush_job_request", (a) -> new FlushJobRequest((String) a[0]));
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+ PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM);
+ PARSER.declareString(FlushJobRequest::setStart, START);
+ PARSER.declareString(FlushJobRequest::setEnd, END);
+ PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME);
+ PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME);
+ }
+
+ private final String jobId;
+ private Boolean calcInterim;
+ private String start;
+ private String end;
+ private String advanceTime;
+ private String skipTime;
+
+ /**
+ * Create new Flush job request
+ *
+ * @param jobId The job ID of the job to flush
+ */
+ public FlushJobRequest(String jobId) {
+ this.jobId = jobId;
+ }
+
+ public String getJobId() {
+ return jobId;
+ }
+
+ public Boolean getCalcInterim() {
+ return calcInterim;
+ }
+
+ /**
+ * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period.
+ *
+ * @param calcInterim defaults to {@code false}.
+ */
+ public void setCalcInterim(boolean calcInterim) {
+ this.calcInterim = calcInterim;
+ }
+
+ public String getStart() {
+ return start;
+ }
+
+ /**
+ * When used in conjunction with {@link FlushJobRequest#calcInterim},
+ * specifies the start of the range of buckets on which to calculate interim results.
+ *
+ * @param start the beginning of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setStart(String start) {
+ this.start = start;
+ }
+
+ public String getEnd() {
+ return end;
+ }
+
+ /**
+ * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range
+ * of buckets on which to calculate interim results
+ *
+ * @param end the end of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setEnd(String end) {
+ this.end = end;
+ }
+
+ public String getAdvanceTime() {
+ return advanceTime;
+ }
+
+ /**
+ * Specifies to advance to a particular time value.
+ * Results are generated and the model is updated for data from the specified time interval.
+ *
+ * @param advanceTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setAdvanceTime(String advanceTime) {
+ this.advanceTime = advanceTime;
+ }
+
+ public String getSkipTime() {
+ return skipTime;
+ }
+
+ /**
+ * Specifies to skip to a particular time value.
+ * Results are not generated and the model is not updated for data from the specified time interval.
+ *
+ * @param skipTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setSkipTime(String skipTime) {
+ this.skipTime = skipTime;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ FlushJobRequest other = (FlushJobRequest) obj;
+ return Objects.equals(jobId, other.jobId) &&
+ calcInterim == other.calcInterim &&
+ Objects.equals(start, other.start) &&
+ Objects.equals(end, other.end) &&
+ Objects.equals(advanceTime, other.advanceTime) &&
+ Objects.equals(skipTime, other.skipTime);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(Job.ID.getPreferredName(), jobId);
+ if (calcInterim != null) {
+ builder.field(CALC_INTERIM.getPreferredName(), calcInterim);
+ }
+ if (start != null) {
+ builder.field(START.getPreferredName(), start);
+ }
+ if (end != null) {
+ builder.field(END.getPreferredName(), end);
+ }
+ if (advanceTime != null) {
+ builder.field(ADVANCE_TIME.getPreferredName(), advanceTime);
+ }
+ if (skipTime != null) {
+ builder.field(SKIP_TIME.getPreferredName(), skipTime);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+}
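
For illustration only (not part of the patch): a fully populated FlushJobRequest, with an invented job id and timestamps, and the request body it serializes to via toXContent.

import org.elasticsearch.client.ml.FlushJobRequest;

public class FlushJobRequestExample {
    public static void main(String[] args) {
        FlushJobRequest request = new FlushJobRequest("my-job"); // hypothetical job id
        request.setCalcInterim(true);         // calculate interim results for the latest buckets
        request.setStart("1403481600");       // epoch seconds, epoch millis or an ISO 8601 string
        request.setEnd("1403485200");
        request.setAdvanceTime("1403485200"); // advance the model to this timestamp
        // MLRequestConverters.flushJob(request) POSTs to
        // /_xpack/ml/anomaly_detectors/my-job/_flush with a body of the form:
        // {"job_id":"my-job","calc_interim":true,"start":"1403481600",
        //  "end":"1403485200","advance_time":"1403485200"}
    }
}
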
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java
new file mode 100644
index 00000000000..048b07b504a
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Objects;
+
+/**
+ * Response object containing flush acknowledgement and additional data
+ */
+public class FlushJobResponse extends ActionResponse implements ToXContentObject {
+
+ public static final ParseField FLUSHED = new ParseField("flushed");
+ public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end");
+
+ public static final ConstructingObjectParser<FlushJobResponse, Void> PARSER =
+ new ConstructingObjectParser<>("flush_job_response",
+ true,
+ (a) -> {
+ boolean flushed = (boolean) a[0];
+ Date date = a[1] == null ? null : new Date((long) a[1]);
+ return new FlushJobResponse(flushed, date);
+ });
+
+ static {
+ PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED);
+ PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END);
+ }
+
+ public static FlushJobResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
+ private final boolean flushed;
+ private final Date lastFinalizedBucketEnd;
+
+ public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) {
+ this.flushed = flushed;
+ this.lastFinalizedBucketEnd = lastFinalizedBucketEnd;
+ }
+
+ /**
+ * Was the job successfully flushed or not
+ */
+ public boolean isFlushed() {
+ return flushed;
+ }
+
+ /**
+ * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
+ */
+ @Nullable
+ public Date getLastFinalizedBucketEnd() {
+ return lastFinalizedBucketEnd;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(flushed, lastFinalizedBucketEnd);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+
+ FlushJobResponse that = (FlushJobResponse) other;
+ return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(FLUSHED.getPreferredName(), flushed);
+ if (lastFinalizedBucketEnd != null) {
+ builder.timeField(LAST_FINALIZED_BUCKET_END.getPreferredName(),
+ LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", lastFinalizedBucketEnd.getTime());
+ }
+ builder.endObject();
+ return builder;
+ }
+}
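
For illustration only (not part of the patch): a sketch of parsing a typical flush acknowledgement with the PARSER above; the JSON values are invented.

import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class FlushJobResponseParsingExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"flushed\":true,\"last_finalized_bucket_end\":1455234900000}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            FlushJobResponse response = FlushJobResponse.fromXContent(parser);
            System.out.println(response.isFlushed());                 // true
            System.out.println(response.getLastFinalizedBucketEnd()); // Date built from the epoch millis
        }
    }
}
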
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
new file mode 100644
index 00000000000..d8eb350755d
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+
+/**
+ * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds
+ *
+ * `_all` explicitly gets all the jobs' statistics in the cluster
+ * An empty request (no `jobId`s) implicitly gets all the jobs' statistics in the cluster
+ */
+public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {
+
+ public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<GetJobStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
+ "get_jobs_stats_request", a -> new GetJobStatsRequest((List<String>) a[0]));
+
+ static {
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
+ Job.ID, ObjectParser.ValueType.STRING_ARRAY);
+ PARSER.declareBoolean(GetJobStatsRequest::setAllowNoJobs, ALLOW_NO_JOBS);
+ }
+
+ private static final String ALL_JOBS = "_all";
+
+ private final List<String> jobIds;
+ private Boolean allowNoJobs;
+
+ /**
+ * Explicitly gets all jobs statistics
+ *
+ * @return a {@link GetJobStatsRequest} for all existing jobs
+ */
+ public static GetJobStatsRequest getAllJobStatsRequest() {
+ return new GetJobStatsRequest(ALL_JOBS);
+ }
+
+ GetJobStatsRequest(List<String> jobIds) {
+ if (jobIds.stream().anyMatch(Objects::isNull)) {
+ throw new NullPointerException("jobIds must not contain null values");
+ }
+ this.jobIds = new ArrayList<>(jobIds);
+ }
+
+ /**
+ * Get the specified Jobs' statistics via their unique jobIds
+ *
+ * @param jobIds must be non-null and each jobId must be non-null
+ */
+ public GetJobStatsRequest(String... jobIds) {
+ this(Arrays.asList(jobIds));
+ }
+
+ /**
+ * All the jobIds for which to get statistics
+ */
+ public List<String> getJobIds() {
+ return jobIds;
+ }
+
+ public Boolean isAllowNoJobs() {
+ return this.allowNoJobs;
+ }
+
+ /**
+ * Whether to ignore if a wildcard expression matches no jobs.
+ *
+ * This includes the `_all` string or the case where no jobs have been specified
+ *
+ * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
+ */
+ public void setAllowNoJobs(boolean allowNoJobs) {
+ this.allowNoJobs = allowNoJobs;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobIds, allowNoJobs);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+
+ GetJobStatsRequest that = (GetJobStatsRequest) other;
+ return Objects.equals(jobIds, that.jobIds) &&
+ Objects.equals(allowNoJobs, that.allowNoJobs);
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
+ if (allowNoJobs != null) {
+ builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
+ }
+ builder.endObject();
+ return builder;
+ }
+}
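
For illustration only (not part of the patch): how the request options map onto the REST call built by MLRequestConverters.getJobStats; the job ids are invented.

import org.elasticsearch.client.ml.GetJobStatsRequest;

public class GetJobStatsRequestExample {
    public static void main(String[] args) {
        // Explicit ids and wildcard expressions can be mixed.
        GetJobStatsRequest request = new GetJobStatsRequest("job-1", "weblogs-*");
        // Fail with a 404 instead of returning an empty result when the wildcard matches nothing.
        request.setAllowNoJobs(false);
        // -> GET /_xpack/ml/anomaly_detectors/job-1,weblogs-*/_stats?allow_no_jobs=false

        // Or explicitly request statistics for every job in the cluster.
        GetJobStatsRequest all = GetJobStatsRequest.getAllJobStatsRequest();
        // -> GET /_xpack/ml/anomaly_detectors/_all/_stats
    }
}
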
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java
new file mode 100644
index 00000000000..2e3ba113d19
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.client.ml.job.stats.JobStats;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Contains a {@link List} of the found {@link JobStats} objects and the total count found
+ */
+public class GetJobStatsResponse extends AbstractResultResponse<JobStats> {
+
+ public static final ParseField RESULTS_FIELD = new ParseField("jobs");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<GetJobStatsResponse, Void> PARSER =
+ new ConstructingObjectParser<>("jobs_stats_response", true,
+ a -> new GetJobStatsResponse((List<JobStats>) a[0], (long) a[1]));
+
+ static {
+ PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD);
+ PARSER.declareLong(constructorArg(), COUNT);
+ }
+
+ GetJobStatsResponse(List<JobStats> jobStats, long count) {
+ super(RESULTS_FIELD, jobStats, count);
+ }
+
+ /**
+ * The collection of {@link JobStats} objects found in the query
+ */
+ public List<JobStats> jobStats() {
+ return results;
+ }
+
+ public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(results, count);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ GetJobStatsResponse other = (GetJobStatsResponse) obj;
+ return Objects.equals(results, other.results) && count == other.count;
+ }
+
+ @Override
+ public final String toString() {
+ return Strings.toString(this);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
new file mode 100644
index 00000000000..892df340abd
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A POJO class containing an Elasticsearch node's attributes
+ */
+public class NodeAttributes implements ToXContentObject {
+
+ public static final ParseField ID = new ParseField("id");
+ public static final ParseField NAME = new ParseField("name");
+ public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id");
+ public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address");
+ public static final ParseField ATTRIBUTES = new ParseField("attributes");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<NodeAttributes, Void> PARSER =
+ new ConstructingObjectParser<>("node", true,
+ (a) -> {
+ int i = 0;
+ String id = (String) a[i++];
+ String name = (String) a[i++];
+ String ephemeralId = (String) a[i++];
+ String transportAddress = (String) a[i++];
+ Map<String, String> attributes = (Map<String, String>) a[i];
+ return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes);
+ });
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS);
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ (p, c) -> p.mapStrings(),
+ ATTRIBUTES,
+ ObjectParser.ValueType.OBJECT);
+ }
+
+ private final String id;
+ private final String name;
+ private final String ephemeralId;
+ private final String transportAddress;
+ private final Map<String, String> attributes;
+
+ public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map<String, String> attributes) {
+ this.id = id;
+ this.name = name;
+ this.ephemeralId = ephemeralId;
+ this.transportAddress = transportAddress;
+ this.attributes = Collections.unmodifiableMap(attributes);
+ }
+
+ /**
+ * The unique identifier of the node.
+ */
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * The node name.
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * The ephemeral id of the node.
+ */
+ public String getEphemeralId() {
+ return ephemeralId;
+ }
+
+ /**
+ * The host and port where transport HTTP connections are accepted.
+ */
+ public String getTransportAddress() {
+ return transportAddress;
+ }
+
+ /**
+ * Additional attributes related to this node e.g., {"ml.max_open_jobs": "10"}.
+ */
+ public Map<String, String> getAttributes() {
+ return attributes;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(ID.getPreferredName(), id);
+ builder.field(NAME.getPreferredName(), name);
+ builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId);
+ builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress);
+ builder.field(ATTRIBUTES.getPreferredName(), attributes);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, name, ephemeralId, transportAddress, attributes);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+
+ NodeAttributes that = (NodeAttributes) other;
+ return Objects.equals(id, that.id) &&
+ Objects.equals(name, that.name) &&
+ Objects.equals(ephemeralId, that.ephemeralId) &&
+ Objects.equals(transportAddress, that.transportAddress) &&
+ Objects.equals(attributes, that.attributes);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java
new file mode 100644
index 00000000000..32684bd7e62
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.config;
+
+import java.util.Locale;
+
+/**
+ * Jobs, whether running or complete, are in one of these states.
+ * When a job is created it is initialised in the state {@code closed},
+ * i.e. it is not running.
+ */
+public enum JobState {
+
+ CLOSING, CLOSED, OPENED, FAILED, OPENING;
+
+ public static JobState fromString(String name) {
+ return valueOf(name.trim().toUpperCase(Locale.ROOT));
+ }
+
+ public String value() {
+ return name().toLowerCase(Locale.ROOT);
+ }
+}
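
For illustration only (not part of the patch): the round trip between the lowercase state string used in REST responses and the enum.

import org.elasticsearch.client.ml.job.config.JobState;

public class JobStateExample {
    public static void main(String[] args) {
        JobState state = JobState.fromString("opened"); // case-insensitive
        System.out.println(state);         // OPENED
        System.out.println(state.value()); // "opened", as it appears on the wire
    }
}
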
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java
new file mode 100644
index 00000000000..a6b41beca83
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A class to hold statistics about forecasts.
+ */
+public class ForecastStats implements ToXContentObject {
+
+ public static final ParseField TOTAL = new ParseField("total");
+ public static final ParseField FORECASTED_JOBS = new ParseField("forecasted_jobs");
+ public static final ParseField MEMORY_BYTES = new ParseField("memory_bytes");
+ public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms");
+ public static final ParseField RECORDS = new ParseField("records");
+ public static final ParseField STATUS = new ParseField("status");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<ForecastStats, Void> PARSER =
+ new ConstructingObjectParser<>("forecast_stats",
+ true,
+ (a) -> {
+ int i = 0;
+ long total = (long)a[i++];
+ SimpleStats memoryStats = (SimpleStats)a[i++];
+ SimpleStats recordStats = (SimpleStats)a[i++];
+ SimpleStats runtimeStats = (SimpleStats)a[i++];
+ Map<String, Long> statusCounts = (Map<String, Long>) a[i];
+ return new ForecastStats(total, memoryStats, recordStats, runtimeStats, statusCounts);
+ });
+
+ static {
+ PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, MEMORY_BYTES);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, RECORDS);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, PROCESSING_TIME_MS);
+ PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
+ p -> {
+ Map<String, Long> counts = new HashMap<>();
+ p.map().forEach((key, value) -> counts.put(key, ((Number)value).longValue()));
+ return counts;
+ }, STATUS, ObjectParser.ValueType.OBJECT);
+ }
+
+ private final long total;
+ private final long forecastedJobs;
+ private SimpleStats memoryStats;
+ private SimpleStats recordStats;
+ private SimpleStats runtimeStats;
+ private Map<String, Long> statusCounts;
+
+ public ForecastStats(long total,
+ SimpleStats memoryStats,
+ SimpleStats recordStats,
+ SimpleStats runtimeStats,
+ Map<String, Long> statusCounts) {
+ this.total = total;
+ this.forecastedJobs = total > 0 ? 1 : 0;
+ if (total > 0) {
+ this.memoryStats = Objects.requireNonNull(memoryStats);
+ this.recordStats = Objects.requireNonNull(recordStats);
+ this.runtimeStats = Objects.requireNonNull(runtimeStats);
+ this.statusCounts = Collections.unmodifiableMap(statusCounts);
+ }
+ }
+
+ /**
+ * The number of forecasts currently available for this model.
+ */
+ public long getTotal() {
+ return total;
+ }
+
+ /**
+ * The number of jobs that have at least one forecast.
+ */
+ public long getForecastedJobs() {
+ return forecastedJobs;
+ }
+
+ /**
+ * Statistics about the memory usage: minimum, maximum, average and total.
+ */
+ public SimpleStats getMemoryStats() {
+ return memoryStats;
+ }
+
+ /**
+ * Statistics about the number of forecast records: minimum, maximum, average and total.
+ */
+ public SimpleStats getRecordStats() {
+ return recordStats;
+ }
+
+ /**
+ * Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total
+ */
+ public SimpleStats getRuntimeStats() {
+ return runtimeStats;
+ }
+
+ /**
+ * Counts per forecast status, for example: {"finished" : 2}.
+ */
+ public Map<String, Long> getStatusCounts() {
+ return statusCounts;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(TOTAL.getPreferredName(), total);
+ builder.field(FORECASTED_JOBS.getPreferredName(), forecastedJobs);
+
+ if (total > 0) {
+ builder.field(MEMORY_BYTES.getPreferredName(), memoryStats);
+ builder.field(RECORDS.getPreferredName(), recordStats);
+ builder.field(PROCESSING_TIME_MS.getPreferredName(), runtimeStats);
+ builder.field(STATUS.getPreferredName(), statusCounts);
+ }
+ return builder.endObject();
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ ForecastStats other = (ForecastStats) obj;
+ return Objects.equals(total, other.total) &&
+ Objects.equals(forecastedJobs, other.forecastedJobs) &&
+ Objects.equals(memoryStats, other.memoryStats) &&
+ Objects.equals(recordStats, other.recordStats) &&
+ Objects.equals(runtimeStats, other.runtimeStats) &&
+ Objects.equals(statusCounts, other.statusCounts);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java
new file mode 100644
index 00000000000..df5be4aa4c5
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.process.DataCounts;
+import org.elasticsearch.client.ml.job.process.ModelSizeStats;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.client.ml.NodeAttributes;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Class containing the statistics for a Machine Learning job.
+ *
+ */
+public class JobStats implements ToXContentObject {
+
+ private static final ParseField DATA_COUNTS = new ParseField("data_counts");
+ private static final ParseField MODEL_SIZE_STATS = new ParseField("model_size_stats");
+ private static final ParseField FORECASTS_STATS = new ParseField("forecasts_stats");
+ private static final ParseField STATE = new ParseField("state");
+ private static final ParseField NODE = new ParseField("node");
+ private static final ParseField OPEN_TIME = new ParseField("open_time");
+ private static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
+
+ public static final ConstructingObjectParser<JobStats, Void> PARSER =
+ new ConstructingObjectParser<>("job_stats",
+ true,
+ (a) -> {
+ int i = 0;
+ String jobId = (String) a[i++];
+ DataCounts dataCounts = (DataCounts) a[i++];
+ JobState jobState = (JobState) a[i++];
+ ModelSizeStats.Builder modelSizeStatsBuilder = (ModelSizeStats.Builder) a[i++];
+ ModelSizeStats modelSizeStats = modelSizeStatsBuilder == null ? null : modelSizeStatsBuilder.build();
+ ForecastStats forecastStats = (ForecastStats) a[i++];
+ NodeAttributes node = (NodeAttributes) a[i++];
+ String assignmentExplanation = (String) a[i++];
+ TimeValue openTime = (TimeValue) a[i];
+ return new JobStats(jobId,
+ dataCounts,
+ jobState,
+ modelSizeStats,
+ forecastStats,
+ node,
+ assignmentExplanation,
+ openTime);
+ });
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataCounts.PARSER, DATA_COUNTS);
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ (p) -> JobState.fromString(p.text()),
+ STATE,
+ ObjectParser.ValueType.VALUE);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER, MODEL_SIZE_STATS);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastStats.PARSER, FORECASTS_STATS);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
+ PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
+ PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
+ (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), OPEN_TIME.getPreferredName()),
+ OPEN_TIME,
+ ObjectParser.ValueType.STRING_OR_NULL);
+ }
+
+
+ private final String jobId;
+ private final DataCounts dataCounts;
+ private final JobState state;
+ private final ModelSizeStats modelSizeStats;
+ private final ForecastStats forecastStats;
+ private final NodeAttributes node;
+ private final String assignmentExplanation;
+ private final TimeValue openTime;
+
+ JobStats(String jobId, DataCounts dataCounts, JobState state, @Nullable ModelSizeStats modelSizeStats,
+ @Nullable ForecastStats forecastStats, @Nullable NodeAttributes node,
+ @Nullable String assignmentExplanation, @Nullable TimeValue opentime) {
+ this.jobId = Objects.requireNonNull(jobId);
+ this.dataCounts = Objects.requireNonNull(dataCounts);
+ this.state = Objects.requireNonNull(state);
+ this.modelSizeStats = modelSizeStats;
+ this.forecastStats = forecastStats;
+ this.node = node;
+ this.assignmentExplanation = assignmentExplanation;
+ this.openTime = opentime;
+ }
+
+ /**
+ * The jobId referencing the job for these statistics
+ */
+ public String getJobId() {
+ return jobId;
+ }
+
+ /**
+ * An object that describes the number of records processed and any related error counts
+ * See {@link DataCounts}
+ */
+ public DataCounts getDataCounts() {
+ return dataCounts;
+ }
+
+ /**
+ * An object that provides information about the size and contents of the model.
+ * See {@link ModelSizeStats}
+ */
+ public ModelSizeStats getModelSizeStats() {
+ return modelSizeStats;
+ }
+
+ /**
+ * An object that provides statistical information about forecasts of this job.
+ * See {@link ForecastStats}
+ */
+ public ForecastStats getForecastStats() {
+ return forecastStats;
+ }
+
+ /**
+ * The status of the job
+ * See {@link JobState}
+ */
+ public JobState getState() {
+ return state;
+ }
+
+ /**
+ * For open jobs only, contains information about the node where the job runs
+ * See {@link NodeAttributes}
+ */
+ public NodeAttributes getNode() {
+ return node;
+ }
+
+ /**
+ * For open jobs only, contains messages relating to the selection of a node to run the job.
+ */
+ public String getAssignmentExplanation() {
+ return assignmentExplanation;
+ }
+
+ /**
+ * For open jobs only, the elapsed time for which the job has been open
+ */
+ public TimeValue getOpenTime() {
+ return openTime;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(Job.ID.getPreferredName(), jobId);
+ builder.field(DATA_COUNTS.getPreferredName(), dataCounts);
+ builder.field(STATE.getPreferredName(), state.toString());
+ if (modelSizeStats != null) {
+ builder.field(MODEL_SIZE_STATS.getPreferredName(), modelSizeStats);
+ }
+ if (forecastStats != null) {
+ builder.field(FORECASTS_STATS.getPreferredName(), forecastStats);
+ }
+ if (node != null) {
+ builder.field(NODE.getPreferredName(), node);
+ }
+ if (assignmentExplanation != null) {
+ builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
+ }
+ if (openTime != null) {
+ builder.field(OPEN_TIME.getPreferredName(), openTime.getStringRep());
+ }
+ return builder.endObject();
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobId, dataCounts, modelSizeStats, forecastStats, state, node, assignmentExplanation, openTime);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ JobStats other = (JobStats) obj;
+ return Objects.equals(jobId, other.jobId) &&
+ Objects.equals(this.dataCounts, other.dataCounts) &&
+ Objects.equals(this.modelSizeStats, other.modelSizeStats) &&
+ Objects.equals(this.forecastStats, other.forecastStats) &&
+ Objects.equals(this.state, other.state) &&
+ Objects.equals(this.node, other.node) &&
+ Objects.equals(this.assignmentExplanation, other.assignmentExplanation) &&
+ Objects.equals(this.openTime, other.openTime);
+ }
+}
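
For illustration only (not part of the patch): one way the per-job fields might be read once a stats response has been parsed. DataCounts#getProcessedRecordCount is assumed from the existing DataCounts class; it is not part of this change.

import org.elasticsearch.client.ml.job.config.JobState;
import org.elasticsearch.client.ml.job.stats.JobStats;

public class JobStatsReportExample {

    static String describe(JobStats stats) {
        StringBuilder sb = new StringBuilder(stats.getJobId())
            .append(": ").append(stats.getState())
            .append(", processed records=")
            .append(stats.getDataCounts().getProcessedRecordCount());
        if (stats.getState() == JobState.OPENED && stats.getNode() != null) {
            // Node assignment and open time are only reported for open jobs.
            sb.append(", node=").append(stats.getNode().getName())
                .append(", open for ").append(stats.getOpenTime());
        }
        return sb.toString();
    }
}
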
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java
new file mode 100644
index 00000000000..f4c8aa0fa3b
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Helper class for min, max, avg and total statistics for a quantity
+ */
+public class SimpleStats implements ToXContentObject {
+
+ public static final ParseField MIN = new ParseField("min");
+ public static final ParseField MAX = new ParseField("max");
+ public static final ParseField AVG = new ParseField("avg");
+ public static final ParseField TOTAL = new ParseField("total");
+
+ public static final ConstructingObjectParser<SimpleStats, Void> PARSER = new ConstructingObjectParser<>("simple_stats", true,
+ (a) -> {
+ int i = 0;
+ double total = (double)a[i++];
+ double min = (double)a[i++];
+ double max = (double)a[i++];
+ double avg = (double)a[i++];
+ return new SimpleStats(total, min, max, avg);
+ });
+
+ static {
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), TOTAL);
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MIN);
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX);
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AVG);
+ }
+
+ private final double total;
+ private final double min;
+ private final double max;
+ private final double avg;
+
+ SimpleStats(double total, double min, double max, double avg) {
+ this.total = total;
+ this.min = min;
+ this.max = max;
+ this.avg = avg;
+ }
+
+ public double getMin() {
+ return min;
+ }
+
+ public double getMax() {
+ return max;
+ }
+
+ public double getAvg() {
+ return avg;
+ }
+
+ public double getTotal() {
+ return total;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(total, min, max, avg);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ SimpleStats other = (SimpleStats) obj;
+ return Objects.equals(total, other.total) &&
+ Objects.equals(min, other.min) &&
+ Objects.equals(avg, other.avg) &&
+ Objects.equals(max, other.max);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(MIN.getPreferredName(), min);
+ builder.field(MAX.getPreferredName(), max);
+ builder.field(AVG.getPreferredName(), avg);
+ builder.field(TOTAL.getPreferredName(), total);
+ builder.endObject();
+ return builder;
+ }
+}
+
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
index 43f3ef41a8d..d84099d9a3c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
@@ -36,6 +36,8 @@ import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayOutputStream;
@@ -139,6 +141,44 @@ public class MLRequestConvertersTests extends ESTestCase {
}
}
+ public void testFlushJob() throws Exception {
+ String jobId = randomAlphaOfLength(10);
+ FlushJobRequest flushJobRequest = new FlushJobRequest(jobId);
+
+ Request request = MLRequestConverters.flushJob(flushJobRequest);
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_flush", request.getEndpoint());
+ assertEquals("{\"job_id\":\"" + jobId + "\"}", requestEntityToString(request));
+
+ flushJobRequest.setSkipTime("1000");
+ flushJobRequest.setStart("105");
+ flushJobRequest.setEnd("200");
+ flushJobRequest.setAdvanceTime("100");
+ flushJobRequest.setCalcInterim(true);
+ request = MLRequestConverters.flushJob(flushJobRequest);
+ assertEquals(
+ "{\"job_id\":\"" + jobId + "\",\"calc_interim\":true,\"start\":\"105\"," +
+ "\"end\":\"200\",\"advance_time\":\"100\",\"skip_time\":\"1000\"}",
+ requestEntityToString(request));
+ }
+
+ public void testGetJobStats() {
+ GetJobStatsRequest getJobStatsRequestRequest = new GetJobStatsRequest();
+
+ Request request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/_stats", request.getEndpoint());
+ assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+
+ getJobStatsRequestRequest = new GetJobStatsRequest("job1", "jobs*");
+ getJobStatsRequestRequest.setAllowNoJobs(true);
+ request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
+ assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+ }
+
private static Job createValidJob(String jobId) {
AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
Detector.builder().setFunction("count").build()));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
index cb9dbea129d..cd4b6ffc769 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
@@ -19,6 +19,12 @@
package org.elasticsearch.client;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;
@@ -34,6 +40,8 @@ import org.elasticsearch.client.ml.job.config.DataDescription;
import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
import org.junit.After;
import java.io.IOException;
@@ -41,6 +49,7 @@ import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
+import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
@@ -138,6 +147,77 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertTrue(response.isClosed());
}
+ public void testFlushJob() throws Exception {
+ String jobId = randomValidJobId();
+ Job job = buildJob(jobId);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+ machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT);
+
+ FlushJobResponse response = execute(new FlushJobRequest(jobId),
+ machineLearningClient::flushJob,
+ machineLearningClient::flushJobAsync);
+ assertTrue(response.isFlushed());
+ }
+
+ public void testGetJobStats() throws Exception {
+ String jobId1 = "ml-get-job-stats-test-id-1";
+ String jobId2 = "ml-get-job-stats-test-id-2";
+
+ Job job1 = buildJob(jobId1);
+ Job job2 = buildJob(jobId2);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
+ machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
+
+ machineLearningClient.openJob(new OpenJobRequest(jobId1), RequestOptions.DEFAULT);
+
+ GetJobStatsRequest request = new GetJobStatsRequest(jobId1, jobId2);
+
+ // Test getting specific
+ GetJobStatsResponse response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertEquals(2, response.count());
+ assertThat(response.jobStats(), hasSize(2));
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), containsInAnyOrder(jobId1, jobId2));
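+ // Only job1 was opened above, so job2 is expected to still be closed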
+ for (JobStats stats : response.jobStats()) {
+ if (stats.getJobId().equals(jobId1)) {
+ assertEquals(JobState.OPENED, stats.getState());
+ } else {
+ assertEquals(JobState.CLOSED, stats.getState());
+ }
+ }
+
+ // Test getting all explicitly
+ request = GetJobStatsRequest.getAllJobStatsRequest();
+ response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all implicitly
+ response = execute(new GetJobStatsRequest(), machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all with wildcard
+ request = new GetJobStatsRequest("ml-get-job-stats-test-id-*");
+ response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test when allow_no_jobs is false
+ final GetJobStatsRequest erroredRequest = new GetJobStatsRequest("jobs-that-do-not-exist*");
+ erroredRequest.setAllowNoJobs(false);
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+ () -> execute(erroredRequest, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync));
+ assertThat(exception.status().getStatus(), equalTo(404));
+ }
+
public static String randomValidJobId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
index 94793f0ab79..f92f01f6bad 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
@@ -35,6 +35,8 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
@@ -50,6 +52,9 @@ import org.elasticsearch.client.ml.job.results.Bucket;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
+import org.elasticsearch.client.ml.job.stats.JobStats;
import org.junit.After;
import java.io.IOException;
@@ -458,6 +463,127 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
+ public void testFlushJob() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ Job job = MachineLearningIT.buildJob("flushing-my-first-machine-learning-job");
+ client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+ client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
+
+ Job secondJob = MachineLearningIT.buildJob("flushing-my-second-machine-learning-job");
+ client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+ client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT);
+
+ {
+ //tag::x-pack-ml-flush-job-request
+ FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); //<1>
+ //end::x-pack-ml-flush-job-request
+
+ //tag::x-pack-ml-flush-job-request-options
+ flushJobRequest.setCalcInterim(true); //<1>
+ flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); //<2>
+ flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); //<3>
+ flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); //<4>
+ flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); //<5>
+ //end::x-pack-ml-flush-job-request-options
+
+ //tag::x-pack-ml-flush-job-execute
+ FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT);
+ //end::x-pack-ml-flush-job-execute
+
+ //tag::x-pack-ml-flush-job-response
+ boolean isFlushed = flushJobResponse.isFlushed(); //<1>
+ Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); //<2>
+ //end::x-pack-ml-flush-job-response
+
+ }
+ {
+ //tag::x-pack-ml-flush-job-listener
+ ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
+ @Override
+ public void onResponse(FlushJobResponse flushJobResponse) {
+ //<1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ //end::x-pack-ml-flush-job-listener
+ FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job");
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::x-pack-ml-flush-job-execute-async
+ client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); //<1>
+ // end::x-pack-ml-flush-job-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
+
+ public void testGetJobStats() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ Job job = MachineLearningIT.buildJob("get-machine-learning-job-stats1");
+ client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+ Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job-stats2");
+ client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+
+ {
+ //tag::x-pack-ml-get-job-stats-request
+ GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); //<1>
+ request.setAllowNoJobs(true); //<2>
+ //end::x-pack-ml-get-job-stats-request
+
+ //tag::x-pack-ml-get-job-stats-execute
+ GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
+ //end::x-pack-ml-get-job-stats-execute
+
+ //tag::x-pack-ml-get-job-stats-response
+ long numberOfJobStats = response.count(); //<1>
+ List<JobStats> jobStats = response.jobStats(); //<2>
+ //end::x-pack-ml-get-job-stats-response
+
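+ // Both jobs are matched: one by its exact id and the other by the wildcard expression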
+ assertEquals(2, response.count());
+ assertThat(response.jobStats(), hasSize(2));
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()),
+ containsInAnyOrder(job.getId(), secondJob.getId()));
+ }
+ {
+ GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*");
+
+ // tag::x-pack-ml-get-job-stats-listener
+ ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
+ @Override
+ public void onResponse(GetJobStatsResponse response) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::x-pack-ml-get-job-stats-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::x-pack-ml-get-job-stats-execute-async
+ client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+ // end::x-pack-ml-get-job-stats-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
public void testGetRecords() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobRequestTests.java
new file mode 100644
index 00000000000..c2bddd436cc
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobRequestTests.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class FlushJobRequestTests extends AbstractXContentTestCase<FlushJobRequest> {
+
+ @Override
+ protected FlushJobRequest createTestInstance() {
+ FlushJobRequest request = new FlushJobRequest(randomAlphaOfLengthBetween(1, 20));
+
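+ // Optional fields are only set some of the time so both present and absent values get serialized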
+ if (randomBoolean()) {
+ request.setCalcInterim(randomBoolean());
+ }
+ if (randomBoolean()) {
+ request.setAdvanceTime(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setStart(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setEnd(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setSkipTime(String.valueOf(randomLong()));
+ }
+ return request;
+ }
+
+ @Override
+ protected FlushJobRequest doParseInstance(XContentParser parser) throws IOException {
+ return FlushJobRequest.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobResponseTests.java
new file mode 100644
index 00000000000..bc968ff4564
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobResponseTests.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.Date;
+
+public class FlushJobResponseTests extends AbstractXContentTestCase<FlushJobResponse> {
+
+ @Override
+ protected FlushJobResponse createTestInstance() {
+ return new FlushJobResponse(randomBoolean(),
+ randomBoolean() ? null : new Date(randomNonNegativeLong()));
+ }
+
+ @Override
+ protected FlushJobResponse doParseInstance(XContentParser parser) throws IOException {
+ return FlushJobResponse.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java
index 181804c9676..8cc990730f7 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java
@@ -26,6 +26,7 @@ import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.function.Predicate;
public class GetJobResponseTests extends AbstractXContentTestCase<GetJobResponse> {
@@ -46,8 +47,13 @@ public class GetJobResponseTests extends AbstractXContentTestCase<GetJobResponse> {
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
@Override
protected boolean supportsUnknownFields() {
- return false;
+ return true;
}
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsRequestTests.java
new file mode 100644
index 00000000000..690e5829766
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsRequestTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GetJobStatsRequestTests extends AbstractXContentTestCase<GetJobStatsRequest> {
+
+ public void testAllJobsRequest() {
+ GetJobStatsRequest request = GetJobStatsRequest.getAllJobStatsRequest();
+
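+ // "_all" is the job id used to request stats for every job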
+ assertEquals(request.getJobIds().size(), 1);
+ assertEquals(request.getJobIds().get(0), "_all");
+ }
+
+ public void testNewWithJobId() {
+ Exception exception = expectThrows(NullPointerException.class, () -> new GetJobStatsRequest("job", null));
+ assertEquals(exception.getMessage(), "jobIds must not contain null values");
+ }
+
+ @Override
+ protected GetJobStatsRequest createTestInstance() {
+ int jobCount = randomIntBetween(0, 10);
+ List<String> jobIds = new ArrayList<>(jobCount);
+
+ for (int i = 0; i < jobCount; i++) {
+ jobIds.add(randomAlphaOfLength(10));
+ }
+
+ GetJobStatsRequest request = new GetJobStatsRequest(jobIds);
+
+ if (randomBoolean()) {
+ request.setAllowNoJobs(randomBoolean());
+ }
+
+ return request;
+ }
+
+ @Override
+ protected GetJobStatsRequest doParseInstance(XContentParser parser) throws IOException {
+ return GetJobStatsRequest.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsResponseTests.java
new file mode 100644
index 00000000000..23f7bcc042b
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsResponseTests.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.client.ml.job.stats.JobStats;
+import org.elasticsearch.client.ml.job.stats.JobStatsTests;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GetJobStatsResponseTests extends AbstractXContentTestCase<GetJobStatsResponse> {
+
+ @Override
+ protected GetJobStatsResponse createTestInstance() {
+
+ int count = randomIntBetween(1, 5);
+ List<JobStats> results = new ArrayList<>(count);
+ for(int i = 0; i < count; i++) {
+ results.add(JobStatsTests.createRandomInstance());
+ }
+
+ return new GetJobStatsResponse(results, count);
+ }
+
+ @Override
+ protected GetJobStatsResponse doParseInstance(XContentParser parser) throws IOException {
+ return GetJobStatsResponse.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/NodeAttributesTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/NodeAttributesTests.java
new file mode 100644
index 00000000000..cee1710a622
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/NodeAttributesTests.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class NodeAttributesTests extends AbstractXContentTestCase<NodeAttributes> {
+
+ public static NodeAttributes createRandom() {
+ int numberOfAttributes = randomIntBetween(1, 10);
+ Map<String, String> attributes = new HashMap<>(numberOfAttributes);
+ for(int i = 0; i < numberOfAttributes; i++) {
+ String val = randomAlphaOfLength(10);
+ attributes.put("key-"+i, val);
+ }
+ return new NodeAttributes(randomAlphaOfLength(10),
+ randomAlphaOfLength(10),
+ randomAlphaOfLength(10),
+ randomAlphaOfLength(10),
+ attributes);
+ }
+
+ @Override
+ protected NodeAttributes createTestInstance() {
+ return createRandom();
+ }
+
+ @Override
+ protected NodeAttributes doParseInstance(XContentParser parser) throws IOException {
+ return NodeAttributes.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/ForecastStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/ForecastStatsTests.java
new file mode 100644
index 00000000000..16dfa305479
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/ForecastStatsTests.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class ForecastStatsTests extends AbstractXContentTestCase<ForecastStats> {
+
+ @Override
+ public ForecastStats createTestInstance() {
+ if (randomBoolean()) {
+ return createRandom(1, 22);
+ }
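+ // A total of zero means no forecasts exist, in which case the nested stats are all null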
+ return new ForecastStats(0, null, null, null, null);
+ }
+
+ @Override
+ protected ForecastStats doParseInstance(XContentParser parser) throws IOException {
+ return ForecastStats.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
+ public static ForecastStats createRandom(long minTotal, long maxTotal) {
+ return new ForecastStats(
+ randomLongBetween(minTotal, maxTotal),
+ SimpleStatsTests.createRandom(),
+ SimpleStatsTests.createRandom(),
+ SimpleStatsTests.createRandom(),
+ createCountStats());
+ }
+
+ private static Map<String, Long> createCountStats() {
+ Map<String, Long> countStats = new HashMap<>();
+ for (int i = 0; i < randomInt(10); ++i) {
+ countStats.put(randomAlphaOfLengthBetween(1, 20), randomLongBetween(1L, 100L));
+ }
+ return countStats;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/JobStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/JobStatsTests.java
new file mode 100644
index 00000000000..5d00f879140
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/JobStatsTests.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.client.ml.NodeAttributes;
+import org.elasticsearch.client.ml.NodeAttributesTests;
+import org.elasticsearch.client.ml.job.process.DataCounts;
+import org.elasticsearch.client.ml.job.process.DataCountsTests;
+import org.elasticsearch.client.ml.job.process.ModelSizeStats;
+import org.elasticsearch.client.ml.job.process.ModelSizeStatsTests;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.config.JobTests;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.function.Predicate;
+
+
+public class JobStatsTests extends AbstractXContentTestCase<JobStats> {
+
+ public static JobStats createRandomInstance() {
+ String jobId = JobTests.randomValidJobId();
+ JobState state = randomFrom(JobState.CLOSING, JobState.CLOSED, JobState.OPENED, JobState.FAILED, JobState.OPENING);
+ DataCounts dataCounts = DataCountsTests.createTestInstance(jobId);
+
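+ // The remaining fields are optional in a job stats response, so they are randomly left as null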
+ ModelSizeStats modelSizeStats = randomBoolean() ? ModelSizeStatsTests.createRandomized() : null;
+ ForecastStats forecastStats = randomBoolean() ? ForecastStatsTests.createRandom(1, 22) : null;
+ NodeAttributes nodeAttributes = randomBoolean() ? NodeAttributesTests.createRandom() : null;
+ String assignmentExplanation = randomBoolean() ? randomAlphaOfLength(10) : null;
+ TimeValue openTime = randomBoolean() ? TimeValue.timeValueMillis(randomIntBetween(1, 10000)) : null;
+
+ return new JobStats(jobId, dataCounts, state, modelSizeStats, forecastStats, nodeAttributes, assignmentExplanation, openTime);
+ }
+
+ @Override
+ protected JobStats createTestInstance() {
+ return createRandomInstance();
+ }
+
+ @Override
+ protected JobStats doParseInstance(XContentParser parser) throws IOException {
+ return JobStats.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/SimpleStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/SimpleStatsTests.java
new file mode 100644
index 00000000000..eb9e47af9ba
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/SimpleStatsTests.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+
+public class SimpleStatsTests extends AbstractXContentTestCase<SimpleStats> {
+
+ @Override
+ protected SimpleStats createTestInstance() {
+ return createRandom();
+ }
+
+ @Override
+ protected SimpleStats doParseInstance(XContentParser parser) throws IOException {
+ return SimpleStats.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ public static SimpleStats createRandom() {
+ return new SimpleStats(randomDouble(), randomDouble(), randomDouble(), randomDouble());
+ }
+}
diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
index 934b9526086..a7afbc8ffbd 100644
--- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
+++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
@@ -85,7 +85,7 @@ import static java.util.Collections.singletonList;
* The hosts that are part of the cluster need to be provided at creation time, but can also be replaced later
* by calling {@link #setNodes(Collection)}.
*
- * The method {@link #performRequest(String, String, Map, HttpEntity, Header...)} allows to send a request to the cluster. When
+ * The method {@link #performRequest(Request)} allows to send a request to the cluster. When
* sending a request, a host gets selected out of the provided ones in a round-robin fashion. Failing hosts are marked dead and
* retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously
* failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead nodes that
@@ -145,17 +145,6 @@ public class RestClient implements Closeable {
return new RestClientBuilder(hostsToNodes(hosts));
}
- /**
- * Replaces the hosts with which the client communicates.
- *
- * @deprecated prefer {@link #setNodes(Collection)} because it allows you
- * to set metadata for use with {@link NodeSelector}s
- */
- @Deprecated
- public void setHosts(HttpHost... hosts) {
- setNodes(hostsToNodes(hosts));
- }
-
/**
* Replaces the nodes with which the client communicates.
*/
@@ -251,234 +240,6 @@ public class RestClient implements Closeable {
}
}
- /**
- * Sends a request to the Elasticsearch cluster that the client points to and waits for the corresponding response
- * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without parameters
- * and request body.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param headers the optional request headers
- * @return the response returned by Elasticsearch
- * @throws IOException in case of a problem or the connection was aborted
- * @throws ClientProtocolException in case of an http protocol error
- * @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
- * @deprecated prefer {@link #performRequest(Request)}
- */
- @Deprecated
- public Response performRequest(String method, String endpoint, Header... headers) throws IOException {
- Request request = new Request(method, endpoint);
- addHeaders(request, headers);
- return performRequest(request);
- }
-
- /**
- * Sends a request to the Elasticsearch cluster that the client points to and waits for the corresponding response
- * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without request body.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param params the query_string parameters
- * @param headers the optional request headers
- * @return the response returned by Elasticsearch
- * @throws IOException in case of a problem or the connection was aborted
- * @throws ClientProtocolException in case of an http protocol error
- * @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
- * @deprecated prefer {@link #performRequest(Request)}
- */
- @Deprecated
- public Response performRequest(String method, String endpoint, Map<String, String> params, Header... headers) throws IOException {
- Request request = new Request(method, endpoint);
- addParameters(request, params);
- addHeaders(request, headers);
- return performRequest(request);
- }
-
- /**
- * Sends a request to the Elasticsearch cluster that the client points to and waits for the corresponding response
- * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumerFactory, Header...)}
- * which doesn't require specifying an {@link HttpAsyncResponseConsumerFactory} instance,
- * {@link HttpAsyncResponseConsumerFactory} will be used to create the needed instances of {@link HttpAsyncResponseConsumer}.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param params the query_string parameters
- * @param entity the body of the request, null if not applicable
- * @param headers the optional request headers
- * @return the response returned by Elasticsearch
- * @throws IOException in case of a problem or the connection was aborted
- * @throws ClientProtocolException in case of an http protocol error
- * @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
- * @deprecated prefer {@link #performRequest(Request)}
- */
- @Deprecated
- public Response performRequest(String method, String endpoint, Map<String, String> params,
- HttpEntity entity, Header... headers) throws IOException {
- Request request = new Request(method, endpoint);
- addParameters(request, params);
- request.setEntity(entity);
- addHeaders(request, headers);
- return performRequest(request);
- }
-
- /**
- * Sends a request to the Elasticsearch cluster that the client points to. Blocks until the request is completed and returns
- * its response or fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. Failing hosts
- * are marked dead and retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times
- * they previously failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead
- * nodes that deserve a retry) are retried until one responds or none of them does, in which case an {@link IOException} will be thrown.
- *
- * This method works by performing an asynchronous call and waiting
- * for the result. If the asynchronous call throws an exception we wrap
- * it and rethrow it so that the stack trace attached to the exception
- * contains the call site. While we attempt to preserve the original
- * exception this isn't always possible and likely haven't covered all of
- * the cases. You can get the original exception from
- * {@link Exception#getCause()}.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param params the query_string parameters
- * @param entity the body of the request, null if not applicable
- * @param httpAsyncResponseConsumerFactory the {@link HttpAsyncResponseConsumerFactory} used to create one
- * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the response body gets streamed from a non-blocking HTTP
- * connection on the client side.
- * @param headers the optional request headers
- * @return the response returned by Elasticsearch
- * @throws IOException in case of a problem or the connection was aborted
- * @throws ClientProtocolException in case of an http protocol error
- * @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
- * @deprecated prefer {@link #performRequest(Request)}
- */
- @Deprecated
- public Response performRequest(String method, String endpoint, Map<String, String> params,
- HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
- Header... headers) throws IOException {
- Request request = new Request(method, endpoint);
- addParameters(request, params);
- request.setEntity(entity);
- setOptions(request, httpAsyncResponseConsumerFactory, headers);
- return performRequest(request);
- }
-
- /**
- * Sends a request to the Elasticsearch cluster that the client points to. Doesn't wait for the response, instead
- * the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to
- * {@link #performRequestAsync(String, String, Map, HttpEntity, ResponseListener, Header...)} but without parameters and request body.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
- * @param headers the optional request headers
- * @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
- */
- @Deprecated
- public void performRequestAsync(String method, String endpoint, ResponseListener responseListener, Header... headers) {
- Request request;
- try {
- request = new Request(method, endpoint);
- addHeaders(request, headers);
- } catch (Exception e) {
- responseListener.onFailure(e);
- return;
- }
- performRequestAsync(request, responseListener);
- }
-
- /**
- * Sends a request to the Elasticsearch cluster that the client points to. Doesn't wait for the response, instead
- * the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to
- * {@link #performRequestAsync(String, String, Map, HttpEntity, ResponseListener, Header...)} but without request body.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param params the query_string parameters
- * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
- * @param headers the optional request headers
- * @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
- */
- @Deprecated
- public void performRequestAsync(String method, String endpoint, Map<String, String> params,
- ResponseListener responseListener, Header... headers) {
- Request request;
- try {
- request = new Request(method, endpoint);
- addParameters(request, params);
- addHeaders(request, headers);
- } catch (Exception e) {
- responseListener.onFailure(e);
- return;
- }
- performRequestAsync(request, responseListener);
- }
-
- /**
- * Sends a request to the Elasticsearch cluster that the client points to. Doesn't wait for the response, instead
- * the provided {@link ResponseListener} will be notified upon completion or failure.
- * Shortcut to {@link #performRequestAsync(String, String, Map, HttpEntity, HttpAsyncResponseConsumerFactory, ResponseListener,
- * Header...)} which doesn't require specifying an {@link HttpAsyncResponseConsumerFactory} instance,
- * {@link HttpAsyncResponseConsumerFactory} will be used to create the needed instances of {@link HttpAsyncResponseConsumer}.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param params the query_string parameters
- * @param entity the body of the request, null if not applicable
- * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
- * @param headers the optional request headers
- * @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
- */
- @Deprecated
- public void performRequestAsync(String method, String endpoint, Map<String, String> params,
- HttpEntity entity, ResponseListener responseListener, Header... headers) {
- Request request;
- try {
- request = new Request(method, endpoint);
- addParameters(request, params);
- request.setEntity(entity);
- addHeaders(request, headers);
- } catch (Exception e) {
- responseListener.onFailure(e);
- return;
- }
- performRequestAsync(request, responseListener);
- }
-
- /**
- * Sends a request to the Elasticsearch cluster that the client points to. The request is executed asynchronously
- * and the provided {@link ResponseListener} gets notified upon request completion or failure.
- * Selects a host out of the provided ones in a round-robin fashion. Failing hosts are marked dead and retried after a certain
- * amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously failed (the more failures,
- * the later they will be retried). In case of failures all of the alive nodes (or dead nodes that deserve a retry) are retried
- * until one responds or none of them does, in which case an {@link IOException} will be thrown.
- *
- * @param method the http method
- * @param endpoint the path of the request (without host and port)
- * @param params the query_string parameters
- * @param entity the body of the request, null if not applicable
- * @param httpAsyncResponseConsumerFactory the {@link HttpAsyncResponseConsumerFactory} used to create one
- * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the response body gets streamed from a non-blocking HTTP
- * connection on the client side.
- * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
- * @param headers the optional request headers
- * @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
- */
- @Deprecated
- public void performRequestAsync(String method, String endpoint, Map<String, String> params,
- HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
- ResponseListener responseListener, Header... headers) {
- Request request;
- try {
- request = new Request(method, endpoint);
- addParameters(request, params);
- request.setEntity(entity);
- setOptions(request, httpAsyncResponseConsumerFactory, headers);
- } catch (Exception e) {
- responseListener.onFailure(e);
- return;
- }
- performRequestAsync(request, responseListener);
- }
-
void performRequestAsyncNoCatch(Request request, ResponseListener listener) throws IOException {
Map<String, String> requestParams = new HashMap<>(request.getParameters());
//ignore is a special parameter supported by the clients, shouldn't be sent to es
@@ -1035,42 +796,4 @@ public class RestClient implements Closeable {
itr.remove();
}
}
-
- /**
- * Add all headers from the provided varargs argument to a {@link Request}. This only exists
- * to support methods that exist for backwards compatibility.
- */
- @Deprecated
- private static void addHeaders(Request request, Header... headers) {
- setOptions(request, RequestOptions.DEFAULT.getHttpAsyncResponseConsumerFactory(), headers);
- }
-
- /**
- * Add all headers from the provided varargs argument to a {@link Request}. This only exists
- * to support methods that exist for backwards compatibility.
- */
- @Deprecated
- private static void setOptions(Request request, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
- Header... headers) {
- Objects.requireNonNull(headers, "headers cannot be null");
- RequestOptions.Builder options = request.getOptions().toBuilder();
- for (Header header : headers) {
- Objects.requireNonNull(header, "header cannot be null");
- options.addHeader(header.getName(), header.getValue());
- }
- options.setHttpAsyncResponseConsumerFactory(httpAsyncResponseConsumerFactory);
- request.setOptions(options);
- }
-
- /**
- * Add all parameters from a map to a {@link Request}. This only exists
- * to support methods that exist for backwards compatibility.
- */
- @Deprecated
- private static void addParameters(Request request, Map<String, String> parameters) {
- Objects.requireNonNull(parameters, "parameters cannot be null");
- for (Map.Entry<String, String> entry : parameters.entrySet()) {
- request.addParameter(entry.getKey(), entry.getValue());
- }
- }
}
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
index 6b5bb3c98ee..fb58f18d42a 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
@@ -45,7 +45,6 @@ import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Arrays;
-import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -215,9 +214,15 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
}
final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
final int statusCode = randomStatusCode(getRandom());
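+ // Attach the random headers through RequestOptions now that the Header... overloads are gone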
+ Request request = new Request(method, "/" + statusCode);
+ RequestOptions.Builder options = request.getOptions().toBuilder();
+ for (Header header : requestHeaders) {
+ options.addHeader(header.getName(), header.getValue());
+ }
+ request.setOptions(options);
Response esResponse;
try {
- esResponse = restClient.performRequest(method, "/" + statusCode, Collections.emptyMap(), requestHeaders);
+ esResponse = restClient.performRequest(request);
} catch (ResponseException e) {
esResponse = e.getResponse();
}
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java
index cb326f4a24c..0c589e6a40c 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java
@@ -59,7 +59,6 @@ import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
-import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -69,7 +68,6 @@ import static java.util.Collections.singletonList;
import static org.elasticsearch.client.RestClientTestUtil.getAllErrorStatusCodes;
import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
import static org.elasticsearch.client.RestClientTestUtil.getOkStatusCodes;
-import static org.elasticsearch.client.RestClientTestUtil.randomHttpMethod;
import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode;
import static org.elasticsearch.client.SyncResponseListenerTests.assertExceptionStackContainsCallingMethod;
import static org.hamcrest.CoreMatchers.equalTo;
@@ -192,7 +190,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
public void testOkStatusCodes() throws IOException {
for (String method : getHttpMethods()) {
for (int okStatusCode : getOkStatusCodes()) {
- Response response = performRequest(method, "/" + okStatusCode);
+ Response response = restClient.performRequest(new Request(method, "/" + okStatusCode));
assertThat(response.getStatusLine().getStatusCode(), equalTo(okStatusCode));
}
}
@@ -223,13 +221,11 @@ public class RestClientSingleHostTests extends RestClientTestCase {
//error status codes should cause an exception to be thrown
for (int errorStatusCode : getAllErrorStatusCodes()) {
try {
- Map<String, String> params;
- if (ignoreParam.isEmpty()) {
- params = Collections.emptyMap();
- } else {
- params = Collections.singletonMap("ignore", ignoreParam);
+ Request request = new Request(method, "/" + errorStatusCode);
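+ // "ignore" is a client-side parameter that stops a ResponseException being thrown for the listed status codes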
+ if (false == ignoreParam.isEmpty()) {
+ request.addParameter("ignore", ignoreParam);
}
- Response response = performRequest(method, "/" + errorStatusCode, params);
+ Response response = restClient.performRequest(request);
if (expectedIgnores.contains(errorStatusCode)) {
//no exception gets thrown although we got an error status code, as it was configured to be ignored
assertEquals(errorStatusCode, response.getStatusLine().getStatusCode());
@@ -256,14 +252,14 @@ public class RestClientSingleHostTests extends RestClientTestCase {
for (String method : getHttpMethods()) {
//IOExceptions should be let bubble up
try {
- performRequest(method, "/coe");
+ restClient.performRequest(new Request(method, "/coe"));
fail("request should have failed");
} catch(IOException e) {
assertThat(e, instanceOf(ConnectTimeoutException.class));
}
failureListener.assertCalled(singletonList(node));
try {
- performRequest(method, "/soe");
+ restClient.performRequest(new Request(method, "/soe"));
fail("request should have failed");
} catch(IOException e) {
assertThat(e, instanceOf(SocketTimeoutException.class));
@@ -313,48 +309,6 @@ public class RestClientSingleHostTests extends RestClientTestCase {
}
}
- /**
- * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
- */
- @Deprecated
- public void tesPerformRequestOldStyleNullHeaders() throws IOException {
- String method = randomHttpMethod(getRandom());
- int statusCode = randomStatusCode(getRandom());
- try {
- performRequest(method, "/" + statusCode, (Header[])null);
- fail("request should have failed");
- } catch(NullPointerException e) {
- assertEquals("request headers must not be null", e.getMessage());
- }
- try {
- performRequest(method, "/" + statusCode, (Header)null);
- fail("request should have failed");
- } catch(NullPointerException e) {
- assertEquals("request header must not be null", e.getMessage());
- }
- }
-
- /**
- * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddParameters()}.
- */
- @Deprecated
- public void testPerformRequestOldStyleWithNullParams() throws IOException {
- String method = randomHttpMethod(getRandom());
- int statusCode = randomStatusCode(getRandom());
- try {
- restClient.performRequest(method, "/" + statusCode, (Map<String, String>)null);
- fail("request should have failed");
- } catch(NullPointerException e) {
- assertEquals("parameters cannot be null", e.getMessage());
- }
- try {
- restClient.performRequest(method, "/" + statusCode, null, (HttpEntity)null);
- fail("request should have failed");
- } catch(NullPointerException e) {
- assertEquals("parameters cannot be null", e.getMessage());
- }
- }
-
/**
* End to end test for request and response headers. Exercises the mock http client ability to send back
* whatever headers it has received.
@@ -464,35 +418,4 @@ public class RestClientSingleHostTests extends RestClientTestCase {
}
return expectedRequest;
}
-
- /**
- * @deprecated prefer {@link RestClient#performRequest(Request)}.
- */
- @Deprecated
- private Response performRequest(String method, String endpoint, Header... headers) throws IOException {
- return performRequest(method, endpoint, Collections.emptyMap(), headers);
- }
-
- /**
- * @deprecated prefer {@link RestClient#performRequest(Request)}.
- */
- @Deprecated
- private Response performRequest(String method, String endpoint, Map<String, String> params, Header... headers) throws IOException {
- int methodSelector;
- if (params.isEmpty()) {
- methodSelector = randomIntBetween(0, 2);
- } else {
- methodSelector = randomIntBetween(1, 2);
- }
- switch(methodSelector) {
- case 0:
- return restClient.performRequest(method, endpoint, headers);
- case 1:
- return restClient.performRequest(method, endpoint, params, headers);
- case 2:
- return restClient.performRequest(method, endpoint, params, (HttpEntity)null, headers);
- default:
- throw new UnsupportedOperationException();
- }
- }
}
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java
index ef94b70542f..4a037b18404 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java
@@ -42,7 +42,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import static java.util.Collections.singletonList;
-import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
@@ -90,88 +89,6 @@ public class RestClientTests extends RestClientTestCase {
}
}
- /**
- * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link #testPerformAsyncWithUnsupportedMethod()}.
- */
- @Deprecated
- public void testPerformAsyncOldStyleWithUnsupportedMethod() throws Exception {
- final CountDownLatch latch = new CountDownLatch(1);
- try (RestClient restClient = createRestClient()) {
- restClient.performRequestAsync("unsupported", randomAsciiLettersOfLength(5), new ResponseListener() {
- @Override
- public void onSuccess(Response response) {
- throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
- }
-
- @Override
- public void onFailure(Exception exception) {
- try {
- assertThat(exception, instanceOf(UnsupportedOperationException.class));
- assertEquals("http method not supported: unsupported", exception.getMessage());
- } finally {
- latch.countDown();
- }
- }
- });
- assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
- }
- }
-
- /**
- * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddParameters()}.
- */
- @Deprecated
- public void testPerformOldStyleAsyncWithNullParams() throws Exception {
- final CountDownLatch latch = new CountDownLatch(1);
- try (RestClient restClient = createRestClient()) {
- restClient.performRequestAsync(randomAsciiLettersOfLength(5), randomAsciiLettersOfLength(5), null, new ResponseListener() {
- @Override
- public void onSuccess(Response response) {
- throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
- }
-
- @Override
- public void onFailure(Exception exception) {
- try {
- assertThat(exception, instanceOf(NullPointerException.class));
- assertEquals("parameters cannot be null", exception.getMessage());
- } finally {
- latch.countDown();
- }
- }
- });
- assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
- }
- }
-
- /**
- * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
- */
- @Deprecated
- public void testPerformOldStyleAsyncWithNullHeaders() throws Exception {
- final CountDownLatch latch = new CountDownLatch(1);
- try (RestClient restClient = createRestClient()) {
- ResponseListener listener = new ResponseListener() {
- @Override
- public void onSuccess(Response response) {
- throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
- }
-
- @Override
- public void onFailure(Exception exception) {
- try {
- assertThat(exception, instanceOf(NullPointerException.class));
- assertEquals("header cannot be null", exception.getMessage());
- } finally {
- latch.countDown();
- }
- }
- };
- restClient.performRequestAsync("GET", randomAsciiLettersOfLength(5), listener, (Header) null);
- assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
- }
- }
-
public void testPerformAsyncWithWrongEndpoint() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
try (RestClient restClient = createRestClient()) {
@@ -195,33 +112,6 @@ public class RestClientTests extends RestClientTestCase {
}
}
- /**
- * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link #testPerformAsyncWithWrongEndpoint()}.
- */
- @Deprecated
- public void testPerformAsyncOldStyleWithWrongEndpoint() throws Exception {
- final CountDownLatch latch = new CountDownLatch(1);
- try (RestClient restClient = createRestClient()) {
- restClient.performRequestAsync("GET", "::http:///", new ResponseListener() {
- @Override
- public void onSuccess(Response response) {
- throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
- }
-
- @Override
- public void onFailure(Exception exception) {
- try {
- assertThat(exception, instanceOf(IllegalArgumentException.class));
- assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage());
- } finally {
- latch.countDown();
- }
- }
- });
- assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
- }
- }
-
public void testBuildUriLeavesPathUntouched() {
final Map<String, String> emptyMap = Collections.emptyMap();
{
@@ -259,34 +149,6 @@ public class RestClientTests extends RestClientTestCase {
}
}
- @Deprecated
- public void testSetHostsWrongArguments() throws IOException {
- try (RestClient restClient = createRestClient()) {
- restClient.setHosts((HttpHost[]) null);
- fail("setHosts should have failed");
- } catch (IllegalArgumentException e) {
- assertEquals("hosts must not be null nor empty", e.getMessage());
- }
- try (RestClient restClient = createRestClient()) {
- restClient.setHosts();
- fail("setHosts should have failed");
- } catch (IllegalArgumentException e) {
- assertEquals("hosts must not be null nor empty", e.getMessage());
- }
- try (RestClient restClient = createRestClient()) {
- restClient.setHosts((HttpHost) null);
- fail("setHosts should have failed");
- } catch (IllegalArgumentException e) {
- assertEquals("host cannot be null", e.getMessage());
- }
- try (RestClient restClient = createRestClient()) {
- restClient.setHosts(new HttpHost("localhost", 9200), null, new HttpHost("localhost", 9201));
- fail("setHosts should have failed");
- } catch (IllegalArgumentException e) {
- assertEquals("host cannot be null", e.getMessage());
- }
- }
-
public void testSetNodesWrongArguments() throws IOException {
try (RestClient restClient = createRestClient()) {
restClient.setNodes(null);
@@ -348,23 +210,6 @@ public class RestClientTests extends RestClientTestCase {
}
}
- /**
- * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testConstructor()}.
- */
- @Deprecated
- public void testNullPath() throws IOException {
- try (RestClient restClient = createRestClient()) {
- for (String method : getHttpMethods()) {
- try {
- restClient.performRequest(method, null);
- fail("path set to null should fail!");
- } catch (NullPointerException e) {
- assertEquals("endpoint cannot be null", e.getMessage());
- }
- }
- }
- }
-
public void testSelectHosts() throws IOException {
Node n1 = new Node(new HttpHost("1"), null, null, "1", null, null);
Node n2 = new Node(new HttpHost("2"), null, null, "2", null, null);
diff --git a/docs/build.gradle b/docs/build.gradle
index 88bccfef4a3..c6a7a8d4837 100644
--- a/docs/build.gradle
+++ b/docs/build.gradle
@@ -19,10 +19,20 @@
apply plugin: 'elasticsearch.docs-test'
-/* List of files that have snippets that require a gold or platinum licence
-and therefore cannot be tested yet... */
+/* List of files that have snippets that will not work until tests can run with a platinum licence ... */
buildRestTests.expectedUnconvertedCandidates = [
'reference/ml/transforms.asciidoc',
+ 'reference/ml/apis/delete-calendar-event.asciidoc',
+ 'reference/ml/apis/get-bucket.asciidoc',
+ 'reference/ml/apis/get-category.asciidoc',
+ 'reference/ml/apis/get-influencer.asciidoc',
+ 'reference/ml/apis/get-job-stats.asciidoc',
+ 'reference/ml/apis/get-overall-buckets.asciidoc',
+ 'reference/ml/apis/get-record.asciidoc',
+ 'reference/ml/apis/get-snapshot.asciidoc',
+ 'reference/ml/apis/post-data.asciidoc',
+ 'reference/ml/apis/revert-snapshot.asciidoc',
+ 'reference/ml/apis/update-snapshot.asciidoc',
]
integTestCluster {
@@ -867,3 +877,224 @@ buildRestTests.setups['sensor_prefab_data'] = '''
{"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"}
'''
+buildRestTests.setups['sample_job'] = '''
+ - do:
+ xpack.ml.put_job:
+ job_id: "sample_job"
+ body: >
+ {
+ "description" : "Very basic job",
+ "analysis_config" : {
+ "bucket_span":"10m",
+ "detectors" :[
+ {
+ "function": "count"
+ }
+ ]},
+ "data_description" : {
+ "time_field":"timestamp",
+ "time_format": "epoch_ms"
+ }
+ }
+'''
+buildRestTests.setups['farequote_index'] = '''
+ - do:
+ indices.create:
+ index: farequote
+ body:
+ settings:
+ number_of_shards: 1
+ number_of_replicas: 0
+ mappings:
+ metric:
+ properties:
+ time:
+ type: date
+ responsetime:
+ type: float
+ airline:
+ type: keyword
+ doc_count:
+ type: integer
+'''
+buildRestTests.setups['farequote_data'] = buildRestTests.setups['farequote_index'] + '''
+ - do:
+ bulk:
+ index: farequote
+ type: metric
+ refresh: true
+ body: |
+ {"index": {"_id":"1"}}
+ {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5}
+ {"index": {"_id":"2"}}
+ {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23}
+ {"index": {"_id":"3"}}
+ {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42}
+'''
+buildRestTests.setups['farequote_job'] = buildRestTests.setups['farequote_data'] + '''
+ - do:
+ xpack.ml.put_job:
+ job_id: "farequote"
+ body: >
+ {
+ "analysis_config": {
+ "bucket_span": "60m",
+ "detectors": [{
+ "function": "mean",
+ "field_name": "responsetime",
+ "by_field_name": "airline"
+ }],
+ "summary_count_field_name": "doc_count"
+ },
+ "data_description": {
+ "time_field": "time"
+ }
+ }
+'''
+buildRestTests.setups['farequote_datafeed'] = buildRestTests.setups['farequote_job'] + '''
+ - do:
+ xpack.ml.put_datafeed:
+ datafeed_id: "datafeed-farequote"
+ body: >
+ {
+ "job_id":"farequote",
+ "indexes":"farequote"
+ }
+'''
+buildRestTests.setups['server_metrics_index'] = '''
+ - do:
+ indices.create:
+ index: server-metrics
+ body:
+ settings:
+ number_of_shards: 1
+ number_of_replicas: 0
+ mappings:
+ metric:
+ properties:
+ timestamp:
+ type: date
+ total:
+ type: long
+'''
+buildRestTests.setups['server_metrics_data'] = buildRestTests.setups['server_metrics_index'] + '''
+ - do:
+ bulk:
+ index: server-metrics
+ type: metric
+ refresh: true
+ body: |
+ {"index": {"_id":"1177"}}
+ {"timestamp":"2017-03-23T13:00:00","total":40476}
+ {"index": {"_id":"1178"}}
+ {"timestamp":"2017-03-23T13:00:00","total":15287}
+ {"index": {"_id":"1179"}}
+ {"timestamp":"2017-03-23T13:00:00","total":-776}
+ {"index": {"_id":"1180"}}
+ {"timestamp":"2017-03-23T13:00:00","total":11366}
+ {"index": {"_id":"1181"}}
+ {"timestamp":"2017-03-23T13:00:00","total":3606}
+ {"index": {"_id":"1182"}}
+ {"timestamp":"2017-03-23T13:00:00","total":19006}
+ {"index": {"_id":"1183"}}
+ {"timestamp":"2017-03-23T13:00:00","total":38613}
+ {"index": {"_id":"1184"}}
+ {"timestamp":"2017-03-23T13:00:00","total":19516}
+ {"index": {"_id":"1185"}}
+ {"timestamp":"2017-03-23T13:00:00","total":-258}
+ {"index": {"_id":"1186"}}
+ {"timestamp":"2017-03-23T13:00:00","total":9551}
+ {"index": {"_id":"1187"}}
+ {"timestamp":"2017-03-23T13:00:00","total":11217}
+ {"index": {"_id":"1188"}}
+ {"timestamp":"2017-03-23T13:00:00","total":22557}
+ {"index": {"_id":"1189"}}
+ {"timestamp":"2017-03-23T13:00:00","total":40508}
+ {"index": {"_id":"1190"}}
+ {"timestamp":"2017-03-23T13:00:00","total":11887}
+ {"index": {"_id":"1191"}}
+ {"timestamp":"2017-03-23T13:00:00","total":31659}
+'''
+buildRestTests.setups['server_metrics_job'] = buildRestTests.setups['server_metrics_data'] + '''
+ - do:
+ xpack.ml.put_job:
+ job_id: "total-requests"
+ body: >
+ {
+ "description" : "Total sum of requests",
+ "analysis_config" : {
+ "bucket_span":"10m",
+ "detectors" :[
+ {
+ "detector_description": "Sum of total",
+ "function": "sum",
+ "field_name": "total"
+ }
+ ]},
+ "data_description" : {
+ "time_field":"timestamp",
+ "time_format": "epoch_ms"
+ }
+ }
+'''
+buildRestTests.setups['server_metrics_datafeed'] = buildRestTests.setups['server_metrics_job'] + '''
+ - do:
+ xpack.ml.put_datafeed:
+ datafeed_id: "datafeed-total-requests"
+ body: >
+ {
+ "job_id":"total-requests",
+ "indexes":"server-metrics"
+ }
+'''
+buildRestTests.setups['server_metrics_openjob'] = buildRestTests.setups['server_metrics_datafeed'] + '''
+ - do:
+ xpack.ml.open_job:
+ job_id: "total-requests"
+'''
+buildRestTests.setups['server_metrics_startdf'] = buildRestTests.setups['server_metrics_openjob'] + '''
+ - do:
+ xpack.ml.start_datafeed:
+ datafeed_id: "datafeed-total-requests"
+'''
+buildRestTests.setups['calendar_outages'] = '''
+ - do:
+ xpack.ml.put_calendar:
+ calendar_id: "planned-outages"
+'''
+buildRestTests.setups['calendar_outages_addevent'] = buildRestTests.setups['calendar_outages'] + '''
+ - do:
+ xpack.ml.post_calendar_events:
+ calendar_id: "planned-outages"
+ body: >
+ { "description": "event 1", "start_time": "2017-12-01T00:00:00Z", "end_time": "2017-12-02T00:00:00Z", "calendar_id": "planned-outages" }
+
+
+'''
+buildRestTests.setups['calendar_outages_openjob'] = buildRestTests.setups['server_metrics_openjob'] + '''
+ - do:
+ xpack.ml.put_calendar:
+ calendar_id: "planned-outages"
+'''
+buildRestTests.setups['calendar_outages_addjob'] = buildRestTests.setups['server_metrics_openjob'] + '''
+ - do:
+ xpack.ml.put_calendar:
+ calendar_id: "planned-outages"
+ body: >
+ {
+ "job_ids": ["total-requests"]
+ }
+'''
+buildRestTests.setups['calendar_outages_addevent'] = buildRestTests.setups['calendar_outages_addjob'] + '''
+ - do:
+ xpack.ml.post_calendar_events:
+ calendar_id: "planned-outages"
+ body: >
+ { "events" : [
+ { "description": "event 1", "start_time": "1513641600000", "end_time": "1513728000000"},
+ { "description": "event 2", "start_time": "1513814400000", "end_time": "1513900800000"},
+ { "description": "event 3", "start_time": "1514160000000", "end_time": "1514246400000"}
+ ]}
+'''
+
+
diff --git a/docs/java-rest/high-level/ml/flush-job.asciidoc b/docs/java-rest/high-level/ml/flush-job.asciidoc
new file mode 100644
index 00000000000..1f815bba0d5
--- /dev/null
+++ b/docs/java-rest/high-level/ml/flush-job.asciidoc
@@ -0,0 +1,83 @@
+[[java-rest-high-x-pack-ml-flush-job]]
+=== Flush Job API
+
+The Flush Job API provides the ability to flush a {ml} job's
+internally buffered data in the cluster.
+It accepts a `FlushJobRequest` object and responds
+with a `FlushJobResponse` object.
+
+[[java-rest-high-x-pack-ml-flush-job-request]]
+==== Flush Job Request
+
+A `FlushJobRequest` object gets created with an existing non-null `jobId`.
+All other fields are optional for the request.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request]
+--------------------------------------------------
+<1> Constructing a new request referencing an existing `jobId`
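+
+As a purely illustrative sketch (hand-written here rather than pulled from the
+documentation tests, and assuming a single-argument constructor that takes the
+job id, as the callout above suggests):
+
+["source","java"]
+--------------------------------------------------
+// only the job id is required; all other fields are optional
+FlushJobRequest flushJobRequest = new FlushJobRequest("total-requests");
+--------------------------------------------------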
+
+==== Optional Arguments
+
+The following arguments are optional.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request-options]
+--------------------------------------------------
+<1> Set the request to calculate interim results
+<2> Set the advance time, flushing the job up to that particular time value
+<3> Set the start time for the range of buckets on which
+to calculate interim results (requires `calc_interim` to be `true`)
+<4> Set the end time for the range of buckets on which
+to calculate interim results (requires `calc_interim` to be `true`)
+<5> Set the skip time, skipping analysis up to that particular time value
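+
+A hand-written sketch of setting these options on the request created above,
+assuming setter names that mirror the request parameters (`setCalcInterim`,
+`setAdvanceTime`, `setStart`, `setEnd`, `setSkipTime`) and placeholder timestamps:
+
+["source","java"]
+--------------------------------------------------
+flushJobRequest.setCalcInterim(true);                        // calculate interim results
+flushJobRequest.setStart("2018-08-31T16:35:07+00:00");       // start of the bucket range
+flushJobRequest.setEnd("2018-08-31T17:35:07+00:00");         // end of the bucket range
+flushJobRequest.setAdvanceTime("2018-08-31T17:35:07+00:00"); // advance the flush to this time
+flushJobRequest.setSkipTime("2018-08-31T16:35:07+00:00");    // skip up to this time
+--------------------------------------------------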
+
+[[java-rest-high-x-pack-ml-flush-job-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-flush-job-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute-async]
+--------------------------------------------------
+<1> The `FlushJobRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The method does not block and returns immediately. The passed `ActionListener` is used
+to notify the caller of completion. A typical `ActionListener` for `FlushJobResponse` may
+look like
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
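+
+For orientation only, a hand-written sketch of such a listener (the getter used
+in `onResponse` is the one described in the response section below):
+
+["source","java"]
+--------------------------------------------------
+ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
+    @Override
+    public void onResponse(FlushJobResponse flushJobResponse) {
+        // the flush completed successfully
+        boolean flushed = flushJobResponse.isFlushed();
+    }
+
+    @Override
+    public void onFailure(Exception e) {
+        // handle the unexpected failure
+    }
+};
+--------------------------------------------------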
+
+[[java-rest-high-x-pack-ml-flush-job-response]]
+==== Flush Job Response
+
+A `FlushJobResponse` contains an acknowledgement and an optional end date for the
+last finalized bucket.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-response]
+--------------------------------------------------
+<1> `isFlushed()` indicates if the job was successfully flushed or not.
+<2> `getLastFinalizedBucketEnd()` provides the timestamp
+(in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
\ No newline at end of file
diff --git a/docs/java-rest/high-level/ml/get-job-stats.asciidoc b/docs/java-rest/high-level/ml/get-job-stats.asciidoc
new file mode 100644
index 00000000000..90f7794ae76
--- /dev/null
+++ b/docs/java-rest/high-level/ml/get-job-stats.asciidoc
@@ -0,0 +1,67 @@
+[[java-rest-high-x-pack-ml-get-job-stats]]
+=== Get Job Stats API
+
+The Get Job Stats API provides the ability to get statistics for any number of
+{ml} jobs in the cluster.
+It accepts a `GetJobStatsRequest` object and responds
+with a `GetJobStatsResponse` object.
+
+[[java-rest-high-x-pack-ml-get-job-stats-request]]
+==== Get Job Stats Request
+
+A `GetJobStatsRequest` object can have any number of `jobId`
+entries. However, they all must be non-null. An empty list is the same as
+requesting statistics for all jobs.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-request]
+--------------------------------------------------
+<1> Constructing a new request referencing existing `jobIds`; wildcards are allowed
+<2> Whether to ignore if a wildcard expression matches no jobs
+(this includes the `_all` string and the case where no jobs have been specified)
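+
+A hand-written sketch of building such a request, assuming a varargs constructor
+for the job ids and a `setAllowNoJobs` setter matching the callouts above:
+
+["source","java"]
+--------------------------------------------------
+// explicit job ids; wildcard expressions such as "total-*" are also accepted
+GetJobStatsRequest request = new GetJobStatsRequest("total-requests", "sample_job");
+// tolerate wildcard expressions (or "_all") that match no jobs
+request.setAllowNoJobs(true);
+--------------------------------------------------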
+
+[[java-rest-high-x-pack-ml-get-job-stats-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-get-job-stats-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute-async]
+--------------------------------------------------
+<1> The `GetJobStatsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The method does not block and returns immediately. The passed `ActionListener` is used
+to notify the caller of completion. A typical `ActionListener` for `GetJobStatsResponse` may
+look like
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
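+
+A hand-written sketch of such a listener, using the response getters described
+in the response section below (`getCount()` and `getJobStats()`):
+
+["source","java"]
+--------------------------------------------------
+ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
+    @Override
+    public void onResponse(GetJobStatsResponse response) {
+        // number of job statistics found and the JobStats objects themselves
+        System.out.println("found " + response.getCount() + " job stats");
+        for (JobStats stats : response.getJobStats()) {
+            // inspect each job's statistics here
+        }
+    }
+
+    @Override
+    public void onFailure(Exception e) {
+        // handle the unexpected failure
+    }
+};
+--------------------------------------------------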
+
+[[java-rest-high-x-pack-ml-get-job-stats-response]]
+==== Get Job Stats Response
+
+The returned `GetJobStatsResponse` contains the requested job statistics:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-response]
+--------------------------------------------------
+<1> `getCount()` indicates the number of job statistics found
+<2> `getJobStats()` is the collection of {ml} `JobStats` objects found
\ No newline at end of file
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 2b72ca74f6a..68320fbfe9f 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -211,6 +211,8 @@ The Java High Level REST Client supports the following Machine Learning APIs:
* <>
* <>
* <>
+* <<java-rest-high-x-pack-ml-flush-job>>
+* <<java-rest-high-x-pack-ml-get-job-stats>>
* <>
* <>
@@ -219,6 +221,8 @@ include::ml/get-job.asciidoc[]
include::ml/delete-job.asciidoc[]
include::ml/open-job.asciidoc[]
include::ml/close-job.asciidoc[]
+include::ml/flush-job.asciidoc[]
+include::ml/get-job-stats.asciidoc[]
include::ml/get-buckets.asciidoc[]
include::ml/get-records.asciidoc[]
diff --git a/docs/reference/migration/migrate_7_0.asciidoc b/docs/reference/migration/migrate_7_0.asciidoc
index 42fd6b7afbe..924a6984dc0 100644
--- a/docs/reference/migration/migrate_7_0.asciidoc
+++ b/docs/reference/migration/migrate_7_0.asciidoc
@@ -39,6 +39,7 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x.
* <>
* <>
* <>
+* <<breaking_70_low_level_restclient_changes>>
include::migrate_7_0/aggregations.asciidoc[]
include::migrate_7_0/analysis.asciidoc[]
@@ -53,4 +54,5 @@ include::migrate_7_0/java.asciidoc[]
include::migrate_7_0/settings.asciidoc[]
include::migrate_7_0/scripting.asciidoc[]
include::migrate_7_0/snapshotstats.asciidoc[]
-include::migrate_7_0/restclient.asciidoc[]
\ No newline at end of file
+include::migrate_7_0/restclient.asciidoc[]
+include::migrate_7_0/low_level_restclient.asciidoc[]
diff --git a/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc
new file mode 100644
index 00000000000..77f5266763f
--- /dev/null
+++ b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc
@@ -0,0 +1,14 @@
+[[breaking_70_low_level_restclient_changes]]
+=== Low-level REST client changes
+
+==== Deprecated flavors of performRequest have been removed
+
+In 6.4.0 we deprecated the flavors of `performRequest` and `performRequestAsync`
+that do not take `Request` objects in favor of the flavors that do, because the
+`Request`-based flavors can be extended without breaking backwards
+compatibility. The deprecated flavors have now been removed.
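+
+For example, a call that previously passed the HTTP method and endpoint as
+separate arguments is now expressed with a `Request` object (sketch only; the
+endpoint shown is arbitrary):
+
+[source,java]
+----
+Request request = new Request("GET", "/_cluster/health");
+request.addParameter("pretty", "true");
+Response response = restClient.performRequest(request);
+----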
+
+==== Removed setHosts
+
+We deprecated `setHosts` in 6.4.0 in favor of `setNodes` because `setNodes`
+supports the host metadata used by the `NodeSelector`. The deprecated method has
+now been removed.
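+
+A sketch of the replacement call, assuming `setNodes` accepts a collection of
+`Node` instances:
+
+[source,java]
+----
+restClient.setNodes(Arrays.asList(
+    new Node(new HttpHost("localhost", 9200)),
+    new Node(new HttpHost("localhost", 9201))));
+----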
diff --git a/x-pack/docs/en/rest-api/ml/calendarresource.asciidoc b/docs/reference/ml/apis/calendarresource.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/calendarresource.asciidoc
rename to docs/reference/ml/apis/calendarresource.asciidoc
index 8edb43ed7a3..4279102cd35 100644
--- a/x-pack/docs/en/rest-api/ml/calendarresource.asciidoc
+++ b/docs/reference/ml/apis/calendarresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-calendar-resource]]
=== Calendar Resources
diff --git a/x-pack/docs/en/rest-api/ml/close-job.asciidoc b/docs/reference/ml/apis/close-job.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/close-job.asciidoc
rename to docs/reference/ml/apis/close-job.asciidoc
index 8e7e8eb0ce8..6dec6402c87 100644
--- a/x-pack/docs/en/rest-api/ml/close-job.asciidoc
+++ b/docs/reference/ml/apis/close-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-close-job]]
=== Close Jobs API
++++
@@ -80,7 +81,7 @@ The following example closes the `total-requests` job:
POST _xpack/ml/anomaly_detectors/total-requests/_close
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the job is closed, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/datafeedresource.asciidoc b/docs/reference/ml/apis/datafeedresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/datafeedresource.asciidoc
rename to docs/reference/ml/apis/datafeedresource.asciidoc
index 0ffeb6bc89d..6fe0b35d951 100644
--- a/x-pack/docs/en/rest-api/ml/datafeedresource.asciidoc
+++ b/docs/reference/ml/apis/datafeedresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-datafeed-resource]]
=== {dfeed-cap} Resources
diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc b/docs/reference/ml/apis/delete-calendar-event.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc
rename to docs/reference/ml/apis/delete-calendar-event.asciidoc
index ef8dad39dba..8961726f573 100644
--- a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/delete-calendar-event.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-calendar-event]]
=== Delete Events from Calendar API
++++
@@ -44,7 +45,7 @@ calendar:
DELETE _xpack/ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st
--------------------------------------------------
// CONSOLE
-// TEST[catch:missing]
+// TEST[skip:catch:missing]
When the event is removed, you receive the following results:
[source,js]
@@ -53,4 +54,3 @@ When the event is removed, you receive the following results:
"acknowledged": true
}
----
-// NOTCONSOLE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar-job.asciidoc b/docs/reference/ml/apis/delete-calendar-job.asciidoc
similarity index 93%
rename from x-pack/docs/en/rest-api/ml/delete-calendar-job.asciidoc
rename to docs/reference/ml/apis/delete-calendar-job.asciidoc
index 94388c0c4b6..4362a82b5cb 100644
--- a/x-pack/docs/en/rest-api/ml/delete-calendar-job.asciidoc
+++ b/docs/reference/ml/apis/delete-calendar-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-calendar-job]]
=== Delete Jobs from Calendar API
++++
@@ -38,7 +39,7 @@ calendar and `total-requests` job:
DELETE _xpack/ml/calendars/planned-outages/jobs/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
When the job is removed from the calendar, you receive the following
results:
@@ -50,4 +51,4 @@ results:
"job_ids": []
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar.asciidoc b/docs/reference/ml/apis/delete-calendar.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/delete-calendar.asciidoc
rename to docs/reference/ml/apis/delete-calendar.asciidoc
index f7673b54574..9f9f3457f24 100644
--- a/x-pack/docs/en/rest-api/ml/delete-calendar.asciidoc
+++ b/docs/reference/ml/apis/delete-calendar.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-calendar]]
=== Delete Calendar API
++++
@@ -40,7 +41,7 @@ The following example deletes the `planned-outages` calendar:
DELETE _xpack/ml/calendars/planned-outages
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages]
+// TEST[skip:setup:calendar_outages]
When the calendar is deleted, you receive the following results:
[source,js]
@@ -49,4 +50,4 @@ When the calendar is deleted, you receive the following results:
"acknowledged": true
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/delete-datafeed.asciidoc b/docs/reference/ml/apis/delete-datafeed.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/delete-datafeed.asciidoc
rename to docs/reference/ml/apis/delete-datafeed.asciidoc
index db4fd5c177a..996d2c7dd2e 100644
--- a/x-pack/docs/en/rest-api/ml/delete-datafeed.asciidoc
+++ b/docs/reference/ml/apis/delete-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-datafeed]]
=== Delete {dfeeds-cap} API
++++
@@ -47,7 +48,7 @@ The following example deletes the `datafeed-total-requests` {dfeed}:
DELETE _xpack/ml/datafeeds/datafeed-total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
When the {dfeed} is deleted, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/delete-filter.asciidoc b/docs/reference/ml/apis/delete-filter.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/delete-filter.asciidoc
rename to docs/reference/ml/apis/delete-filter.asciidoc
index b58d2980b88..21e35b66076 100644
--- a/x-pack/docs/en/rest-api/ml/delete-filter.asciidoc
+++ b/docs/reference/ml/apis/delete-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-filter]]
=== Delete Filter API
++++
@@ -41,7 +42,7 @@ The following example deletes the `safe_domains` filter:
DELETE _xpack/ml/filters/safe_domains
--------------------------------------------------
// CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
When the filter is deleted, you receive the following results:
[source,js]
@@ -50,4 +51,4 @@ When the filter is deleted, you receive the following results:
"acknowledged": true
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/delete-job.asciidoc b/docs/reference/ml/apis/delete-job.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/delete-job.asciidoc
rename to docs/reference/ml/apis/delete-job.asciidoc
index c01b08545b6..d5ef120ad04 100644
--- a/x-pack/docs/en/rest-api/ml/delete-job.asciidoc
+++ b/docs/reference/ml/apis/delete-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-job]]
=== Delete Jobs API
++++
@@ -56,7 +57,7 @@ The following example deletes the `total-requests` job:
DELETE _xpack/ml/anomaly_detectors/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the job is deleted, you receive the following results:
[source,js]
@@ -65,4 +66,4 @@ When the job is deleted, you receive the following results:
"acknowledged": true
}
----
-// TESTRESPONSE
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/delete-snapshot.asciidoc b/docs/reference/ml/apis/delete-snapshot.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/delete-snapshot.asciidoc
rename to docs/reference/ml/apis/delete-snapshot.asciidoc
index 2ab0116fe74..96a35900545 100644
--- a/x-pack/docs/en/rest-api/ml/delete-snapshot.asciidoc
+++ b/docs/reference/ml/apis/delete-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-snapshot]]
=== Delete Model Snapshots API
++++
@@ -32,7 +33,6 @@ the `model_snapshot_id` in the results from the get jobs API.
You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see {xpack-ref}/security-privileges.html[Security Privileges].
-//<>.
==== Examples
@@ -53,3 +53,4 @@ When the snapshot is deleted, you receive the following results:
"acknowledged": true
}
----
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/eventresource.asciidoc b/docs/reference/ml/apis/eventresource.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/eventresource.asciidoc
rename to docs/reference/ml/apis/eventresource.asciidoc
index c9ab7896421..a1e96f5c25a 100644
--- a/x-pack/docs/en/rest-api/ml/eventresource.asciidoc
+++ b/docs/reference/ml/apis/eventresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-event-resource]]
=== Scheduled Event Resources
diff --git a/x-pack/docs/en/rest-api/ml/filterresource.asciidoc b/docs/reference/ml/apis/filterresource.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/filterresource.asciidoc
rename to docs/reference/ml/apis/filterresource.asciidoc
index e942447c1ee..e67c92dc8d0 100644
--- a/x-pack/docs/en/rest-api/ml/filterresource.asciidoc
+++ b/docs/reference/ml/apis/filterresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-filter-resource]]
=== Filter Resources
diff --git a/x-pack/docs/en/rest-api/ml/flush-job.asciidoc b/docs/reference/ml/apis/flush-job.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/flush-job.asciidoc
rename to docs/reference/ml/apis/flush-job.asciidoc
index 934a2d81b17..f19d2aa648f 100644
--- a/x-pack/docs/en/rest-api/ml/flush-job.asciidoc
+++ b/docs/reference/ml/apis/flush-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-flush-job]]
=== Flush Jobs API
++++
@@ -74,7 +75,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_flush
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the operation succeeds, you receive the following results:
[source,js]
@@ -84,7 +85,7 @@ When the operation succeeds, you receive the following results:
"last_finalized_bucket_end": 1455234900000
}
----
-// TESTRESPONSE[s/"last_finalized_bucket_end": 1455234900000/"last_finalized_bucket_end": $body.last_finalized_bucket_end/]
+//TESTRESPONSE[s/"last_finalized_bucket_end": 1455234900000/"last_finalized_bucket_end": $body.last_finalized_bucket_end/]
The `last_finalized_bucket_end` provides the timestamp (in
milliseconds-since-the-epoch) of the end of the last bucket that was processed.
@@ -101,7 +102,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_flush
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the operation succeeds, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/forecast.asciidoc b/docs/reference/ml/apis/forecast.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/forecast.asciidoc
rename to docs/reference/ml/apis/forecast.asciidoc
index 99647ecae1b..197876f3f04 100644
--- a/x-pack/docs/en/rest-api/ml/forecast.asciidoc
+++ b/docs/reference/ml/apis/forecast.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-forecast]]
=== Forecast Jobs API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-bucket.asciidoc b/docs/reference/ml/apis/get-bucket.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/get-bucket.asciidoc
rename to docs/reference/ml/apis/get-bucket.asciidoc
index 95b05ff7f5d..3a276c13e89 100644
--- a/x-pack/docs/en/rest-api/ml/get-bucket.asciidoc
+++ b/docs/reference/ml/apis/get-bucket.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-bucket]]
=== Get Buckets API
++++
@@ -81,7 +82,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
roles provide these privileges. For more information, see
{xpack-ref}/security-privileges.html[Security Privileges] and
{xpack-ref}/built-in-roles.html[Built-in Roles].
-//<> and <>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/get-calendar-event.asciidoc b/docs/reference/ml/apis/get-calendar-event.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/get-calendar-event.asciidoc
rename to docs/reference/ml/apis/get-calendar-event.asciidoc
index e89173c3382..43dd74e47c9 100644
--- a/x-pack/docs/en/rest-api/ml/get-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/get-calendar-event.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-calendar-event]]
=== Get Scheduled Events API
++++
@@ -66,7 +67,7 @@ The following example gets information about the scheduled events in the
GET _xpack/ml/calendars/planned-outages/events
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addevent]
+// TEST[skip:setup:calendar_outages_addevent]
The API returns the following results:
diff --git a/x-pack/docs/en/rest-api/ml/get-calendar.asciidoc b/docs/reference/ml/apis/get-calendar.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/get-calendar.asciidoc
rename to docs/reference/ml/apis/get-calendar.asciidoc
index ae95fd99688..f86875f326c 100644
--- a/x-pack/docs/en/rest-api/ml/get-calendar.asciidoc
+++ b/docs/reference/ml/apis/get-calendar.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-calendar]]
=== Get Calendars API
++++
@@ -62,7 +63,7 @@ calendar:
GET _xpack/ml/calendars/planned-outages
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
The API returns the following results:
[source,js]
@@ -79,4 +80,4 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/get-category.asciidoc b/docs/reference/ml/apis/get-category.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/get-category.asciidoc
rename to docs/reference/ml/apis/get-category.asciidoc
index 13f274133c0..e5d6fe16802 100644
--- a/x-pack/docs/en/rest-api/ml/get-category.asciidoc
+++ b/docs/reference/ml/apis/get-category.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-category]]
=== Get Categories API
++++
@@ -18,7 +19,6 @@ Retrieves job results for one or more categories.
For more information about categories, see
{xpack-ref}/ml-configuring-categories.html[Categorizing Log Messages].
-//<>.
==== Path Parameters
@@ -56,7 +56,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
roles provide these privileges. For more information, see
{xpack-ref}/security-privileges.html[Security Privileges] and
{xpack-ref}/built-in-roles.html[Built-in Roles].
-//<> and <>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/get-datafeed-stats.asciidoc b/docs/reference/ml/apis/get-datafeed-stats.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/get-datafeed-stats.asciidoc
rename to docs/reference/ml/apis/get-datafeed-stats.asciidoc
index 2869e8222f8..9ca67cc17fb 100644
--- a/x-pack/docs/en/rest-api/ml/get-datafeed-stats.asciidoc
+++ b/docs/reference/ml/apis/get-datafeed-stats.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-datafeed-stats]]
=== Get {dfeed-cap} Statistics API
++++
@@ -66,7 +67,7 @@ The following example gets usage information for the
GET _xpack/ml/datafeeds/datafeed-total-requests/_stats
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_startdf]
+// TEST[skip:setup:server_metrics_startdf]
The API returns the following results:
[source,js]
@@ -97,4 +98,4 @@ The API returns the following results:
// TESTRESPONSE[s/"node-0"/$body.$_path/]
// TESTRESPONSE[s/"hoXMLZB0RWKfR9UPPUCxXX"/$body.$_path/]
// TESTRESPONSE[s/"127.0.0.1:9300"/$body.$_path/]
-// TESTRESPONSE[s/"17179869184"/$body.datafeeds.0.node.attributes.ml\\.machine_memory/]
+// TESTRESPONSE[s/"17179869184"/$body.datafeeds.0.node.attributes.ml\\.machine_memory/]
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/get-datafeed.asciidoc b/docs/reference/ml/apis/get-datafeed.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/get-datafeed.asciidoc
rename to docs/reference/ml/apis/get-datafeed.asciidoc
index 0fa51773fd1..db5f4249669 100644
--- a/x-pack/docs/en/rest-api/ml/get-datafeed.asciidoc
+++ b/docs/reference/ml/apis/get-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-datafeed]]
=== Get {dfeeds-cap} API
++++
@@ -60,7 +61,7 @@ The following example gets configuration information for the
GET _xpack/ml/datafeeds/datafeed-total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
The API returns the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/get-filter.asciidoc b/docs/reference/ml/apis/get-filter.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/get-filter.asciidoc
rename to docs/reference/ml/apis/get-filter.asciidoc
index b4699e9d622..2dbb5d16cc5 100644
--- a/x-pack/docs/en/rest-api/ml/get-filter.asciidoc
+++ b/docs/reference/ml/apis/get-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-filter]]
=== Get Filters API
++++
@@ -62,7 +63,7 @@ filter:
GET _xpack/ml/filters/safe_domains
--------------------------------------------------
// CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
The API returns the following results:
[source,js]
@@ -81,4 +82,4 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/get-influencer.asciidoc b/docs/reference/ml/apis/get-influencer.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-influencer.asciidoc
rename to docs/reference/ml/apis/get-influencer.asciidoc
index bffd2b8e096..182cca7aa99 100644
--- a/x-pack/docs/en/rest-api/ml/get-influencer.asciidoc
+++ b/docs/reference/ml/apis/get-influencer.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-influencer]]
=== Get Influencers API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-job-stats.asciidoc b/docs/reference/ml/apis/get-job-stats.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-job-stats.asciidoc
rename to docs/reference/ml/apis/get-job-stats.asciidoc
index bd59ee8b258..509d9448a69 100644
--- a/x-pack/docs/en/rest-api/ml/get-job-stats.asciidoc
+++ b/docs/reference/ml/apis/get-job-stats.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-job-stats]]
=== Get Job Statistics API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-job.asciidoc b/docs/reference/ml/apis/get-job.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/get-job.asciidoc
rename to docs/reference/ml/apis/get-job.asciidoc
index 2e95d8e01bb..c669ac6034e 100644
--- a/x-pack/docs/en/rest-api/ml/get-job.asciidoc
+++ b/docs/reference/ml/apis/get-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-job]]
=== Get Jobs API
++++
@@ -59,7 +60,7 @@ The following example gets configuration information for the `total-requests` jo
GET _xpack/ml/anomaly_detectors/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
The API returns the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/get-overall-buckets.asciidoc b/docs/reference/ml/apis/get-overall-buckets.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-overall-buckets.asciidoc
rename to docs/reference/ml/apis/get-overall-buckets.asciidoc
index f2581f4904e..f4818f3bbbe 100644
--- a/x-pack/docs/en/rest-api/ml/get-overall-buckets.asciidoc
+++ b/docs/reference/ml/apis/get-overall-buckets.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-overall-buckets]]
=== Get Overall Buckets API
++++
@@ -93,7 +94,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
roles provide these privileges. For more information, see
{xpack-ref}/security-privileges.html[Security Privileges] and
{xpack-ref}/built-in-roles.html[Built-in Roles].
-//<> and <>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/get-record.asciidoc b/docs/reference/ml/apis/get-record.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-record.asciidoc
rename to docs/reference/ml/apis/get-record.asciidoc
index 1870b441597..199cce15484 100644
--- a/x-pack/docs/en/rest-api/ml/get-record.asciidoc
+++ b/docs/reference/ml/apis/get-record.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-record]]
=== Get Records API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-snapshot.asciidoc b/docs/reference/ml/apis/get-snapshot.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-snapshot.asciidoc
rename to docs/reference/ml/apis/get-snapshot.asciidoc
index 24e82af1f19..e194d944b63 100644
--- a/x-pack/docs/en/rest-api/ml/get-snapshot.asciidoc
+++ b/docs/reference/ml/apis/get-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-snapshot]]
=== Get Model Snapshots API
++++
diff --git a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc b/docs/reference/ml/apis/jobcounts.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/jobcounts.asciidoc
rename to docs/reference/ml/apis/jobcounts.asciidoc
index d343cc23ae0..d0169e228d5 100644
--- a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc
+++ b/docs/reference/ml/apis/jobcounts.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-jobstats]]
=== Job Statistics
diff --git a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc b/docs/reference/ml/apis/jobresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/jobresource.asciidoc
rename to docs/reference/ml/apis/jobresource.asciidoc
index 5b109b1c21d..e0c314724e7 100644
--- a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc
+++ b/docs/reference/ml/apis/jobresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-job-resource]]
=== Job Resources
diff --git a/x-pack/docs/en/rest-api/ml-api.asciidoc b/docs/reference/ml/apis/ml-api.asciidoc
similarity index 61%
rename from x-pack/docs/en/rest-api/ml-api.asciidoc
rename to docs/reference/ml/apis/ml-api.asciidoc
index b48e9f93404..b8509f22152 100644
--- a/x-pack/docs/en/rest-api/ml-api.asciidoc
+++ b/docs/reference/ml/apis/ml-api.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-apis]]
== Machine Learning APIs
@@ -70,57 +71,57 @@ machine learning APIs and in advanced job configuration options in Kibana.
* <>
//ADD
-include::ml/post-calendar-event.asciidoc[]
-include::ml/put-calendar-job.asciidoc[]
+include::post-calendar-event.asciidoc[]
+include::put-calendar-job.asciidoc[]
//CLOSE
-include::ml/close-job.asciidoc[]
+include::close-job.asciidoc[]
//CREATE
-include::ml/put-calendar.asciidoc[]
-include::ml/put-datafeed.asciidoc[]
-include::ml/put-filter.asciidoc[]
-include::ml/put-job.asciidoc[]
+include::put-calendar.asciidoc[]
+include::put-datafeed.asciidoc[]
+include::put-filter.asciidoc[]
+include::put-job.asciidoc[]
//DELETE
-include::ml/delete-calendar.asciidoc[]
-include::ml/delete-datafeed.asciidoc[]
-include::ml/delete-calendar-event.asciidoc[]
-include::ml/delete-filter.asciidoc[]
-include::ml/delete-job.asciidoc[]
-include::ml/delete-calendar-job.asciidoc[]
-include::ml/delete-snapshot.asciidoc[]
+include::delete-calendar.asciidoc[]
+include::delete-datafeed.asciidoc[]
+include::delete-calendar-event.asciidoc[]
+include::delete-filter.asciidoc[]
+include::delete-job.asciidoc[]
+include::delete-calendar-job.asciidoc[]
+include::delete-snapshot.asciidoc[]
//FLUSH
-include::ml/flush-job.asciidoc[]
+include::flush-job.asciidoc[]
//FORECAST
-include::ml/forecast.asciidoc[]
+include::forecast.asciidoc[]
//GET
-include::ml/get-calendar.asciidoc[]
-include::ml/get-bucket.asciidoc[]
-include::ml/get-overall-buckets.asciidoc[]
-include::ml/get-category.asciidoc[]
-include::ml/get-datafeed.asciidoc[]
-include::ml/get-datafeed-stats.asciidoc[]
-include::ml/get-influencer.asciidoc[]
-include::ml/get-job.asciidoc[]
-include::ml/get-job-stats.asciidoc[]
-include::ml/get-snapshot.asciidoc[]
-include::ml/get-calendar-event.asciidoc[]
-include::ml/get-filter.asciidoc[]
-include::ml/get-record.asciidoc[]
+include::get-calendar.asciidoc[]
+include::get-bucket.asciidoc[]
+include::get-overall-buckets.asciidoc[]
+include::get-category.asciidoc[]
+include::get-datafeed.asciidoc[]
+include::get-datafeed-stats.asciidoc[]
+include::get-influencer.asciidoc[]
+include::get-job.asciidoc[]
+include::get-job-stats.asciidoc[]
+include::get-snapshot.asciidoc[]
+include::get-calendar-event.asciidoc[]
+include::get-filter.asciidoc[]
+include::get-record.asciidoc[]
//OPEN
-include::ml/open-job.asciidoc[]
+include::open-job.asciidoc[]
//POST
-include::ml/post-data.asciidoc[]
+include::post-data.asciidoc[]
//PREVIEW
-include::ml/preview-datafeed.asciidoc[]
+include::preview-datafeed.asciidoc[]
//REVERT
-include::ml/revert-snapshot.asciidoc[]
+include::revert-snapshot.asciidoc[]
//START/STOP
-include::ml/start-datafeed.asciidoc[]
-include::ml/stop-datafeed.asciidoc[]
+include::start-datafeed.asciidoc[]
+include::stop-datafeed.asciidoc[]
//UPDATE
-include::ml/update-datafeed.asciidoc[]
-include::ml/update-filter.asciidoc[]
-include::ml/update-job.asciidoc[]
-include::ml/update-snapshot.asciidoc[]
+include::update-datafeed.asciidoc[]
+include::update-filter.asciidoc[]
+include::update-job.asciidoc[]
+include::update-snapshot.asciidoc[]
//VALIDATE
-//include::ml/validate-detector.asciidoc[]
-//include::ml/validate-job.asciidoc[]
+//include::validate-detector.asciidoc[]
+//include::validate-job.asciidoc[]
diff --git a/x-pack/docs/en/rest-api/ml/open-job.asciidoc b/docs/reference/ml/apis/open-job.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/open-job.asciidoc
rename to docs/reference/ml/apis/open-job.asciidoc
index 59d5568ac77..c1e5977b734 100644
--- a/x-pack/docs/en/rest-api/ml/open-job.asciidoc
+++ b/docs/reference/ml/apis/open-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-open-job]]
=== Open Jobs API
++++
@@ -56,7 +57,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_open
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the job opens, you receive the following results:
[source,js]
@@ -65,5 +66,4 @@ When the job opens, you receive the following results:
"opened": true
}
----
-//CONSOLE
// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/post-calendar-event.asciidoc b/docs/reference/ml/apis/post-calendar-event.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/post-calendar-event.asciidoc
rename to docs/reference/ml/apis/post-calendar-event.asciidoc
index 41af0841d2e..998db409fc7 100644
--- a/x-pack/docs/en/rest-api/ml/post-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/post-calendar-event.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-post-calendar-event]]
=== Add Events to Calendar API
++++
@@ -52,7 +53,7 @@ POST _xpack/ml/calendars/planned-outages/events
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
The API returns the following results:
@@ -81,7 +82,7 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
For more information about these properties, see
<>.
diff --git a/x-pack/docs/en/rest-api/ml/post-data.asciidoc b/docs/reference/ml/apis/post-data.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/post-data.asciidoc
rename to docs/reference/ml/apis/post-data.asciidoc
index 40354d7f6f7..6a5a3d3d6cb 100644
--- a/x-pack/docs/en/rest-api/ml/post-data.asciidoc
+++ b/docs/reference/ml/apis/post-data.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-post-data]]
=== Post Data to Jobs API
++++
diff --git a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc b/docs/reference/ml/apis/preview-datafeed.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc
rename to docs/reference/ml/apis/preview-datafeed.asciidoc
index 637b506cb9a..7b9eccd9a59 100644
--- a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc
+++ b/docs/reference/ml/apis/preview-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-preview-datafeed]]
=== Preview {dfeeds-cap} API
++++
@@ -53,7 +54,7 @@ The following example obtains a preview of the `datafeed-farequote` {dfeed}:
GET _xpack/ml/datafeeds/datafeed-farequote/_preview
--------------------------------------------------
// CONSOLE
-// TEST[setup:farequote_datafeed]
+// TEST[skip:setup:farequote_datafeed]
The data that is returned for this example is as follows:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/put-calendar-job.asciidoc b/docs/reference/ml/apis/put-calendar-job.asciidoc
similarity index 93%
rename from x-pack/docs/en/rest-api/ml/put-calendar-job.asciidoc
rename to docs/reference/ml/apis/put-calendar-job.asciidoc
index 6940957b159..0563047043a 100644
--- a/x-pack/docs/en/rest-api/ml/put-calendar-job.asciidoc
+++ b/docs/reference/ml/apis/put-calendar-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-calendar-job]]
=== Add Jobs to Calendar API
++++
@@ -38,7 +39,7 @@ The following example associates the `planned-outages` calendar with the
PUT _xpack/ml/calendars/planned-outages/jobs/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_openjob]
+// TEST[skip:setup:calendar_outages_openjob]
The API returns the following results:
@@ -51,4 +52,4 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/put-calendar.asciidoc b/docs/reference/ml/apis/put-calendar.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/put-calendar.asciidoc
rename to docs/reference/ml/apis/put-calendar.asciidoc
index a82da5a2c0c..06b8e55d774 100644
--- a/x-pack/docs/en/rest-api/ml/put-calendar.asciidoc
+++ b/docs/reference/ml/apis/put-calendar.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-calendar]]
=== Create Calendar API
++++
@@ -44,6 +45,7 @@ The following example creates the `planned-outages` calendar:
PUT _xpack/ml/calendars/planned-outages
--------------------------------------------------
// CONSOLE
+// TEST[skip:need-license]
When the calendar is created, you receive the following results:
[source,js]
@@ -53,4 +55,4 @@ When the calendar is created, you receive the following results:
"job_ids": []
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/put-datafeed.asciidoc b/docs/reference/ml/apis/put-datafeed.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/put-datafeed.asciidoc
rename to docs/reference/ml/apis/put-datafeed.asciidoc
index 6b8ad932a1d..b5c99fc8e36 100644
--- a/x-pack/docs/en/rest-api/ml/put-datafeed.asciidoc
+++ b/docs/reference/ml/apis/put-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-datafeed]]
=== Create {dfeeds-cap} API
++++
@@ -107,7 +108,7 @@ PUT _xpack/ml/datafeeds/datafeed-total-requests
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the {dfeed} is created, you receive the following results:
[source,js]
@@ -132,4 +133,4 @@ When the {dfeed} is created, you receive the following results:
}
----
// TESTRESPONSE[s/"query_delay": "83474ms"/"query_delay": $body.query_delay/]
-// TESTRESPONSE[s/"query.boost": "1.0"/"query.boost": $body.query.boost/]
+// TESTRESPONSE[s/"query.boost": "1.0"/"query.boost": $body.query.boost/]
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/put-filter.asciidoc b/docs/reference/ml/apis/put-filter.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/put-filter.asciidoc
rename to docs/reference/ml/apis/put-filter.asciidoc
index d2982a56f61..165fe969758 100644
--- a/x-pack/docs/en/rest-api/ml/put-filter.asciidoc
+++ b/docs/reference/ml/apis/put-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-filter]]
=== Create Filter API
++++
@@ -55,6 +56,7 @@ PUT _xpack/ml/filters/safe_domains
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:need-licence]
When the filter is created, you receive the following response:
[source,js]
@@ -65,4 +67,4 @@ When the filter is created, you receive the following response:
"items": ["*.google.com", "wikipedia.org"]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/put-job.asciidoc b/docs/reference/ml/apis/put-job.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/put-job.asciidoc
rename to docs/reference/ml/apis/put-job.asciidoc
index 1c436f53d32..ce053484906 100644
--- a/x-pack/docs/en/rest-api/ml/put-job.asciidoc
+++ b/docs/reference/ml/apis/put-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-job]]
=== Create Jobs API
++++
@@ -104,6 +105,7 @@ PUT _xpack/ml/anomaly_detectors/total-requests
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:need-licence]
When the job is created, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc b/docs/reference/ml/apis/resultsresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/resultsresource.asciidoc
rename to docs/reference/ml/apis/resultsresource.asciidoc
index c28ed72aedb..d3abd094be7 100644
--- a/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc
+++ b/docs/reference/ml/apis/resultsresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-results-resource]]
=== Results Resources
diff --git a/x-pack/docs/en/rest-api/ml/revert-snapshot.asciidoc b/docs/reference/ml/apis/revert-snapshot.asciidoc
similarity index 67%
rename from x-pack/docs/en/rest-api/ml/revert-snapshot.asciidoc
rename to docs/reference/ml/apis/revert-snapshot.asciidoc
index 1dc3046ac4f..48fc65edf90 100644
--- a/x-pack/docs/en/rest-api/ml/revert-snapshot.asciidoc
+++ b/docs/reference/ml/apis/revert-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-revert-snapshot]]
=== Revert Model Snapshots API
++++
@@ -22,33 +23,6 @@ then it might be appropriate to reset the model state to a time before this
event. For example, you might consider reverting to a saved snapshot after Black
Friday or a critical system failure.
-////
-To revert to a saved snapshot, you must follow this sequence:
-. Close the job
-. Revert to a snapshot
-. Open the job
-. Send new data to the job
-
-When reverting to a snapshot, there is a choice to make about whether or not
-you want to keep the results that were created between the time of the snapshot
-and the current time. In the case of Black Friday for instance, you might want
-to keep the results and carry on processing data from the current time,
-though without the models learning the one-off behavior and compensating for it.
-However, say in the event of a critical system failure and you decide to reset
-and models to a previous known good state and process data from that time,
-it makes sense to delete the intervening results for the known bad period and
-resend data from that earlier time.
-
-Any gaps in data since the snapshot time will be treated as nulls and not modeled.
-If there is a partial bucket at the end of the snapshot and/or at the beginning
-of the new input data, then this will be ignored and treated as a gap.
-
-For jobs with many entities, the model state may be very large.
-If a model state is several GB, this could take 10-20 mins to revert depending
-upon machine spec and resources. If this is the case, please ensure this time
-is planned for.
-Model size (in bytes) is available as part of the Job Resource Model Size Stats.
-////
IMPORTANT: Before you revert to a saved snapshot, you must close the job.
@@ -77,7 +51,6 @@ If you want to resend data, then delete the intervening results.
You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
{xpack-ref}/security-privileges.html[Security Privileges].
-//<>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/snapshotresource.asciidoc b/docs/reference/ml/apis/snapshotresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/snapshotresource.asciidoc
rename to docs/reference/ml/apis/snapshotresource.asciidoc
index fb2e3d83de6..f068f6d94ed 100644
--- a/x-pack/docs/en/rest-api/ml/snapshotresource.asciidoc
+++ b/docs/reference/ml/apis/snapshotresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-snapshot-resource]]
=== Model Snapshot Resources
diff --git a/x-pack/docs/en/rest-api/ml/start-datafeed.asciidoc b/docs/reference/ml/apis/start-datafeed.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/start-datafeed.asciidoc
rename to docs/reference/ml/apis/start-datafeed.asciidoc
index fa3ea35a751..566e700dd04 100644
--- a/x-pack/docs/en/rest-api/ml/start-datafeed.asciidoc
+++ b/docs/reference/ml/apis/start-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-start-datafeed]]
=== Start {dfeeds-cap} API
++++
@@ -79,7 +80,6 @@ of the latest processed record.
You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
{xpack-ref}/security-privileges.html[Security Privileges].
-//<>.
==== Security Integration
@@ -101,7 +101,7 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_start
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the {dfeed} starts, you receive the following results:
[source,js]
@@ -110,5 +110,4 @@ When the {dfeed} starts, you receive the following results:
"started": true
}
----
-// CONSOLE
-// TESTRESPONSE
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/stop-datafeed.asciidoc b/docs/reference/ml/apis/stop-datafeed.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/stop-datafeed.asciidoc
rename to docs/reference/ml/apis/stop-datafeed.asciidoc
index 27872ff5a20..7ea48974f2d 100644
--- a/x-pack/docs/en/rest-api/ml/stop-datafeed.asciidoc
+++ b/docs/reference/ml/apis/stop-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-stop-datafeed]]
=== Stop {dfeeds-cap} API
++++
@@ -18,7 +19,6 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle.
`POST _xpack/ml/datafeeds/_all/_stop`
-//TBD: Can there be spaces between the items in the list?
===== Description
@@ -63,14 +63,14 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_stop
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_startdf]
+// TEST[skip:setup:server_metrics_startdf]
When the {dfeed} stops, you receive the following results:
+
[source,js]
----
{
"stopped": true
}
----
-// CONSOLE
-// TESTRESPONSE
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/update-datafeed.asciidoc b/docs/reference/ml/apis/update-datafeed.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/update-datafeed.asciidoc
rename to docs/reference/ml/apis/update-datafeed.asciidoc
index bc9462347c1..be55d864c87 100644
--- a/x-pack/docs/en/rest-api/ml/update-datafeed.asciidoc
+++ b/docs/reference/ml/apis/update-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-datafeed]]
=== Update {dfeeds-cap} API
++++
@@ -106,7 +107,7 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_update
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
When the {dfeed} is updated, you receive the full {dfeed} configuration with
the updated values:
diff --git a/x-pack/docs/en/rest-api/ml/update-filter.asciidoc b/docs/reference/ml/apis/update-filter.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/update-filter.asciidoc
rename to docs/reference/ml/apis/update-filter.asciidoc
index 1b6760dfed6..f551c8e923b 100644
--- a/x-pack/docs/en/rest-api/ml/update-filter.asciidoc
+++ b/docs/reference/ml/apis/update-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-filter]]
=== Update Filter API
++++
@@ -52,7 +53,7 @@ POST _xpack/ml/filters/safe_domains/_update
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
The API returns the following results:
@@ -64,4 +65,4 @@ The API returns the following results:
"items": ["*.google.com", "*.myorg.com"]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/update-job.asciidoc b/docs/reference/ml/apis/update-job.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/update-job.asciidoc
rename to docs/reference/ml/apis/update-job.asciidoc
index 852745e9dd9..58bfb2679d9 100644
--- a/x-pack/docs/en/rest-api/ml/update-job.asciidoc
+++ b/docs/reference/ml/apis/update-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-job]]
=== Update Jobs API
++++
@@ -121,7 +122,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_update
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the job is updated, you receive a summary of the job configuration
information, including the updated property values. For example:
@@ -177,4 +178,4 @@ information, including the updated property values. For example:
}
----
// TESTRESPONSE[s/"job_version": "7.0.0-alpha1"/"job_version": $body.job_version/]
-// TESTRESPONSE[s/"create_time": 1518808660505/"create_time": $body.create_time/]
+// TESTRESPONSE[s/"create_time": 1518808660505/"create_time": $body.create_time/]
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/update-snapshot.asciidoc b/docs/reference/ml/apis/update-snapshot.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/update-snapshot.asciidoc
rename to docs/reference/ml/apis/update-snapshot.asciidoc
index 8c98a7b7321..b58eebe810f 100644
--- a/x-pack/docs/en/rest-api/ml/update-snapshot.asciidoc
+++ b/docs/reference/ml/apis/update-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-snapshot]]
=== Update Model Snapshots API
++++
diff --git a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc b/docs/reference/ml/apis/validate-detector.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/validate-detector.asciidoc
rename to docs/reference/ml/apis/validate-detector.asciidoc
index ab8a0de442c..e525b1a1b20 100644
--- a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc
+++ b/docs/reference/ml/apis/validate-detector.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-valid-detector]]
=== Validate Detectors API
++++
@@ -44,6 +45,7 @@ POST _xpack/ml/anomaly_detectors/_validate/detector
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:needs-licence]
When the validation completes, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc b/docs/reference/ml/apis/validate-job.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/validate-job.asciidoc
rename to docs/reference/ml/apis/validate-job.asciidoc
index 0ccc5bc04e1..b8326058260 100644
--- a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc
+++ b/docs/reference/ml/apis/validate-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-valid-job]]
=== Validate Jobs API
++++
@@ -55,6 +56,7 @@ POST _xpack/ml/anomaly_detectors/_validate
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:needs-licence]
When the validation is complete, you receive the following results:
[source,js]
diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc
index f0e05538053..1a932fdd414 100644
--- a/docs/reference/redirects.asciidoc
+++ b/docs/reference/redirects.asciidoc
@@ -549,3 +549,9 @@ See <>.
=== X-Pack commands
See <>.
+
+[role="exclude",id="ml-api-definitions"]
+=== Machine learning API definitions
+
+See <>.
+
diff --git a/docs/reference/rest-api/defs.asciidoc b/docs/reference/rest-api/defs.asciidoc
new file mode 100644
index 00000000000..4eeedc55399
--- /dev/null
+++ b/docs/reference/rest-api/defs.asciidoc
@@ -0,0 +1,27 @@
+[role="xpack"]
+[[api-definitions]]
+== Definitions
+
+These resource definitions are used in {ml} and {security} APIs and in {kib}
+advanced {ml} job configuration options.
+
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+
+include::{es-repo-dir}/ml/apis/calendarresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/datafeedresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/filterresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/jobresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/jobcounts.asciidoc[]
+include::{es-repo-dir}/ml/apis/snapshotresource.asciidoc[]
+include::{xes-repo-dir}/rest-api/security/role-mapping-resources.asciidoc[]
+include::{es-repo-dir}/ml/apis/resultsresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/eventresource.asciidoc[]
diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc
index e1d607948e1..b80e8badf5b 100644
--- a/docs/reference/rest-api/index.asciidoc
+++ b/docs/reference/rest-api/index.asciidoc
@@ -22,8 +22,8 @@ include::info.asciidoc[]
include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[]
include::{es-repo-dir}/licensing/index.asciidoc[]
include::{es-repo-dir}/migration/migration.asciidoc[]
-include::{xes-repo-dir}/rest-api/ml-api.asciidoc[]
+include::{es-repo-dir}/ml/apis/ml-api.asciidoc[]
include::{es-repo-dir}/rollup/rollup-api.asciidoc[]
include::{xes-repo-dir}/rest-api/security.asciidoc[]
include::{xes-repo-dir}/rest-api/watcher.asciidoc[]
-include::{xes-repo-dir}/rest-api/defs.asciidoc[]
+include::defs.asciidoc[]
diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
index 29b16ca28f4..36d52d4475b 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
@@ -125,6 +125,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
closeShards(shard);
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33330")
public void testSyncerOnClosingShard() throws Exception {
IndexShard shard = newStartedShard(true);
AtomicBoolean syncActionCalled = new AtomicBoolean();
diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle
index 99e62532e2d..f027493b0ab 100644
--- a/x-pack/docs/build.gradle
+++ b/x-pack/docs/build.gradle
@@ -14,17 +14,6 @@ buildRestTests.expectedUnconvertedCandidates = [
'en/security/authorization/run-as-privilege.asciidoc',
'en/security/ccs-clients-integrations/http.asciidoc',
'en/security/authorization/custom-roles-provider.asciidoc',
- 'en/rest-api/ml/delete-snapshot.asciidoc',
- 'en/rest-api/ml/get-bucket.asciidoc',
- 'en/rest-api/ml/get-job-stats.asciidoc',
- 'en/rest-api/ml/get-overall-buckets.asciidoc',
- 'en/rest-api/ml/get-category.asciidoc',
- 'en/rest-api/ml/get-record.asciidoc',
- 'en/rest-api/ml/get-influencer.asciidoc',
- 'en/rest-api/ml/get-snapshot.asciidoc',
- 'en/rest-api/ml/post-data.asciidoc',
- 'en/rest-api/ml/revert-snapshot.asciidoc',
- 'en/rest-api/ml/update-snapshot.asciidoc',
'en/rest-api/watcher/stats.asciidoc',
'en/watcher/example-watches/watching-time-series-data.asciidoc',
]
diff --git a/x-pack/docs/en/rest-api/defs.asciidoc b/x-pack/docs/en/rest-api/defs.asciidoc
deleted file mode 100644
index ed53929391b..00000000000
--- a/x-pack/docs/en/rest-api/defs.asciidoc
+++ /dev/null
@@ -1,36 +0,0 @@
-[role="xpack"]
-[[ml-api-definitions]]
-== Definitions
-
-These resource definitions are used in {ml} and {security} APIs and in {kib}
-advanced {ml} job configuration options.
-
-* <>
-* <>
-* <>
-* <>
-* <>
-* <>
-* <>
-* <>
-* <>
-* <>
-
-[role="xpack"]
-include::ml/calendarresource.asciidoc[]
-[role="xpack"]
-include::ml/datafeedresource.asciidoc[]
-[role="xpack"]
-include::ml/filterresource.asciidoc[]
-[role="xpack"]
-include::ml/jobresource.asciidoc[]
-[role="xpack"]
-include::ml/jobcounts.asciidoc[]
-[role="xpack"]
-include::security/role-mapping-resources.asciidoc[]
-[role="xpack"]
-include::ml/snapshotresource.asciidoc[]
-[role="xpack"]
-include::ml/resultsresource.asciidoc[]
-[role="xpack"]
-include::ml/eventresource.asciidoc[]