Merge branch 'master' into ccr

* master:
  HLRC: ML Flush job (#33187)
  HLRC: Adding ML Job stats (#33183)
  LLREST: Drop deprecated methods (#33223)
  Mute testSyncerOnClosingShard
  [DOCS] Moves machine learning APIs to docs folder (#31118)

This commit is contained in:
commit 3197a6bbdd
@@ -28,10 +28,12 @@ import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.client.ml.FlushJobRequest;

import java.io.IOException;

@@ -126,6 +128,36 @@ final class MLRequestConverters {
        return request;
    }

    static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
        String endpoint = new EndpointBuilder()
                .addPathPartAsIs("_xpack")
                .addPathPartAsIs("ml")
                .addPathPartAsIs("anomaly_detectors")
                .addPathPart(flushJobRequest.getJobId())
                .addPathPartAsIs("_flush")
                .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
        String endpoint = new EndpointBuilder()
                .addPathPartAsIs("_xpack")
                .addPathPartAsIs("ml")
                .addPathPartAsIs("anomaly_detectors")
                .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
                .addPathPartAsIs("_stats")
                .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params params = new RequestConverters.Params(request);
        if (getJobStatsRequest.isAllowNoJobs() != null) {
            params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.isAllowNoJobs()));
        }
        return request;
    }

    static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException {
        String endpoint = new EndpointBuilder()
                .addPathPartAsIs("_xpack")
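As a sanity check of the two converters above (a hedged sketch, not part of the commit: the job id "my-job" is invented, and this assumes test code in the same package so the package-private methods are visible):

FlushJobRequest flushRequest = new FlushJobRequest("my-job"); // hypothetical job id
Request request = MLRequestConverters.flushJob(flushRequest);
assert HttpPost.METHOD_NAME.equals(request.getMethod());
assert "/_xpack/ml/anomaly_detectors/my-job/_flush".equals(request.getEndpoint());

GetJobStatsRequest statsRequest = new GetJobStatsRequest("my-job");
statsRequest.setAllowNoJobs(true);
request = MLRequestConverters.getJobStats(statsRequest);
assert HttpGet.METHOD_NAME.equals(request.getMethod());
assert "/_xpack/ml/anomaly_detectors/my-job/_stats".equals(request.getEndpoint());
assert "true".equals(request.getParameters().get("allow_no_jobs"));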
@@ -19,6 +19,11 @@
package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;

@@ -288,6 +293,101 @@ public final class MachineLearningClient {
                Collections.emptySet());
    }

    /**
     * Flushes internally buffered data for the given Machine Learning job, ensuring all data sent to the job has been processed.
     * This may cause new results to be calculated depending on the contents of the buffer.
     *
     * Both flush and close operations are similar; however, flush is more efficient
     * if you expect to send more data for analysis.
     *
     * When flushing, the job remains open and is available to continue analyzing data.
     * A close operation additionally prunes and persists the model state to disk, and the
     * job must be opened again before analyzing further data.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
     *
     * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return A {@link FlushJobResponse} object indicating whether the flush was acknowledged
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::flushJob,
                options,
                FlushJobResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Flushes internally buffered data for the given Machine Learning job asynchronously, ensuring all data sent to the job
     * has been processed. This may cause new results to be calculated depending on the contents of the buffer.
     *
     * Both flush and close operations are similar; however, flush is more efficient
     * if you expect to send more data for analysis.
     *
     * When flushing, the job remains open and is available to continue analyzing data.
     * A close operation additionally prunes and persists the model state to disk, and the
     * job must be opened again before analyzing further data.
     *
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
     *
     * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified upon request completion
     */
    public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::flushJob,
                options,
                FlushJobResponse::fromXContent,
                listener,
                Collections.emptySet());
    }

    /**
     * Gets usage statistics for one or more Machine Learning jobs
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
     * </p>
     * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return {@link GetJobStatsResponse} response object containing
     * the {@link JobStats} objects and the number of jobs found
     * @throws IOException when there is a serialization issue sending the request or receiving the response
     */
    public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                MLRequestConverters::getJobStats,
                options,
                GetJobStatsResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Gets usage statistics for one or more Machine Learning jobs, asynchronously.
     * <p>
     * For additional info
     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
     * </p>
     * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
     */
    public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener<GetJobStatsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                MLRequestConverters::getJobStats,
                options,
                GetJobStatsResponse::fromXContent,
                listener,
                Collections.emptySet());
    }

    /**
     * Gets the records for a Machine Learning Job.
     * <p>
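A minimal usage sketch of the two new client methods (not part of the commit; assumes an already-configured RestHighLevelClient named `client`, and the job id "my-job" is invented):

FlushJobRequest flushRequest = new FlushJobRequest("my-job");
flushRequest.setCalcInterim(true); // also calculate interim results for the latest data
FlushJobResponse flushResponse = client.machineLearning().flushJob(flushRequest, RequestOptions.DEFAULT);

GetJobStatsRequest statsRequest = new GetJobStatsRequest("my-job");
GetJobStatsResponse statsResponse = client.machineLearning().getJobStats(statsRequest, RequestOptions.DEFAULT);
for (JobStats stats : statsResponse.jobStats()) {
    System.out.println(stats.getJobId() + " is " + stats.getState().value());
}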
@@ -0,0 +1,195 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * Request object to flush a given Machine Learning job.
 */
public class FlushJobRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
    public static final ParseField START = new ParseField("start");
    public static final ParseField END = new ParseField("end");
    public static final ParseField ADVANCE_TIME = new ParseField("advance_time");
    public static final ParseField SKIP_TIME = new ParseField("skip_time");

    public static final ConstructingObjectParser<FlushJobRequest, Void> PARSER =
        new ConstructingObjectParser<>("flush_job_request", (a) -> new FlushJobRequest((String) a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM);
        PARSER.declareString(FlushJobRequest::setStart, START);
        PARSER.declareString(FlushJobRequest::setEnd, END);
        PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME);
        PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME);
    }

    private final String jobId;
    private Boolean calcInterim;
    private String start;
    private String end;
    private String advanceTime;
    private String skipTime;

    /**
     * Create a new flush job request
     *
     * @param jobId The job ID of the job to flush
     */
    public FlushJobRequest(String jobId) {
        this.jobId = jobId;
    }

    public String getJobId() {
        return jobId;
    }

    public Boolean getCalcInterim() {
        return calcInterim;
    }

    /**
     * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period.
     *
     * @param calcInterim defaults to {@code false}.
     */
    public void setCalcInterim(boolean calcInterim) {
        this.calcInterim = calcInterim;
    }

    public String getStart() {
        return start;
    }

    /**
     * When used in conjunction with {@link FlushJobRequest#calcInterim},
     * specifies the start of the range of buckets on which to calculate interim results.
     *
     * @param start the beginning of the range of buckets; may be epoch seconds, epoch millis or an ISO string
     */
    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    /**
     * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range
     * of buckets on which to calculate interim results.
     *
     * @param end the end of the range of buckets; may be epoch seconds, epoch millis or an ISO string
     */
    public void setEnd(String end) {
        this.end = end;
    }

    public String getAdvanceTime() {
        return advanceTime;
    }

    /**
     * Specifies to advance to a particular time value.
     * Results are generated and the model is updated for data from the specified time interval.
     *
     * @param advanceTime String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setAdvanceTime(String advanceTime) {
        this.advanceTime = advanceTime;
    }

    public String getSkipTime() {
        return skipTime;
    }

    /**
     * Specifies to skip to a particular time value.
     * Results are not generated and the model is not updated for data from the specified time interval.
     *
     * @param skipTime String representation of a timestamp; may be epoch seconds, epoch millis or an ISO string
     */
    public void setSkipTime(String skipTime) {
        this.skipTime = skipTime;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        FlushJobRequest other = (FlushJobRequest) obj;
        return Objects.equals(jobId, other.jobId) &&
            Objects.equals(calcInterim, other.calcInterim) &&
            Objects.equals(start, other.start) &&
            Objects.equals(end, other.end) &&
            Objects.equals(advanceTime, other.advanceTime) &&
            Objects.equals(skipTime, other.skipTime);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        if (calcInterim != null) {
            builder.field(CALC_INTERIM.getPreferredName(), calcInterim);
        }
        if (start != null) {
            builder.field(START.getPreferredName(), start);
        }
        if (end != null) {
            builder.field(END.getPreferredName(), end);
        }
        if (advanceTime != null) {
            builder.field(ADVANCE_TIME.getPreferredName(), advanceTime);
        }
        if (skipTime != null) {
            builder.field(SKIP_TIME.getPreferredName(), skipTime);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }
}
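To make the request body concrete, a hedged sketch of what toXContent above emits for a populated request (values invented):

FlushJobRequest request = new FlushJobRequest("my-job"); // hypothetical job id
request.setCalcInterim(true);
request.setAdvanceTime("2018-08-31T00:00:00Z");
// toXContent then renders:
// {"job_id":"my-job","calc_interim":true,"advance_time":"2018-08-31T00:00:00Z"}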
@@ -0,0 +1,112 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Date;
import java.util.Objects;

/**
 * Response object containing flush acknowledgement and additional data
 */
public class FlushJobResponse extends ActionResponse implements ToXContentObject {

    public static final ParseField FLUSHED = new ParseField("flushed");
    public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end");

    public static final ConstructingObjectParser<FlushJobResponse, Void> PARSER =
        new ConstructingObjectParser<>("flush_job_response",
            true,
            (a) -> {
                boolean flushed = (boolean) a[0];
                Date date = a[1] == null ? null : new Date((long) a[1]);
                return new FlushJobResponse(flushed, date);
            });

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END);
    }

    public static FlushJobResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final boolean flushed;
    private final Date lastFinalizedBucketEnd;

    public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) {
        this.flushed = flushed;
        this.lastFinalizedBucketEnd = lastFinalizedBucketEnd;
    }

    /**
     * Was the job successfully flushed or not
     */
    public boolean isFlushed() {
        return flushed;
    }

    /**
     * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
     */
    @Nullable
    public Date getLastFinalizedBucketEnd() {
        return lastFinalizedBucketEnd;
    }

    @Override
    public int hashCode() {
        return Objects.hash(flushed, lastFinalizedBucketEnd);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        FlushJobResponse that = (FlushJobResponse) other;
        return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FLUSHED.getPreferredName(), flushed);
        if (lastFinalizedBucketEnd != null) {
            builder.timeField(LAST_FINALIZED_BUCKET_END.getPreferredName(),
                LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", lastFinalizedBucketEnd.getTime());
        }
        builder.endObject();
        return builder;
    }
}
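A hedged sketch of the wire format the parser above consumes (the JSON values are invented; assumes the usual org.elasticsearch.common.xcontent imports):

String json = "{\"flushed\": true, \"last_finalized_bucket_end\": 1533408000000}";
try (XContentParser parser = XContentType.JSON.xContent().createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    FlushJobResponse response = FlushJobResponse.fromXContent(parser);
    assert response.isFlushed();
    assert new Date(1533408000000L).equals(response.getLastFinalizedBucketEnd());
}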
@@ -0,0 +1,146 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

/**
 * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds.
 *
 * `_all` explicitly gets all the jobs' statistics in the cluster.
 * An empty request (no `jobId`s) implicitly gets all the jobs' statistics in the cluster.
 */
public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetJobStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
        "get_jobs_stats_request", a -> new GetJobStatsRequest((List<String>) a[0]));

    static {
        PARSER.declareField(ConstructingObjectParser.constructorArg(),
            p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
            Job.ID, ObjectParser.ValueType.STRING_ARRAY);
        PARSER.declareBoolean(GetJobStatsRequest::setAllowNoJobs, ALLOW_NO_JOBS);
    }

    private static final String ALL_JOBS = "_all";

    private final List<String> jobIds;
    private Boolean allowNoJobs;

    /**
     * Explicitly gets all jobs' statistics
     *
     * @return a {@link GetJobStatsRequest} for all existing jobs
     */
    public static GetJobStatsRequest getAllJobStatsRequest() {
        return new GetJobStatsRequest(ALL_JOBS);
    }

    GetJobStatsRequest(List<String> jobIds) {
        if (jobIds.stream().anyMatch(Objects::isNull)) {
            throw new NullPointerException("jobIds must not contain null values");
        }
        this.jobIds = new ArrayList<>(jobIds);
    }

    /**
     * Gets statistics for the specified jobs via their unique jobIds
     *
     * @param jobIds must be non-null and each jobId must be non-null
     */
    public GetJobStatsRequest(String... jobIds) {
        this(Arrays.asList(jobIds));
    }

    /**
     * All the jobIds for which to get statistics
     */
    public List<String> getJobIds() {
        return jobIds;
    }

    public Boolean isAllowNoJobs() {
        return this.allowNoJobs;
    }

    /**
     * Whether to ignore if a wildcard expression matches no jobs.
     *
     * This includes the `_all` string or when no jobs have been specified.
     *
     * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
     */
    public void setAllowNoJobs(boolean allowNoJobs) {
        this.allowNoJobs = allowNoJobs;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobIds, allowNoJobs);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        GetJobStatsRequest that = (GetJobStatsRequest) other;
        return Objects.equals(jobIds, that.jobIds) &&
            Objects.equals(allowNoJobs, that.allowNoJobs);
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
        if (allowNoJobs != null) {
            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
        }
        builder.endObject();
        return builder;
    }
}
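A brief sketch of the three ways this request can target jobs (the ids are invented):

GetJobStatsRequest all = GetJobStatsRequest.getAllJobStatsRequest(); // explicit `_all`
GetJobStatsRequest implicitAll = new GetJobStatsRequest();           // no ids: implicitly `_all`
GetJobStatsRequest some = new GetJobStatsRequest("job-1", "job-2");  // specific jobs
some.setAllowNoJobs(false); // error rather than an empty result if nothing matches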
@@ -0,0 +1,88 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.client.ml.job.stats.JobStats;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * Contains a {@link List} of the found {@link JobStats} objects and the total count found
 */
public class GetJobStatsResponse extends AbstractResultResponse<JobStats> {

    public static final ParseField RESULTS_FIELD = new ParseField("jobs");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetJobStatsResponse, Void> PARSER =
        new ConstructingObjectParser<>("jobs_stats_response", true,
            a -> new GetJobStatsResponse((List<JobStats>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD);
        PARSER.declareLong(constructorArg(), COUNT);
    }

    GetJobStatsResponse(List<JobStats> jobStats, long count) {
        super(RESULTS_FIELD, jobStats, count);
    }

    /**
     * The collection of {@link JobStats} objects found in the query
     */
    public List<JobStats> jobStats() {
        return results;
    }

    public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public int hashCode() {
        return Objects.hash(results, count);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        GetJobStatsResponse other = (GetJobStatsResponse) obj;
        return Objects.equals(results, other.results) && count == other.count;
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }
}
@@ -0,0 +1,150 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;

/**
 * A POJO class containing an Elasticsearch node's attributes
 */
public class NodeAttributes implements ToXContentObject {

    public static final ParseField ID = new ParseField("id");
    public static final ParseField NAME = new ParseField("name");
    public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id");
    public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address");
    public static final ParseField ATTRIBUTES = new ParseField("attributes");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<NodeAttributes, Void> PARSER =
        new ConstructingObjectParser<>("node", true,
            (a) -> {
                int i = 0;
                String id = (String) a[i++];
                String name = (String) a[i++];
                String ephemeralId = (String) a[i++];
                String transportAddress = (String) a[i++];
                Map<String, String> attributes = (Map<String, String>) a[i];
                return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes);
            });

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS);
        PARSER.declareField(ConstructingObjectParser.constructorArg(),
            (p, c) -> p.mapStrings(),
            ATTRIBUTES,
            ObjectParser.ValueType.OBJECT);
    }

    private final String id;
    private final String name;
    private final String ephemeralId;
    private final String transportAddress;
    private final Map<String, String> attributes;

    public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map<String, String> attributes) {
        this.id = id;
        this.name = name;
        this.ephemeralId = ephemeralId;
        this.transportAddress = transportAddress;
        this.attributes = Collections.unmodifiableMap(attributes);
    }

    /**
     * The unique identifier of the node.
     */
    public String getId() {
        return id;
    }

    /**
     * The node name.
     */
    public String getName() {
        return name;
    }

    /**
     * The ephemeral id of the node.
     */
    public String getEphemeralId() {
        return ephemeralId;
    }

    /**
     * The host and port where transport HTTP connections are accepted.
     */
    public String getTransportAddress() {
        return transportAddress;
    }

    /**
     * Additional attributes related to this node, e.g. {"ml.max_open_jobs": "10"}.
     */
    public Map<String, String> getAttributes() {
        return attributes;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ID.getPreferredName(), id);
        builder.field(NAME.getPreferredName(), name);
        builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId);
        builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress);
        builder.field(ATTRIBUTES.getPreferredName(), attributes);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, name, ephemeralId, transportAddress, attributes);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        NodeAttributes that = (NodeAttributes) other;
        return Objects.equals(id, that.id) &&
            Objects.equals(name, that.name) &&
            Objects.equals(ephemeralId, that.ephemeralId) &&
            Objects.equals(transportAddress, that.transportAddress) &&
            Objects.equals(attributes, that.attributes);
    }
}
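For orientation, a sketch of the JSON object this class maps to (all values invented):

// {
//   "id": "pMcSQ9T-RFSGWhbcWH2fpA",
//   "name": "node-0",
//   "ephemeral_id": "FqlkI8WcR9ObXx0fMb6pSw",
//   "transport_address": "127.0.0.1:9300",
//   "attributes": { "ml.max_open_jobs": "10" }
// }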
@@ -0,0 +1,39 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml.job.config;

import java.util.Locale;

/**
 * Jobs, whether running or complete, are in one of these states.
 * When a job is created it is initialised in the CLOSED state,
 * i.e. it is not running.
 */
public enum JobState {

    CLOSING, CLOSED, OPENED, FAILED, OPENING;

    public static JobState fromString(String name) {
        return valueOf(name.trim().toUpperCase(Locale.ROOT));
    }

    public String value() {
        return name().toLowerCase(Locale.ROOT);
    }
}
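A quick illustration of the round trip between the enum and its lowercase wire value:

JobState state = JobState.fromString("opened"); // parsing is case-insensitive
assert state == JobState.OPENED;
assert "opened".equals(state.value());          // serializing back to the wire form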
@@ -0,0 +1,174 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml.job.stats;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * A class to hold statistics about forecasts.
 */
public class ForecastStats implements ToXContentObject {

    public static final ParseField TOTAL = new ParseField("total");
    public static final ParseField FORECASTED_JOBS = new ParseField("forecasted_jobs");
    public static final ParseField MEMORY_BYTES = new ParseField("memory_bytes");
    public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms");
    public static final ParseField RECORDS = new ParseField("records");
    public static final ParseField STATUS = new ParseField("status");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<ForecastStats, Void> PARSER =
        new ConstructingObjectParser<>("forecast_stats",
            true,
            (a) -> {
                int i = 0;
                long total = (long) a[i++];
                SimpleStats memoryStats = (SimpleStats) a[i++];
                SimpleStats recordStats = (SimpleStats) a[i++];
                SimpleStats runtimeStats = (SimpleStats) a[i++];
                Map<String, Long> statusCounts = (Map<String, Long>) a[i];
                return new ForecastStats(total, memoryStats, recordStats, runtimeStats, statusCounts);
            });

    static {
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, MEMORY_BYTES);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, RECORDS);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, PROCESSING_TIME_MS);
        PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
            p -> {
                Map<String, Long> counts = new HashMap<>();
                p.map().forEach((key, value) -> counts.put(key, ((Number) value).longValue()));
                return counts;
            }, STATUS, ObjectParser.ValueType.OBJECT);
    }

    private final long total;
    private final long forecastedJobs;
    private SimpleStats memoryStats;
    private SimpleStats recordStats;
    private SimpleStats runtimeStats;
    private Map<String, Long> statusCounts;

    public ForecastStats(long total,
                         SimpleStats memoryStats,
                         SimpleStats recordStats,
                         SimpleStats runtimeStats,
                         Map<String, Long> statusCounts) {
        this.total = total;
        this.forecastedJobs = total > 0 ? 1 : 0;
        if (total > 0) {
            this.memoryStats = Objects.requireNonNull(memoryStats);
            this.recordStats = Objects.requireNonNull(recordStats);
            this.runtimeStats = Objects.requireNonNull(runtimeStats);
            this.statusCounts = Collections.unmodifiableMap(statusCounts);
        }
    }

    /**
     * The number of forecasts currently available for this model.
     */
    public long getTotal() {
        return total;
    }

    /**
     * The number of jobs that have at least one forecast.
     */
    public long getForecastedJobs() {
        return forecastedJobs;
    }

    /**
     * Statistics about the memory usage: minimum, maximum, average and total.
     */
    public SimpleStats getMemoryStats() {
        return memoryStats;
    }

    /**
     * Statistics about the number of forecast records: minimum, maximum, average and total.
     */
    public SimpleStats getRecordStats() {
        return recordStats;
    }

    /**
     * Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total.
     */
    public SimpleStats getRuntimeStats() {
        return runtimeStats;
    }

    /**
     * Counts per forecast status, for example: {"finished" : 2}.
     */
    public Map<String, Long> getStatusCounts() {
        return statusCounts;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(TOTAL.getPreferredName(), total);
        builder.field(FORECASTED_JOBS.getPreferredName(), forecastedJobs);

        if (total > 0) {
            builder.field(MEMORY_BYTES.getPreferredName(), memoryStats);
            builder.field(RECORDS.getPreferredName(), recordStats);
            builder.field(PROCESSING_TIME_MS.getPreferredName(), runtimeStats);
            builder.field(STATUS.getPreferredName(), statusCounts);
        }
        return builder.endObject();
    }

    @Override
    public int hashCode() {
        return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        ForecastStats other = (ForecastStats) obj;
        return Objects.equals(total, other.total) &&
            Objects.equals(forecastedJobs, other.forecastedJobs) &&
            Objects.equals(memoryStats, other.memoryStats) &&
            Objects.equals(recordStats, other.recordStats) &&
            Objects.equals(runtimeStats, other.runtimeStats) &&
            Objects.equals(statusCounts, other.statusCounts);
    }
}
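A sketch of the forecasts_stats object the parser above consumes (numbers invented; the nested objects are SimpleStats, and forecasted_jobs is derived from total rather than parsed):

// {
//   "total": 2,
//   "memory_bytes":       { "total": 4096.0, "min": 1024.0, "max": 3072.0, "avg": 2048.0 },
//   "records":            { "total": 20.0,   "min": 8.0,    "max": 12.0,   "avg": 10.0 },
//   "processing_time_ms": { "total": 90.0,   "min": 40.0,   "max": 50.0,   "avg": 45.0 },
//   "status":             { "finished": 2 }
// }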
@ -0,0 +1,225 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.client.ml.job.stats;
|
||||||
|
|
||||||
|
import org.elasticsearch.client.ml.job.config.Job;
|
||||||
|
import org.elasticsearch.client.ml.job.config.JobState;
|
||||||
|
import org.elasticsearch.client.ml.job.process.DataCounts;
|
||||||
|
import org.elasticsearch.client.ml.job.process.ModelSizeStats;
|
||||||
|
import org.elasticsearch.common.Nullable;
|
||||||
|
import org.elasticsearch.common.ParseField;
|
||||||
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
|
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||||
|
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||||
|
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
|
import org.elasticsearch.client.ml.NodeAttributes;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.Objects;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class containing the statistics for a Machine Learning job.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public class JobStats implements ToXContentObject {
|
||||||
|
|
||||||
|
private static final ParseField DATA_COUNTS = new ParseField("data_counts");
|
||||||
|
private static final ParseField MODEL_SIZE_STATS = new ParseField("model_size_stats");
|
||||||
|
private static final ParseField FORECASTS_STATS = new ParseField("forecasts_stats");
|
||||||
|
private static final ParseField STATE = new ParseField("state");
|
||||||
|
private static final ParseField NODE = new ParseField("node");
|
||||||
|
private static final ParseField OPEN_TIME = new ParseField("open_time");
|
||||||
|
private static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
|
||||||
|
|
||||||
|
public static final ConstructingObjectParser<JobStats, Void> PARSER =
|
||||||
|
new ConstructingObjectParser<>("job_stats",
|
||||||
|
true,
|
||||||
|
(a) -> {
|
||||||
|
int i = 0;
|
||||||
|
String jobId = (String) a[i++];
|
||||||
|
DataCounts dataCounts = (DataCounts) a[i++];
|
||||||
|
JobState jobState = (JobState) a[i++];
|
||||||
|
ModelSizeStats.Builder modelSizeStatsBuilder = (ModelSizeStats.Builder) a[i++];
|
||||||
|
ModelSizeStats modelSizeStats = modelSizeStatsBuilder == null ? null : modelSizeStatsBuilder.build();
|
||||||
|
ForecastStats forecastStats = (ForecastStats) a[i++];
|
||||||
|
NodeAttributes node = (NodeAttributes) a[i++];
|
||||||
|
String assignmentExplanation = (String) a[i++];
|
||||||
|
TimeValue openTime = (TimeValue) a[i];
|
||||||
|
return new JobStats(jobId,
|
||||||
|
dataCounts,
|
||||||
|
jobState,
|
||||||
|
modelSizeStats,
|
||||||
|
forecastStats,
|
||||||
|
node,
|
||||||
|
assignmentExplanation,
|
||||||
|
openTime);
|
||||||
|
});
|
||||||
|
|
||||||
|
static {
|
||||||
|
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
|
||||||
|
PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataCounts.PARSER, DATA_COUNTS);
|
||||||
|
PARSER.declareField(ConstructingObjectParser.constructorArg(),
|
||||||
|
(p) -> JobState.fromString(p.text()),
|
||||||
|
STATE,
|
||||||
|
ObjectParser.ValueType.VALUE);
|
||||||
|
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER, MODEL_SIZE_STATS);
|
||||||
|
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastStats.PARSER, FORECASTS_STATS);
|
||||||
|
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
|
||||||
|
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
|
||||||
|
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
|
||||||
|
(p, c) -> TimeValue.parseTimeValue(p.textOrNull(), OPEN_TIME.getPreferredName()),
|
||||||
|
OPEN_TIME,
|
||||||
|
ObjectParser.ValueType.STRING_OR_NULL);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private final String jobId;
|
||||||
|
private final DataCounts dataCounts;
|
||||||
|
private final JobState state;
|
||||||
|
private final ModelSizeStats modelSizeStats;
|
||||||
|
private final ForecastStats forecastStats;
|
||||||
|
private final NodeAttributes node;
|
||||||
|
private final String assignmentExplanation;
|
||||||
|
private final TimeValue openTime;
|
||||||
|
|
||||||
|
JobStats(String jobId, DataCounts dataCounts, JobState state, @Nullable ModelSizeStats modelSizeStats,
|
||||||
|
@Nullable ForecastStats forecastStats, @Nullable NodeAttributes node,
|
||||||
|
@Nullable String assignmentExplanation, @Nullable TimeValue opentime) {
|
||||||
|
        this.jobId = Objects.requireNonNull(jobId);
        this.dataCounts = Objects.requireNonNull(dataCounts);
        this.state = Objects.requireNonNull(state);
        this.modelSizeStats = modelSizeStats;
        this.forecastStats = forecastStats;
        this.node = node;
        this.assignmentExplanation = assignmentExplanation;
        this.openTime = opentime;
    }

    /**
     * The jobId referencing the job for these statistics
     */
    public String getJobId() {
        return jobId;
    }

    /**
     * An object that describes the number of records processed and any related error counts
     * See {@link DataCounts}
     */
    public DataCounts getDataCounts() {
        return dataCounts;
    }

    /**
     * An object that provides information about the size and contents of the model.
     * See {@link ModelSizeStats}
     */
    public ModelSizeStats getModelSizeStats() {
        return modelSizeStats;
    }

    /**
     * An object that provides statistical information about forecasts of this job.
     * See {@link ForecastStats}
     */
    public ForecastStats getForecastStats() {
        return forecastStats;
    }

    /**
     * The status of the job
     * See {@link JobState}
     */
    public JobState getState() {
        return state;
    }

    /**
     * For open jobs only, contains information about the node where the job runs
     * See {@link NodeAttributes}
     */
    public NodeAttributes getNode() {
        return node;
    }

    /**
     * For open jobs only, contains messages relating to the selection of a node to run the job.
     */
    public String getAssignmentExplanation() {
        return assignmentExplanation;
    }

    /**
     * For open jobs only, the elapsed time for which the job has been open
     */
    public TimeValue getOpenTime() {
        return openTime;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(DATA_COUNTS.getPreferredName(), dataCounts);
        builder.field(STATE.getPreferredName(), state.toString());
        if (modelSizeStats != null) {
            builder.field(MODEL_SIZE_STATS.getPreferredName(), modelSizeStats);
        }
        if (forecastStats != null) {
            builder.field(FORECASTS_STATS.getPreferredName(), forecastStats);
        }
        if (node != null) {
            builder.field(NODE.getPreferredName(), node);
        }
        if (assignmentExplanation != null) {
            builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
        }
        if (openTime != null) {
            builder.field(OPEN_TIME.getPreferredName(), openTime.getStringRep());
        }
        return builder.endObject();
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, dataCounts, modelSizeStats, forecastStats, state, node, assignmentExplanation, openTime);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        JobStats other = (JobStats) obj;
        return Objects.equals(jobId, other.jobId) &&
            Objects.equals(this.dataCounts, other.dataCounts) &&
            Objects.equals(this.modelSizeStats, other.modelSizeStats) &&
            Objects.equals(this.forecastStats, other.forecastStats) &&
            Objects.equals(this.state, other.state) &&
            Objects.equals(this.node, other.node) &&
            Objects.equals(this.assignmentExplanation, other.assignmentExplanation) &&
            Objects.equals(this.openTime, other.openTime);
    }
}
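As a quick orientation for readers of the hunk above: toXContent always emits job_id, data_counts and state, and only adds model_size_stats, forecasts_stats, node, assignment_explanation and open_time when they are non-null. A minimal sketch of rendering a stats object back to JSON; jobStats here is a placeholder for an instance obtained from a GetJobStatsResponse, and the XContent helpers are standard Elasticsearch utilities rather than part of this change:

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.xcontent.ToXContent;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    // Render the stats to JSON; the optional sections appear only when set.
    XContentBuilder builder = JsonXContent.contentBuilder();
    jobStats.toXContent(builder, ToXContent.EMPTY_PARAMS);
    String json = Strings.toString(builder);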
@@ -0,0 +1,117 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml.job.stats;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * Helper class for min, max, avg and total statistics for a quantity
 */
public class SimpleStats implements ToXContentObject {

    public static final ParseField MIN = new ParseField("min");
    public static final ParseField MAX = new ParseField("max");
    public static final ParseField AVG = new ParseField("avg");
    public static final ParseField TOTAL = new ParseField("total");

    public static final ConstructingObjectParser<SimpleStats, Void> PARSER = new ConstructingObjectParser<>("simple_stats", true,
        (a) -> {
            int i = 0;
            double total = (double) a[i++];
            double min = (double) a[i++];
            double max = (double) a[i++];
            double avg = (double) a[i++];
            return new SimpleStats(total, min, max, avg);
        });

    static {
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), TOTAL);
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MIN);
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX);
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AVG);
    }

    private final double total;
    private final double min;
    private final double max;
    private final double avg;

    SimpleStats(double total, double min, double max, double avg) {
        this.total = total;
        this.min = min;
        this.max = max;
        this.avg = avg;
    }

    public double getMin() {
        return min;
    }

    public double getMax() {
        return max;
    }

    public double getAvg() {
        return avg;
    }

    public double getTotal() {
        return total;
    }

    @Override
    public int hashCode() {
        return Objects.hash(total, min, max, avg);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        SimpleStats other = (SimpleStats) obj;
        return Objects.equals(total, other.total) &&
            Objects.equals(min, other.min) &&
            Objects.equals(avg, other.avg) &&
            Objects.equals(max, other.max);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(MIN.getPreferredName(), min);
        builder.field(MAX.getPreferredName(), max);
        builder.field(AVG.getPreferredName(), avg);
        builder.field(TOTAL.getPreferredName(), total);
        builder.endObject();
        return builder;
    }
}
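For illustration, a minimal sketch of feeding a JSON document through the parser declared above. The JsonXContent/DeprecationHandler plumbing is assumed to be on the caller's classpath (it is standard Elasticsearch infrastructure, not part of this commit), and field order in the JSON does not matter because the parser matches by name:

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    String json = "{\"min\":1.0,\"max\":9.0,\"avg\":5.0,\"total\":100.0}";
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        SimpleStats stats = SimpleStats.PARSER.parse(parser, null);  // throws IOException on malformed input
        assert stats.getAvg() == 5.0;
    }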
@@ -36,6 +36,8 @@ import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
 import org.elasticsearch.test.ESTestCase;

 import java.io.ByteArrayOutputStream;

@@ -139,6 +141,44 @@ public class MLRequestConvertersTests extends ESTestCase {
         }
     }

+    public void testFlushJob() throws Exception {
+        String jobId = randomAlphaOfLength(10);
+        FlushJobRequest flushJobRequest = new FlushJobRequest(jobId);
+
+        Request request = MLRequestConverters.flushJob(flushJobRequest);
+        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+        assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_flush", request.getEndpoint());
+        assertEquals("{\"job_id\":\"" + jobId + "\"}", requestEntityToString(request));
+
+        flushJobRequest.setSkipTime("1000");
+        flushJobRequest.setStart("105");
+        flushJobRequest.setEnd("200");
+        flushJobRequest.setAdvanceTime("100");
+        flushJobRequest.setCalcInterim(true);
+        request = MLRequestConverters.flushJob(flushJobRequest);
+        assertEquals(
+            "{\"job_id\":\"" + jobId + "\",\"calc_interim\":true,\"start\":\"105\"," +
+                "\"end\":\"200\",\"advance_time\":\"100\",\"skip_time\":\"1000\"}",
+            requestEntityToString(request));
+    }
+
+    public void testGetJobStats() {
+        GetJobStatsRequest getJobStatsRequest = new GetJobStatsRequest();
+
+        Request request = MLRequestConverters.getJobStats(getJobStatsRequest);
+
+        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+        assertEquals("/_xpack/ml/anomaly_detectors/_stats", request.getEndpoint());
+        assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+
+        getJobStatsRequest = new GetJobStatsRequest("job1", "jobs*");
+        getJobStatsRequest.setAllowNoJobs(true);
+        request = MLRequestConverters.getJobStats(getJobStatsRequest);
+
+        assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
+        assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+    }
+
     private static Job createValidJob(String jobId) {
         AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
                 Detector.builder().setFunction("count").build()));
@@ -19,6 +19,12 @@
 package org.elasticsearch.client;

 import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.stats.JobStats;
 import org.elasticsearch.client.ml.CloseJobRequest;
 import org.elasticsearch.client.ml.CloseJobResponse;
 import org.elasticsearch.client.ml.DeleteJobRequest;

@@ -34,6 +40,8 @@ import org.elasticsearch.client.ml.job.config.DataDescription;
 import org.elasticsearch.client.ml.job.config.Detector;
 import org.elasticsearch.client.ml.job.config.Job;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
 import org.junit.After;

 import java.io.IOException;

@@ -41,6 +49,7 @@ import java.util.Arrays;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;

+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.hasItems;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.hasSize;
@@ -138,6 +147,77 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         assertTrue(response.isClosed());
     }

+    public void testFlushJob() throws Exception {
+        String jobId = randomValidJobId();
+        Job job = buildJob(jobId);
+        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+        machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+        machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT);
+
+        FlushJobResponse response = execute(new FlushJobRequest(jobId),
+            machineLearningClient::flushJob,
+            machineLearningClient::flushJobAsync);
+        assertTrue(response.isFlushed());
+    }
+
+    public void testGetJobStats() throws Exception {
+        String jobId1 = "ml-get-job-stats-test-id-1";
+        String jobId2 = "ml-get-job-stats-test-id-2";
+
+        Job job1 = buildJob(jobId1);
+        Job job2 = buildJob(jobId2);
+        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+        machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
+        machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
+
+        machineLearningClient.openJob(new OpenJobRequest(jobId1), RequestOptions.DEFAULT);
+
+        GetJobStatsRequest request = new GetJobStatsRequest(jobId1, jobId2);
+
+        // Test getting specific
+        GetJobStatsResponse response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+        assertEquals(2, response.count());
+        assertThat(response.jobStats(), hasSize(2));
+        assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), containsInAnyOrder(jobId1, jobId2));
+        for (JobStats stats : response.jobStats()) {
+            if (stats.getJobId().equals(jobId1)) {
+                assertEquals(JobState.OPENED, stats.getState());
+            } else {
+                assertEquals(JobState.CLOSED, stats.getState());
+            }
+        }
+
+        // Test getting all explicitly
+        request = GetJobStatsRequest.getAllJobStatsRequest();
+        response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+        assertTrue(response.count() >= 2L);
+        assertTrue(response.jobStats().size() >= 2L);
+        assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+        // Test getting all implicitly
+        response = execute(new GetJobStatsRequest(), machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+        assertTrue(response.count() >= 2L);
+        assertTrue(response.jobStats().size() >= 2L);
+        assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+        // Test getting all with wildcard
+        request = new GetJobStatsRequest("ml-get-job-stats-test-id-*");
+        response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+        assertTrue(response.count() >= 2L);
+        assertTrue(response.jobStats().size() >= 2L);
+        assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+        // Test when allow_no_jobs is false
+        final GetJobStatsRequest erroredRequest = new GetJobStatsRequest("jobs-that-do-not-exist*");
+        erroredRequest.setAllowNoJobs(false);
+        ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+            () -> execute(erroredRequest, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync));
+        assertThat(exception.status().getStatus(), equalTo(404));
+    }
+
     public static String randomValidJobId() {
         CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
         return generator.ofCodePointsLength(random(), 10, 10);
@@ -35,6 +35,8 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
 import org.elasticsearch.client.ml.GetBucketsResponse;
 import org.elasticsearch.client.ml.GetJobRequest;
 import org.elasticsearch.client.ml.GetJobResponse;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
 import org.elasticsearch.client.ml.GetRecordsRequest;
 import org.elasticsearch.client.ml.GetRecordsResponse;
 import org.elasticsearch.client.ml.OpenJobRequest;

@@ -50,6 +52,9 @@ import org.elasticsearch.client.ml.job.results.Bucket;
 import org.elasticsearch.client.ml.job.util.PageParams;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
+import org.elasticsearch.client.ml.job.stats.JobStats;
 import org.junit.After;

 import java.io.IOException;
@@ -458,6 +463,127 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }

+    public void testFlushJob() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        Job job = MachineLearningIT.buildJob("flushing-my-first-machine-learning-job");
+        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
+
+        Job secondJob = MachineLearningIT.buildJob("flushing-my-second-machine-learning-job");
+        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+        client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT);
+
+        {
+            //tag::x-pack-ml-flush-job-request
+            FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); //<1>
+            //end::x-pack-ml-flush-job-request
+
+            //tag::x-pack-ml-flush-job-request-options
+            flushJobRequest.setCalcInterim(true); //<1>
+            flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); //<2>
+            flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); //<3>
+            flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); //<4>
+            flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); //<5>
+            //end::x-pack-ml-flush-job-request-options
+
+            //tag::x-pack-ml-flush-job-execute
+            FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT);
+            //end::x-pack-ml-flush-job-execute
+
+            //tag::x-pack-ml-flush-job-response
+            boolean isFlushed = flushJobResponse.isFlushed(); //<1>
+            Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); //<2>
+            //end::x-pack-ml-flush-job-response
+
+        }
+        {
+            //tag::x-pack-ml-flush-job-listener
+            ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
+                @Override
+                public void onResponse(FlushJobResponse flushJobResponse) {
+                    //<1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+            //end::x-pack-ml-flush-job-listener
+            FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job");
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::x-pack-ml-flush-job-execute-async
+            client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); //<1>
+            // end::x-pack-ml-flush-job-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
+
+    public void testGetJobStats() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        Job job = MachineLearningIT.buildJob("get-machine-learning-job-stats1");
+        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+        Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job-stats2");
+        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+
+        {
+            //tag::x-pack-ml-get-job-stats-request
+            GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); //<1>
+            request.setAllowNoJobs(true); //<2>
+            //end::x-pack-ml-get-job-stats-request
+
+            //tag::x-pack-ml-get-job-stats-execute
+            GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
+            //end::x-pack-ml-get-job-stats-execute
+
+            //tag::x-pack-ml-get-job-stats-response
+            long numberOfJobStats = response.count(); //<1>
+            List<JobStats> jobStats = response.jobStats(); //<2>
+            //end::x-pack-ml-get-job-stats-response
+
+            assertEquals(2, response.count());
+            assertThat(response.jobStats(), hasSize(2));
+            assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()),
+                containsInAnyOrder(job.getId(), secondJob.getId()));
+        }
+        {
+            GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*");
+
+            // tag::x-pack-ml-get-job-stats-listener
+            ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
+                @Override
+                public void onResponse(GetJobStatsResponse response) {
+                    // <1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+            // end::x-pack-ml-get-job-stats-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::x-pack-ml-get-job-stats-execute-async
+            client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::x-pack-ml-get-job-stats-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
+
     public void testGetRecords() throws IOException, InterruptedException {
         RestHighLevelClient client = highLevelClient();
@@ -0,0 +1,59 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;

public class FlushJobRequestTests extends AbstractXContentTestCase<FlushJobRequest> {

    @Override
    protected FlushJobRequest createTestInstance() {
        FlushJobRequest request = new FlushJobRequest(randomAlphaOfLengthBetween(1, 20));

        if (randomBoolean()) {
            request.setCalcInterim(randomBoolean());
        }
        if (randomBoolean()) {
            request.setAdvanceTime(String.valueOf(randomLong()));
        }
        if (randomBoolean()) {
            request.setStart(String.valueOf(randomLong()));
        }
        if (randomBoolean()) {
            request.setEnd(String.valueOf(randomLong()));
        }
        if (randomBoolean()) {
            request.setSkipTime(String.valueOf(randomLong()));
        }
        return request;
    }

    @Override
    protected FlushJobRequest doParseInstance(XContentParser parser) throws IOException {
        return FlushJobRequest.PARSER.apply(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }
}
@@ -0,0 +1,44 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Date;

public class FlushJobResponseTests extends AbstractXContentTestCase<FlushJobResponse> {

    @Override
    protected FlushJobResponse createTestInstance() {
        return new FlushJobResponse(randomBoolean(),
            randomBoolean() ? null : new Date(randomNonNegativeLong()));
    }

    @Override
    protected FlushJobResponse doParseInstance(XContentParser parser) throws IOException {
        return FlushJobResponse.PARSER.apply(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }
}
@@ -26,6 +26,7 @@ import org.elasticsearch.test.AbstractXContentTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.function.Predicate;

 public class GetJobResponseTests extends AbstractXContentTestCase<GetJobResponse> {

@@ -46,8 +47,13 @@ public class GetJobResponseTests extends AbstractXContentTestCase<GetJobResponse
         return GetJobResponse.fromXContent(parser);
     }

+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return field -> !field.isEmpty();
+    }
+
     @Override
     protected boolean supportsUnknownFields() {
-        return false;
+        return true;
     }
 }
@@ -0,0 +1,69 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class GetJobStatsRequestTests extends AbstractXContentTestCase<GetJobStatsRequest> {

    public void testAllJobsRequest() {
        GetJobStatsRequest request = GetJobStatsRequest.getAllJobStatsRequest();

        assertEquals(request.getJobIds().size(), 1);
        assertEquals(request.getJobIds().get(0), "_all");
    }

    public void testNewWithJobId() {
        Exception exception = expectThrows(NullPointerException.class, () -> new GetJobStatsRequest("job", null));
        assertEquals(exception.getMessage(), "jobIds must not contain null values");
    }

    @Override
    protected GetJobStatsRequest createTestInstance() {
        int jobCount = randomIntBetween(0, 10);
        List<String> jobIds = new ArrayList<>(jobCount);

        for (int i = 0; i < jobCount; i++) {
            jobIds.add(randomAlphaOfLength(10));
        }

        GetJobStatsRequest request = new GetJobStatsRequest(jobIds);

        if (randomBoolean()) {
            request.setAllowNoJobs(randomBoolean());
        }

        return request;
    }

    @Override
    protected GetJobStatsRequest doParseInstance(XContentParser parser) throws IOException {
        return GetJobStatsRequest.PARSER.parse(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }
}
@@ -0,0 +1,53 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.job.stats.JobStatsTests;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class GetJobStatsResponseTests extends AbstractXContentTestCase<GetJobStatsResponse> {

    @Override
    protected GetJobStatsResponse createTestInstance() {
        int count = randomIntBetween(1, 5);
        List<JobStats> results = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            results.add(JobStatsTests.createRandomInstance());
        }

        return new GetJobStatsResponse(results, count);
    }

    @Override
    protected GetJobStatsResponse doParseInstance(XContentParser parser) throws IOException {
        return GetJobStatsResponse.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }
}
@@ -0,0 +1,64 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

public class NodeAttributesTests extends AbstractXContentTestCase<NodeAttributes> {

    public static NodeAttributes createRandom() {
        int numberOfAttributes = randomIntBetween(1, 10);
        Map<String, String> attributes = new HashMap<>(numberOfAttributes);
        for (int i = 0; i < numberOfAttributes; i++) {
            String val = randomAlphaOfLength(10);
            attributes.put("key-" + i, val);
        }
        return new NodeAttributes(randomAlphaOfLength(10),
            randomAlphaOfLength(10),
            randomAlphaOfLength(10),
            randomAlphaOfLength(10),
            attributes);
    }

    @Override
    protected NodeAttributes createTestInstance() {
        return createRandom();
    }

    @Override
    protected NodeAttributes doParseInstance(XContentParser parser) throws IOException {
        return NodeAttributes.PARSER.parse(parser, null);
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        return field -> !field.isEmpty();
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }
}
@@ -0,0 +1,70 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml.job.stats;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

public class ForecastStatsTests extends AbstractXContentTestCase<ForecastStats> {

    @Override
    public ForecastStats createTestInstance() {
        if (randomBoolean()) {
            return createRandom(1, 22);
        }
        return new ForecastStats(0, null, null, null, null);
    }

    @Override
    protected ForecastStats doParseInstance(XContentParser parser) throws IOException {
        return ForecastStats.PARSER.parse(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        return field -> !field.isEmpty();
    }

    public static ForecastStats createRandom(long minTotal, long maxTotal) {
        return new ForecastStats(
            randomLongBetween(minTotal, maxTotal),
            SimpleStatsTests.createRandom(),
            SimpleStatsTests.createRandom(),
            SimpleStatsTests.createRandom(),
            createCountStats());
    }

    private static Map<String, Long> createCountStats() {
        Map<String, Long> countStats = new HashMap<>();
        for (int i = 0; i < randomInt(10); ++i) {
            countStats.put(randomAlphaOfLengthBetween(1, 20), randomLongBetween(1L, 100L));
        }
        return countStats;
    }
}
@@ -0,0 +1,72 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml.job.stats;

import org.elasticsearch.client.ml.NodeAttributes;
import org.elasticsearch.client.ml.NodeAttributesTests;
import org.elasticsearch.client.ml.job.process.DataCounts;
import org.elasticsearch.client.ml.job.process.DataCountsTests;
import org.elasticsearch.client.ml.job.process.ModelSizeStats;
import org.elasticsearch.client.ml.job.process.ModelSizeStatsTests;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.client.ml.job.config.JobState;
import org.elasticsearch.client.ml.job.config.JobTests;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.function.Predicate;

public class JobStatsTests extends AbstractXContentTestCase<JobStats> {

    public static JobStats createRandomInstance() {
        String jobId = JobTests.randomValidJobId();
        JobState state = randomFrom(JobState.CLOSING, JobState.CLOSED, JobState.OPENED, JobState.FAILED, JobState.OPENING);
        DataCounts dataCounts = DataCountsTests.createTestInstance(jobId);

        ModelSizeStats modelSizeStats = randomBoolean() ? ModelSizeStatsTests.createRandomized() : null;
        ForecastStats forecastStats = randomBoolean() ? ForecastStatsTests.createRandom(1, 22) : null;
        NodeAttributes nodeAttributes = randomBoolean() ? NodeAttributesTests.createRandom() : null;
        String assignmentExplanation = randomBoolean() ? randomAlphaOfLength(10) : null;
        TimeValue openTime = randomBoolean() ? TimeValue.timeValueMillis(randomIntBetween(1, 10000)) : null;

        return new JobStats(jobId, dataCounts, state, modelSizeStats, forecastStats, nodeAttributes, assignmentExplanation, openTime);
    }

    @Override
    protected JobStats createTestInstance() {
        return createRandomInstance();
    }

    @Override
    protected JobStats doParseInstance(XContentParser parser) throws IOException {
        return JobStats.PARSER.parse(parser, null);
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        return field -> !field.isEmpty();
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }
}
@@ -0,0 +1,47 @@
/* Apache License 2.0 header, identical to the one in SimpleStats.java above */
package org.elasticsearch.client.ml.job.stats;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;

public class SimpleStatsTests extends AbstractXContentTestCase<SimpleStats> {

    @Override
    protected SimpleStats createTestInstance() {
        return createRandom();
    }

    @Override
    protected SimpleStats doParseInstance(XContentParser parser) throws IOException {
        return SimpleStats.PARSER.parse(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public static SimpleStats createRandom() {
        return new SimpleStats(randomDouble(), randomDouble(), randomDouble(), randomDouble());
    }
}
@@ -85,7 +85,7 @@ import static java.util.Collections.singletonList;
  * The hosts that are part of the cluster need to be provided at creation time, but can also be replaced later
  * by calling {@link #setNodes(Collection)}.
  * <p>
- * The method {@link #performRequest(String, String, Map, HttpEntity, Header...)} allows to send a request to the cluster. When
+ * The method {@link #performRequest(Request)} allows to send a request to the cluster. When
  * sending a request, a host gets selected out of the provided ones in a round-robin fashion. Failing hosts are marked dead and
  * retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously
  * failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead nodes that
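For callers migrating off the String-based overloads that this commit removes (deleted below), the equivalent call goes through the Request object instead; a minimal sketch, in which restClient, the endpoint and the parameter value are placeholders:

    import org.apache.http.util.EntityUtils;

    // Before (deprecated): restClient.performRequest("GET", "/_cluster/health", params);
    Request request = new Request("GET", "/_cluster/health");   // method + endpoint
    request.addParameter("pretty", "true");                     // former query_string parameters
    Response response = restClient.performRequest(request);
    String body = EntityUtils.toString(response.getEntity());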
@@ -145,17 +145,6 @@ public class RestClient implements Closeable {
         return new RestClientBuilder(hostsToNodes(hosts));
     }

-    /**
-     * Replaces the hosts with which the client communicates.
-     *
-     * @deprecated prefer {@link #setNodes(Collection)} because it allows you
-     * to set metadata for use with {@link NodeSelector}s
-     */
-    @Deprecated
-    public void setHosts(HttpHost... hosts) {
-        setNodes(hostsToNodes(hosts));
-    }
-
     /**
      * Replaces the nodes with which the client communicates.
      */
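A sketch of the replacement for removed setHosts calls; host names and ports are placeholders:

    import java.util.Arrays;
    import org.apache.http.HttpHost;

    // Before (removed): restClient.setHosts(new HttpHost("node-1", 9200), new HttpHost("node-2", 9200));
    restClient.setNodes(Arrays.asList(
        new Node(new HttpHost("node-1", 9200)),    // Node can also carry the metadata
        new Node(new HttpHost("node-2", 9200))));  // that NodeSelectors act on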
@ -251,234 +240,6 @@ public class RestClient implements Closeable {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Sends a request to the Elasticsearch cluster that the client points to and waits for the corresponding response
|
|
||||||
* to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without parameters
|
|
||||||
* and request body.
|
|
||||||
*
|
|
||||||
* @param method the http method
|
|
||||||
* @param endpoint the path of the request (without host and port)
|
|
||||||
* @param headers the optional request headers
|
|
||||||
* @return the response returned by Elasticsearch
|
|
||||||
* @throws IOException in case of a problem or the connection was aborted
|
|
||||||
* @throws ClientProtocolException in case of an http protocol error
|
|
||||||
* @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
|
|
||||||
* @deprecated prefer {@link #performRequest(Request)}
|
|
||||||
*/
|
|
||||||
@Deprecated
|
|
||||||
public Response performRequest(String method, String endpoint, Header... headers) throws IOException {
|
|
||||||
Request request = new Request(method, endpoint);
|
|
||||||
addHeaders(request, headers);
|
|
||||||
return performRequest(request);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sends a request to the Elasticsearch cluster that the client points to and waits for the corresponding response
|
|
||||||
* to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without request body.
|
|
||||||
*
|
|
||||||
* @param method the http method
|
|
||||||
* @param endpoint the path of the request (without host and port)
|
|
||||||
* @param params the query_string parameters
|
|
||||||
* @param headers the optional request headers
|
|
||||||
* @return the response returned by Elasticsearch
|
|
||||||
* @throws IOException in case of a problem or the connection was aborted
|
|
||||||
* @throws ClientProtocolException in case of an http protocol error
|
|
||||||
* @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
|
|
||||||
* @deprecated prefer {@link #performRequest(Request)}
|
|
||||||
*/
|
|
||||||
@Deprecated
|
|
||||||
public Response performRequest(String method, String endpoint, Map<String, String> params, Header... headers) throws IOException {
|
|
||||||
Request request = new Request(method, endpoint);
|
|
||||||
addParameters(request, params);
|
|
||||||
addHeaders(request, headers);
|
|
||||||
return performRequest(request);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sends a request to the Elasticsearch cluster that the client points to and waits for the corresponding response
|
|
||||||
* to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumerFactory, Header...)}
|
|
||||||
* which doesn't require specifying an {@link HttpAsyncResponseConsumerFactory} instance,
|
|
||||||
* {@link HttpAsyncResponseConsumerFactory} will be used to create the needed instances of {@link HttpAsyncResponseConsumer}.
|
|
||||||
*
|
|
||||||
* @param method the http method
|
|
||||||
* @param endpoint the path of the request (without host and port)
|
|
||||||
* @param params the query_string parameters
|
|
||||||
* @param entity the body of the request, null if not applicable
|
|
||||||
* @param headers the optional request headers
|
|
||||||
* @return the response returned by Elasticsearch
|
|
||||||
* @throws IOException in case of a problem or the connection was aborted
|
|
||||||
* @throws ClientProtocolException in case of an http protocol error
|
|
||||||
* @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
|
|
||||||
* @deprecated prefer {@link #performRequest(Request)}
|
|
||||||
*/
|
|
||||||
@Deprecated
|
|
||||||
public Response performRequest(String method, String endpoint, Map<String, String> params,
|
|
||||||
HttpEntity entity, Header... headers) throws IOException {
|
|
||||||
Request request = new Request(method, endpoint);
|
|
||||||
addParameters(request, params);
|
|
||||||
request.setEntity(entity);
|
|
||||||
addHeaders(request, headers);
|
|
||||||
return performRequest(request);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sends a request to the Elasticsearch cluster that the client points to. Blocks until the request is completed and returns
|
|
||||||
* its response or fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. Failing hosts
|
|
||||||
* are marked dead and retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times
|
|
||||||
* they previously failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead
|
|
||||||
* nodes that deserve a retry) are retried until one responds or none of them does, in which case an {@link IOException} will be thrown.
|
|
||||||
*
|
|
||||||
* This method works by performing an asynchronous call and waiting
|
|
||||||
* for the result. If the asynchronous call throws an exception we wrap
|
|
||||||
* it and rethrow it so that the stack trace attached to the exception
|
|
||||||
* contains the call site. While we attempt to preserve the original
|
|
||||||
* exception this isn't always possible and likely haven't covered all of
|
|
||||||
* the cases. You can get the original exception from
|
|
||||||
* {@link Exception#getCause()}.
|
|
||||||
*
|
|
||||||
* @param method the http method
|
|
||||||
* @param endpoint the path of the request (without host and port)
|
|
||||||
* @param params the query_string parameters
|
|
||||||
* @param entity the body of the request, null if not applicable
|
|
||||||
* @param httpAsyncResponseConsumerFactory the {@link HttpAsyncResponseConsumerFactory} used to create one
|
|
||||||
* {@link HttpAsyncResponseConsumer} callback per retry. Controls how the response body gets streamed from a non-blocking HTTP
|
|
||||||
* connection on the client side.
|
|
||||||
* @param headers the optional request headers
|
|
||||||
* @return the response returned by Elasticsearch
|
|
||||||
* @throws IOException in case of a problem or the connection was aborted
|
|
||||||
* @throws ClientProtocolException in case of an http protocol error
|
|
||||||
* @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
|
|
||||||
* @deprecated prefer {@link #performRequest(Request)}
|
|
||||||
*/
|
|
||||||
@Deprecated
|
|
||||||
public Response performRequest(String method, String endpoint, Map<String, String> params,
|
|
||||||
HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
|
|
||||||
Header... headers) throws IOException {
|
|
||||||
Request request = new Request(method, endpoint);
|
|
||||||
addParameters(request, params);
|
|
||||||
request.setEntity(entity);
|
|
||||||
setOptions(request, httpAsyncResponseConsumerFactory, headers);
|
|
||||||
return performRequest(request);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sends a request to the Elasticsearch cluster that the client points to. Doesn't wait for the response, instead
|
|
||||||
* the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to
|
|
||||||
* {@link #performRequestAsync(String, String, Map, HttpEntity, ResponseListener, Header...)} but without parameters and request body.
|
|
||||||
*
|
|
||||||
* @param method the http method
|
|
||||||
* @param endpoint the path of the request (without host and port)
|
|
||||||
* @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
|
|
||||||
* @param headers the optional request headers
|
|
||||||
* @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
|
|
||||||
*/
|
|
||||||
@Deprecated
|
|
||||||
public void performRequestAsync(String method, String endpoint, ResponseListener responseListener, Header... headers) {
|
|
||||||
Request request;
|
|
||||||
try {
|
|
||||||
request = new Request(method, endpoint);
|
|
||||||
addHeaders(request, headers);
|
|
||||||
} catch (Exception e) {
|
|
||||||
responseListener.onFailure(e);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
performRequestAsync(request, responseListener);
|
|
||||||
}
|
|
||||||
|
|
||||||
-    /**
-     * Sends a request to the Elasticsearch cluster that the client points to. Doesn't wait for the response, instead
-     * the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to
-     * {@link #performRequestAsync(String, String, Map, HttpEntity, ResponseListener, Header...)} but without request body.
-     *
-     * @param method the http method
-     * @param endpoint the path of the request (without host and port)
-     * @param params the query_string parameters
-     * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
-     * @param headers the optional request headers
-     * @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
-     */
-    @Deprecated
-    public void performRequestAsync(String method, String endpoint, Map<String, String> params,
-                                    ResponseListener responseListener, Header... headers) {
-        Request request;
-        try {
-            request = new Request(method, endpoint);
-            addParameters(request, params);
-            addHeaders(request, headers);
-        } catch (Exception e) {
-            responseListener.onFailure(e);
-            return;
-        }
-        performRequestAsync(request, responseListener);
-    }
-
-    /**
-     * Sends a request to the Elasticsearch cluster that the client points to. Doesn't wait for the response, instead
-     * the provided {@link ResponseListener} will be notified upon completion or failure.
-     * Shortcut to {@link #performRequestAsync(String, String, Map, HttpEntity, HttpAsyncResponseConsumerFactory, ResponseListener,
-     * Header...)} which doesn't require specifying an {@link HttpAsyncResponseConsumerFactory} instance,
-     * {@link HttpAsyncResponseConsumerFactory} will be used to create the needed instances of {@link HttpAsyncResponseConsumer}.
-     *
-     * @param method the http method
-     * @param endpoint the path of the request (without host and port)
-     * @param params the query_string parameters
-     * @param entity the body of the request, null if not applicable
-     * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
-     * @param headers the optional request headers
-     * @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
-     */
-    @Deprecated
-    public void performRequestAsync(String method, String endpoint, Map<String, String> params,
-                                    HttpEntity entity, ResponseListener responseListener, Header... headers) {
-        Request request;
-        try {
-            request = new Request(method, endpoint);
-            addParameters(request, params);
-            request.setEntity(entity);
-            addHeaders(request, headers);
-        } catch (Exception e) {
-            responseListener.onFailure(e);
-            return;
-        }
-        performRequestAsync(request, responseListener);
-    }
-
-    /**
-     * Sends a request to the Elasticsearch cluster that the client points to. The request is executed asynchronously
-     * and the provided {@link ResponseListener} gets notified upon request completion or failure.
-     * Selects a host out of the provided ones in a round-robin fashion. Failing hosts are marked dead and retried after a certain
-     * amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously failed (the more failures,
-     * the later they will be retried). In case of failures all of the alive nodes (or dead nodes that deserve a retry) are retried
-     * until one responds or none of them does, in which case an {@link IOException} will be thrown.
-     *
-     * @param method the http method
-     * @param endpoint the path of the request (without host and port)
-     * @param params the query_string parameters
-     * @param entity the body of the request, null if not applicable
-     * @param httpAsyncResponseConsumerFactory the {@link HttpAsyncResponseConsumerFactory} used to create one
-     * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the response body gets streamed from a non-blocking HTTP
-     * connection on the client side.
-     * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails
-     * @param headers the optional request headers
-     * @deprecated prefer {@link #performRequestAsync(Request, ResponseListener)}
-     */
-    @Deprecated
-    public void performRequestAsync(String method, String endpoint, Map<String, String> params,
-                                    HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
-                                    ResponseListener responseListener, Header... headers) {
-        Request request;
-        try {
-            request = new Request(method, endpoint);
-            addParameters(request, params);
-            request.setEntity(entity);
-            setOptions(request, httpAsyncResponseConsumerFactory, headers);
-        } catch (Exception e) {
-            responseListener.onFailure(e);
-            return;
-        }
-        performRequestAsync(request, responseListener);
-    }
-
     void performRequestAsyncNoCatch(Request request, ResponseListener listener) throws IOException {
         Map<String, String> requestParams = new HashMap<>(request.getParameters());
         //ignore is a special parameter supported by the clients, shouldn't be sent to es
@@ -1035,42 +796,4 @@ public class RestClient implements Closeable {
                 itr.remove();
             }
         }
-
-    /**
-     * Add all headers from the provided varargs argument to a {@link Request}. This only exists
-     * to support methods that exist for backwards compatibility.
-     */
-    @Deprecated
-    private static void addHeaders(Request request, Header... headers) {
-        setOptions(request, RequestOptions.DEFAULT.getHttpAsyncResponseConsumerFactory(), headers);
-    }
-
-    /**
-     * Add all headers from the provided varargs argument to a {@link Request}. This only exists
-     * to support methods that exist for backwards compatibility.
-     */
-    @Deprecated
-    private static void setOptions(Request request, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
-                                   Header... headers) {
-        Objects.requireNonNull(headers, "headers cannot be null");
-        RequestOptions.Builder options = request.getOptions().toBuilder();
-        for (Header header : headers) {
-            Objects.requireNonNull(header, "header cannot be null");
-            options.addHeader(header.getName(), header.getValue());
-        }
-        options.setHttpAsyncResponseConsumerFactory(httpAsyncResponseConsumerFactory);
-        request.setOptions(options);
-    }
-
-    /**
-     * Add all parameters from a map to a {@link Request}. This only exists
-     * to support methods that exist for backwards compatibility.
-     */
-    @Deprecated
-    private static void addParameters(Request request, Map<String, String> parameters) {
-        Objects.requireNonNull(parameters, "parameters cannot be null");
-        for (Map.Entry<String, String> entry : parameters.entrySet()) {
-            request.addParameter(entry.getKey(), entry.getValue());
-        }
-    }
 }
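For context, every overload removed above funnels into the `Request`-based flavor that remains.
A minimal sketch of the equivalent modern call pattern, assuming a started `RestClient` named
`restClient` (the endpoint, parameter, and header values here are illustrative only):

["source","java"]
--------------------------------------------------
Request request = new Request("GET", "/_cluster/health");  // method + endpoint
request.addParameter("pretty", "true");                    // replaces the params Map argument
RequestOptions.Builder options = request.getOptions().toBuilder();
options.addHeader("X-Opaque-Id", "example");               // replaces the Header varargs argument
request.setOptions(options);
Response response = restClient.performRequest(request);    // or performRequestAsync(request, listener)
--------------------------------------------------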
@@ -45,7 +45,6 @@ import java.io.OutputStream;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -215,9 +214,15 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         }
         final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
         final int statusCode = randomStatusCode(getRandom());
+        Request request = new Request(method, "/" + statusCode);
+        RequestOptions.Builder options = request.getOptions().toBuilder();
+        for (Header header : requestHeaders) {
+            options.addHeader(header.getName(), header.getValue());
+        }
+        request.setOptions(options);
         Response esResponse;
         try {
-            esResponse = restClient.performRequest(method, "/" + statusCode, Collections.<String, String>emptyMap(), requestHeaders);
+            esResponse = restClient.performRequest(request);
         } catch (ResponseException e) {
             esResponse = e.getResponse();
         }
@@ -59,7 +59,6 @@ import java.net.URI;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -69,7 +68,6 @@ import static java.util.Collections.singletonList;
 import static org.elasticsearch.client.RestClientTestUtil.getAllErrorStatusCodes;
 import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
 import static org.elasticsearch.client.RestClientTestUtil.getOkStatusCodes;
-import static org.elasticsearch.client.RestClientTestUtil.randomHttpMethod;
 import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode;
 import static org.elasticsearch.client.SyncResponseListenerTests.assertExceptionStackContainsCallingMethod;
 import static org.hamcrest.CoreMatchers.equalTo;
@@ -192,7 +190,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
     public void testOkStatusCodes() throws IOException {
         for (String method : getHttpMethods()) {
             for (int okStatusCode : getOkStatusCodes()) {
-                Response response = performRequest(method, "/" + okStatusCode);
+                Response response = restClient.performRequest(new Request(method, "/" + okStatusCode));
                 assertThat(response.getStatusLine().getStatusCode(), equalTo(okStatusCode));
             }
         }
@@ -223,13 +221,11 @@ public class RestClientSingleHostTests extends RestClientTestCase {
             //error status codes should cause an exception to be thrown
             for (int errorStatusCode : getAllErrorStatusCodes()) {
                 try {
-                    Map<String, String> params;
-                    if (ignoreParam.isEmpty()) {
-                        params = Collections.emptyMap();
-                    } else {
-                        params = Collections.singletonMap("ignore", ignoreParam);
+                    Request request = new Request(method, "/" + errorStatusCode);
+                    if (false == ignoreParam.isEmpty()) {
+                        request.addParameter("ignore", ignoreParam);
                     }
-                    Response response = performRequest(method, "/" + errorStatusCode, params);
+                    Response response = restClient.performRequest(request);
                     if (expectedIgnores.contains(errorStatusCode)) {
                         //no exception gets thrown although we got an error status code, as it was configured to be ignored
                         assertEquals(errorStatusCode, response.getStatusLine().getStatusCode());
@@ -256,14 +252,14 @@ public class RestClientSingleHostTests extends RestClientTestCase {
         for (String method : getHttpMethods()) {
             //IOExceptions should be let bubble up
             try {
-                performRequest(method, "/coe");
+                restClient.performRequest(new Request(method, "/coe"));
                 fail("request should have failed");
             } catch(IOException e) {
                 assertThat(e, instanceOf(ConnectTimeoutException.class));
             }
             failureListener.assertCalled(singletonList(node));
             try {
-                performRequest(method, "/soe");
+                restClient.performRequest(new Request(method, "/soe"));
                 fail("request should have failed");
             } catch(IOException e) {
                 assertThat(e, instanceOf(SocketTimeoutException.class));
@@ -313,48 +309,6 @@ public class RestClientSingleHostTests extends RestClientTestCase {
         }
     }
-
-    /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
-     */
-    @Deprecated
-    public void tesPerformRequestOldStyleNullHeaders() throws IOException {
-        String method = randomHttpMethod(getRandom());
-        int statusCode = randomStatusCode(getRandom());
-        try {
-            performRequest(method, "/" + statusCode, (Header[])null);
-            fail("request should have failed");
-        } catch(NullPointerException e) {
-            assertEquals("request headers must not be null", e.getMessage());
-        }
-        try {
-            performRequest(method, "/" + statusCode, (Header)null);
-            fail("request should have failed");
-        } catch(NullPointerException e) {
-            assertEquals("request header must not be null", e.getMessage());
-        }
-    }
-
-    /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddParameters()}.
-     */
-    @Deprecated
-    public void testPerformRequestOldStyleWithNullParams() throws IOException {
-        String method = randomHttpMethod(getRandom());
-        int statusCode = randomStatusCode(getRandom());
-        try {
-            restClient.performRequest(method, "/" + statusCode, (Map<String, String>)null);
-            fail("request should have failed");
-        } catch(NullPointerException e) {
-            assertEquals("parameters cannot be null", e.getMessage());
-        }
-        try {
-            restClient.performRequest(method, "/" + statusCode, null, (HttpEntity)null);
-            fail("request should have failed");
-        } catch(NullPointerException e) {
-            assertEquals("parameters cannot be null", e.getMessage());
-        }
-    }
-
     /**
      * End to end test for request and response headers. Exercises the mock http client ability to send back
      * whatever headers it has received.
@@ -464,35 +418,4 @@
         }
         return expectedRequest;
     }
-
-    /**
-     * @deprecated prefer {@link RestClient#performRequest(Request)}.
-     */
-    @Deprecated
-    private Response performRequest(String method, String endpoint, Header... headers) throws IOException {
-        return performRequest(method, endpoint, Collections.<String, String>emptyMap(), headers);
-    }
-
-    /**
-     * @deprecated prefer {@link RestClient#performRequest(Request)}.
-     */
-    @Deprecated
-    private Response performRequest(String method, String endpoint, Map<String, String> params, Header... headers) throws IOException {
-        int methodSelector;
-        if (params.isEmpty()) {
-            methodSelector = randomIntBetween(0, 2);
-        } else {
-            methodSelector = randomIntBetween(1, 2);
-        }
-        switch(methodSelector) {
-            case 0:
-                return restClient.performRequest(method, endpoint, headers);
-            case 1:
-                return restClient.performRequest(method, endpoint, params, headers);
-            case 2:
-                return restClient.performRequest(method, endpoint, params, (HttpEntity)null, headers);
-            default:
-                throw new UnsupportedOperationException();
-        }
-    }
 }
@@ -42,7 +42,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;

 import static java.util.Collections.singletonList;
-import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertSame;
@@ -90,88 +89,6 @@ public class RestClientTests extends RestClientTestCase {
         }
     }
-
-    /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link #testPerformAsyncWithUnsupportedMethod()}.
-     */
-    @Deprecated
-    public void testPerformAsyncOldStyleWithUnsupportedMethod() throws Exception {
-        final CountDownLatch latch = new CountDownLatch(1);
-        try (RestClient restClient = createRestClient()) {
-            restClient.performRequestAsync("unsupported", randomAsciiLettersOfLength(5), new ResponseListener() {
-                @Override
-                public void onSuccess(Response response) {
-                    throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
-                }
-
-                @Override
-                public void onFailure(Exception exception) {
-                    try {
-                        assertThat(exception, instanceOf(UnsupportedOperationException.class));
-                        assertEquals("http method not supported: unsupported", exception.getMessage());
-                    } finally {
-                        latch.countDown();
-                    }
-                }
-            });
-            assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
-        }
-    }
-
-    /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddParameters()}.
-     */
-    @Deprecated
-    public void testPerformOldStyleAsyncWithNullParams() throws Exception {
-        final CountDownLatch latch = new CountDownLatch(1);
-        try (RestClient restClient = createRestClient()) {
-            restClient.performRequestAsync(randomAsciiLettersOfLength(5), randomAsciiLettersOfLength(5), null, new ResponseListener() {
-                @Override
-                public void onSuccess(Response response) {
-                    throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
-                }
-
-                @Override
-                public void onFailure(Exception exception) {
-                    try {
-                        assertThat(exception, instanceOf(NullPointerException.class));
-                        assertEquals("parameters cannot be null", exception.getMessage());
-                    } finally {
-                        latch.countDown();
-                    }
-                }
-            });
-            assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
-        }
-    }
-
-    /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}.
-     */
-    @Deprecated
-    public void testPerformOldStyleAsyncWithNullHeaders() throws Exception {
-        final CountDownLatch latch = new CountDownLatch(1);
-        try (RestClient restClient = createRestClient()) {
-            ResponseListener listener = new ResponseListener() {
-                @Override
-                public void onSuccess(Response response) {
-                    throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
-                }
-
-                @Override
-                public void onFailure(Exception exception) {
-                    try {
-                        assertThat(exception, instanceOf(NullPointerException.class));
-                        assertEquals("header cannot be null", exception.getMessage());
-                    } finally {
-                        latch.countDown();
-                    }
-                }
-            };
-            restClient.performRequestAsync("GET", randomAsciiLettersOfLength(5), listener, (Header) null);
-            assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
-        }
-    }
-
     public void testPerformAsyncWithWrongEndpoint() throws Exception {
         final CountDownLatch latch = new CountDownLatch(1);
         try (RestClient restClient = createRestClient()) {
@@ -195,33 +112,6 @@ public class RestClientTests extends RestClientTestCase {
         }
     }
-
-    /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link #testPerformAsyncWithWrongEndpoint()}.
-     */
-    @Deprecated
-    public void testPerformAsyncOldStyleWithWrongEndpoint() throws Exception {
-        final CountDownLatch latch = new CountDownLatch(1);
-        try (RestClient restClient = createRestClient()) {
-            restClient.performRequestAsync("GET", "::http:///", new ResponseListener() {
-                @Override
-                public void onSuccess(Response response) {
-                    throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
-                }
-
-                @Override
-                public void onFailure(Exception exception) {
-                    try {
-                        assertThat(exception, instanceOf(IllegalArgumentException.class));
-                        assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage());
-                    } finally {
-                        latch.countDown();
-                    }
-                }
-            });
-            assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
-        }
-    }
-
     public void testBuildUriLeavesPathUntouched() {
         final Map<String, String> emptyMap = Collections.emptyMap();
         {
@@ -259,34 +149,6 @@ public class RestClientTests extends RestClientTestCase {
         }
     }
-
-    @Deprecated
-    public void testSetHostsWrongArguments() throws IOException {
-        try (RestClient restClient = createRestClient()) {
-            restClient.setHosts((HttpHost[]) null);
-            fail("setHosts should have failed");
-        } catch (IllegalArgumentException e) {
-            assertEquals("hosts must not be null nor empty", e.getMessage());
-        }
-        try (RestClient restClient = createRestClient()) {
-            restClient.setHosts();
-            fail("setHosts should have failed");
-        } catch (IllegalArgumentException e) {
-            assertEquals("hosts must not be null nor empty", e.getMessage());
-        }
-        try (RestClient restClient = createRestClient()) {
-            restClient.setHosts((HttpHost) null);
-            fail("setHosts should have failed");
-        } catch (IllegalArgumentException e) {
-            assertEquals("host cannot be null", e.getMessage());
-        }
-        try (RestClient restClient = createRestClient()) {
-            restClient.setHosts(new HttpHost("localhost", 9200), null, new HttpHost("localhost", 9201));
-            fail("setHosts should have failed");
-        } catch (IllegalArgumentException e) {
-            assertEquals("host cannot be null", e.getMessage());
-        }
-    }
-
     public void testSetNodesWrongArguments() throws IOException {
         try (RestClient restClient = createRestClient()) {
             restClient.setNodes(null);
@@ -348,23 +210,6 @@ public class RestClientTests extends RestClientTestCase {
         }
     }
-
-    /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testConstructor()}.
-     */
-    @Deprecated
-    public void testNullPath() throws IOException {
-        try (RestClient restClient = createRestClient()) {
-            for (String method : getHttpMethods()) {
-                try {
-                    restClient.performRequest(method, null);
-                    fail("path set to null should fail!");
-                } catch (NullPointerException e) {
-                    assertEquals("endpoint cannot be null", e.getMessage());
-                }
-            }
-        }
-    }
-
     public void testSelectHosts() throws IOException {
         Node n1 = new Node(new HttpHost("1"), null, null, "1", null, null);
         Node n2 = new Node(new HttpHost("2"), null, null, "2", null, null);
@@ -19,10 +19,20 @@
 apply plugin: 'elasticsearch.docs-test'

-/* List of files that have snippets that require a gold or platinum licence
-and therefore cannot be tested yet... */
+/* List of files that have snippets that will not work until platinum tests can occur ... */
 buildRestTests.expectedUnconvertedCandidates = [
   'reference/ml/transforms.asciidoc',
+  'reference/ml/apis/delete-calendar-event.asciidoc',
+  'reference/ml/apis/get-bucket.asciidoc',
+  'reference/ml/apis/get-category.asciidoc',
+  'reference/ml/apis/get-influencer.asciidoc',
+  'reference/ml/apis/get-job-stats.asciidoc',
+  'reference/ml/apis/get-overall-buckets.asciidoc',
+  'reference/ml/apis/get-record.asciidoc',
+  'reference/ml/apis/get-snapshot.asciidoc',
+  'reference/ml/apis/post-data.asciidoc',
+  'reference/ml/apis/revert-snapshot.asciidoc',
+  'reference/ml/apis/update-snapshot.asciidoc',
 ]

 integTestCluster {
@@ -867,3 +877,224 @@ buildRestTests.setups['sensor_prefab_data'] = '''
 {"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"}

 '''
+buildRestTests.setups['sample_job'] = '''
+  - do:
+      xpack.ml.put_job:
+        job_id: "sample_job"
+        body: >
+          {
+            "description" : "Very basic job",
+            "analysis_config" : {
+              "bucket_span":"10m",
+              "detectors" :[
+                {
+                  "function": "count"
+                }
+              ]},
+            "data_description" : {
+              "time_field":"timestamp",
+              "time_format": "epoch_ms"
+            }
+          }
+'''
+buildRestTests.setups['farequote_index'] = '''
+  - do:
+      indices.create:
+        index: farequote
+        body:
+          settings:
+            number_of_shards: 1
+            number_of_replicas: 0
+          mappings:
+            metric:
+              properties:
+                time:
+                  type: date
+                responsetime:
+                  type: float
+                airline:
+                  type: keyword
+                doc_count:
+                  type: integer
+'''
+buildRestTests.setups['farequote_data'] = buildRestTests.setups['farequote_index'] + '''
+  - do:
+      bulk:
+        index: farequote
+        type: metric
+        refresh: true
+        body: |
+          {"index": {"_id":"1"}}
+          {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5}
+          {"index": {"_id":"2"}}
+          {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23}
+          {"index": {"_id":"3"}}
+          {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42}
+'''
+buildRestTests.setups['farequote_job'] = buildRestTests.setups['farequote_data'] + '''
+  - do:
+      xpack.ml.put_job:
+        job_id: "farequote"
+        body: >
+          {
+            "analysis_config": {
+              "bucket_span": "60m",
+              "detectors": [{
+                "function": "mean",
+                "field_name": "responsetime",
+                "by_field_name": "airline"
+              }],
+              "summary_count_field_name": "doc_count"
+            },
+            "data_description": {
+              "time_field": "time"
+            }
+          }
+'''
+buildRestTests.setups['farequote_datafeed'] = buildRestTests.setups['farequote_job'] + '''
+  - do:
+      xpack.ml.put_datafeed:
+        datafeed_id: "datafeed-farequote"
+        body: >
+          {
+            "job_id":"farequote",
+            "indexes":"farequote"
+          }
+'''
+buildRestTests.setups['server_metrics_index'] = '''
+  - do:
+      indices.create:
+        index: server-metrics
+        body:
+          settings:
+            number_of_shards: 1
+            number_of_replicas: 0
+          mappings:
+            metric:
+              properties:
+                timestamp:
+                  type: date
+                total:
+                  type: long
+'''
+buildRestTests.setups['server_metrics_data'] = buildRestTests.setups['server_metrics_index'] + '''
+  - do:
+      bulk:
+        index: server-metrics
+        type: metric
+        refresh: true
+        body: |
+          {"index": {"_id":"1177"}}
+          {"timestamp":"2017-03-23T13:00:00","total":40476}
+          {"index": {"_id":"1178"}}
+          {"timestamp":"2017-03-23T13:00:00","total":15287}
+          {"index": {"_id":"1179"}}
+          {"timestamp":"2017-03-23T13:00:00","total":-776}
+          {"index": {"_id":"1180"}}
+          {"timestamp":"2017-03-23T13:00:00","total":11366}
+          {"index": {"_id":"1181"}}
+          {"timestamp":"2017-03-23T13:00:00","total":3606}
+          {"index": {"_id":"1182"}}
+          {"timestamp":"2017-03-23T13:00:00","total":19006}
+          {"index": {"_id":"1183"}}
+          {"timestamp":"2017-03-23T13:00:00","total":38613}
+          {"index": {"_id":"1184"}}
+          {"timestamp":"2017-03-23T13:00:00","total":19516}
+          {"index": {"_id":"1185"}}
+          {"timestamp":"2017-03-23T13:00:00","total":-258}
+          {"index": {"_id":"1186"}}
+          {"timestamp":"2017-03-23T13:00:00","total":9551}
+          {"index": {"_id":"1187"}}
+          {"timestamp":"2017-03-23T13:00:00","total":11217}
+          {"index": {"_id":"1188"}}
+          {"timestamp":"2017-03-23T13:00:00","total":22557}
+          {"index": {"_id":"1189"}}
+          {"timestamp":"2017-03-23T13:00:00","total":40508}
+          {"index": {"_id":"1190"}}
+          {"timestamp":"2017-03-23T13:00:00","total":11887}
+          {"index": {"_id":"1191"}}
+          {"timestamp":"2017-03-23T13:00:00","total":31659}
+'''
+buildRestTests.setups['server_metrics_job'] = buildRestTests.setups['server_metrics_data'] + '''
+  - do:
+      xpack.ml.put_job:
+        job_id: "total-requests"
+        body: >
+          {
+            "description" : "Total sum of requests",
+            "analysis_config" : {
+              "bucket_span":"10m",
+              "detectors" :[
+                {
+                  "detector_description": "Sum of total",
+                  "function": "sum",
+                  "field_name": "total"
+                }
+              ]},
+            "data_description" : {
+              "time_field":"timestamp",
+              "time_format": "epoch_ms"
+            }
+          }
+'''
+buildRestTests.setups['server_metrics_datafeed'] = buildRestTests.setups['server_metrics_job'] + '''
+  - do:
+      xpack.ml.put_datafeed:
+        datafeed_id: "datafeed-total-requests"
+        body: >
+          {
+            "job_id":"total-requests",
+            "indexes":"server-metrics"
+          }
+'''
+buildRestTests.setups['server_metrics_openjob'] = buildRestTests.setups['server_metrics_datafeed'] + '''
+  - do:
+      xpack.ml.open_job:
+        job_id: "total-requests"
+'''
+buildRestTests.setups['server_metrics_startdf'] = buildRestTests.setups['server_metrics_openjob'] + '''
+  - do:
+      xpack.ml.start_datafeed:
+        datafeed_id: "datafeed-total-requests"
+'''
+buildRestTests.setups['calendar_outages'] = '''
+  - do:
+      xpack.ml.put_calendar:
+        calendar_id: "planned-outages"
+'''
+buildRestTests.setups['calendar_outages_addevent'] = buildRestTests.setups['calendar_outages'] + '''
+  - do:
+      xpack.ml.post_calendar_events:
+        calendar_id: "planned-outages"
+        body: >
+          { "description": "event 1", "start_time": "2017-12-01T00:00:00Z", "end_time": "2017-12-02T00:00:00Z", "calendar_id": "planned-outages" }
+
+
+'''
+buildRestTests.setups['calendar_outages_openjob'] = buildRestTests.setups['server_metrics_openjob'] + '''
+  - do:
+      xpack.ml.put_calendar:
+        calendar_id: "planned-outages"
+'''
+buildRestTests.setups['calendar_outages_addjob'] = buildRestTests.setups['server_metrics_openjob'] + '''
+  - do:
+      xpack.ml.put_calendar:
+        calendar_id: "planned-outages"
+        body: >
+          {
+            "job_ids": ["total-requests"]
+          }
+'''
+buildRestTests.setups['calendar_outages_addevent'] = buildRestTests.setups['calendar_outages_addjob'] + '''
+  - do:
+      xpack.ml.post_calendar_events:
+        calendar_id: "planned-outages"
+        body: >
+          { "events" : [
+            { "description": "event 1", "start_time": "1513641600000", "end_time": "1513728000000"},
+            { "description": "event 2", "start_time": "1513814400000", "end_time": "1513900800000"},
+            { "description": "event 3", "start_time": "1514160000000", "end_time": "1514246400000"}
+          ]}
+'''
@@ -0,0 +1,83 @@
[[java-rest-high-x-pack-ml-flush-job]]
=== Flush Job API

The Flush Job API forces any buffered data to be processed by a {ml} job
in the cluster.
It accepts a `FlushJobRequest` object and responds
with a `FlushJobResponse` object.

[[java-rest-high-x-pack-ml-flush-job-request]]
==== Flush Job Request

A `FlushJobRequest` object gets created with an existing non-null `jobId`.
All other fields are optional for the request.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request]
--------------------------------------------------
<1> Constructing a new request referencing an existing `jobId`
==== Optional Arguments

The following arguments are optional.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request-options]
--------------------------------------------------
<1> Set request to calculate the interim results
<2> Set the advance time, flushing up to the given time value
<3> Set the start time for the range of buckets on which
to calculate the interim results (requires `calc_interim` to be `true`)
<4> Set the end time for the range of buckets on which
to calculate interim results (requires `calc_interim` to be `true`)
<5> Set the skip time to skip processing up to the given time value
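The tagged snippets above live in `MlClientDocumentationIT.java`. As a rough, hypothetical
sketch of the same calls (the job id and timestamps are made up, and the setter names follow
the callouts above rather than a verbatim copy of the tagged test snippet):

["source","java"]
--------------------------------------------------
FlushJobRequest flushJobRequest = new FlushJobRequest("total-requests"); // existing jobId
flushJobRequest.setCalcInterim(true);                        // <1> calculate interim results
flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); // <2> advance time
flushJobRequest.setStart("2018-08-31T16:35:17+00:00");       // <3> start of bucket range
flushJobRequest.setEnd("2018-08-31T16:35:27+00:00");         // <4> end of bucket range
flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00");    // <5> skip time
--------------------------------------------------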
[[java-rest-high-x-pack-ml-flush-job-execution]]
==== Execution

The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute]
--------------------------------------------------

[[java-rest-high-x-pack-ml-flush-job-execution-async]]
==== Asynchronous Execution

The request can also be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute-async]
--------------------------------------------------
<1> The `FlushJobRequest` to execute and the `ActionListener` to use when
the execution completes

The method does not block and returns immediately. The passed `ActionListener` is used
to notify the caller of completion. A typical `ActionListener` for `FlushJobResponse` may
look like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-listener]
--------------------------------------------------
<1> `onResponse` is called back when the action is completed successfully
<2> `onFailure` is called back when some unexpected error occurs

[[java-rest-high-x-pack-ml-flush-job-response]]
==== Flush Job Response

A `FlushJobResponse` contains an acknowledgement and an optional end date for the
last finalized bucket.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-response]
--------------------------------------------------
<1> `isFlushed()` indicates if the job was successfully flushed or not.
<2> `getLastFinalizedBucketEnd()` provides the timestamp
(in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
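Putting it together, a hedged end-to-end sketch (the client accessor and method names here
follow the surrounding text and callouts, not a verified copy of the test snippet):

["source","java"]
--------------------------------------------------
FlushJobResponse flushJobResponse = client.machineLearningClient()
    .flushJob(flushJobRequest, RequestOptions.DEFAULT);
boolean flushed = flushJobResponse.isFlushed();                             // <1>
Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); // <2>
--------------------------------------------------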
@@ -0,0 +1,67 @@
[[java-rest-high-x-pack-ml-get-job-stats]]
=== Get Job Stats API

The Get Job Stats API provides the ability to retrieve statistics for any
number of {ml} jobs in the cluster.
It accepts a `GetJobStatsRequest` object and responds
with a `GetJobStatsResponse` object.

[[java-rest-high-x-pack-ml-get-job-stats-request]]
==== Get Job Stats Request

A `GetJobStatsRequest` object can have any number of `jobId`
entries. However, they all must be non-null. An empty list is the same as
requesting statistics for all jobs.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-request]
--------------------------------------------------
<1> Constructing a new request referencing existing `jobIds`; it can contain wildcards
<2> Whether to ignore if a wildcard expression matches no jobs
(this includes the `_all` string and the case where no jobs have been specified)
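As a rough sketch of such a request (the ids are illustrative; `setAllowNoJobs` mirrors the
`allow_no_jobs` query parameter handled by the request converter):

["source","java"]
--------------------------------------------------
GetJobStatsRequest request = new GetJobStatsRequest("total-requests", "sample*"); // <1> ids or wildcards
request.setAllowNoJobs(true); // <2> tolerate wildcard expressions that match no jobs
--------------------------------------------------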
[[java-rest-high-x-pack-ml-get-job-stats-execution]]
==== Execution

The request can be executed through the `MachineLearningClient` contained
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute]
--------------------------------------------------

[[java-rest-high-x-pack-ml-get-job-stats-execution-async]]
==== Asynchronous Execution

The request can also be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute-async]
--------------------------------------------------
<1> The `GetJobStatsRequest` to execute and the `ActionListener` to use when
the execution completes

The method does not block and returns immediately. The passed `ActionListener` is used
to notify the caller of completion. A typical `ActionListener` for `GetJobStatsResponse` may
look like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-listener]
--------------------------------------------------
<1> `onResponse` is called back when the action is completed successfully
<2> `onFailure` is called back when some unexpected error occurs

[[java-rest-high-x-pack-ml-get-job-stats-response]]
==== Get Job Stats Response
The returned `GetJobStatsResponse` contains the requested job statistics:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-response]
--------------------------------------------------
<1> `getCount()` indicates the number of job statistics found
<2> `getJobStats()` is the collection of {ml} `JobStats` objects found
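A hedged sketch of consuming the response (the accessor names follow the callouts above
rather than a verified copy of the tagged snippet):

["source","java"]
--------------------------------------------------
GetJobStatsResponse response = client.machineLearningClient()
    .getJobStats(request, RequestOptions.DEFAULT);
long numberOfJobStats = response.getCount();      // <1>
List<JobStats> jobStats = response.getJobStats(); // <2>
--------------------------------------------------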
@@ -211,6 +211,8 @@ The Java High Level REST Client supports the following Machine Learning APIs:
 * <<java-rest-high-x-pack-ml-delete-job>>
 * <<java-rest-high-x-pack-ml-open-job>>
 * <<java-rest-high-x-pack-ml-close-job>>
+* <<java-rest-high-x-pack-ml-flush-job>>
+* <<java-rest-high-x-pack-ml-get-job-stats>>
 * <<java-rest-high-x-pack-ml-get-buckets>>
 * <<java-rest-high-x-pack-ml-get-records>>
@@ -219,6 +221,8 @@ include::ml/get-job.asciidoc[]
 include::ml/delete-job.asciidoc[]
 include::ml/open-job.asciidoc[]
 include::ml/close-job.asciidoc[]
+include::ml/flush-job.asciidoc[]
+include::ml/get-job-stats.asciidoc[]
 include::ml/get-buckets.asciidoc[]
 include::ml/get-records.asciidoc[]
@@ -39,6 +39,7 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x.
 * <<breaking_70_scripting_changes>>
 * <<breaking_70_snapshotstats_changes>>
 * <<breaking_70_restclient_changes>>
+* <<breaking_70_low_level_restclient_changes>>

 include::migrate_7_0/aggregations.asciidoc[]
 include::migrate_7_0/analysis.asciidoc[]
@@ -54,3 +55,4 @@ include::migrate_7_0/settings.asciidoc[]
 include::migrate_7_0/scripting.asciidoc[]
 include::migrate_7_0/snapshotstats.asciidoc[]
 include::migrate_7_0/restclient.asciidoc[]
+include::migrate_7_0/low_level_restclient.asciidoc[]
@@ -0,0 +1,14 @@
[[breaking_70_low_level_restclient_changes]]
=== Low-level REST client changes

==== Deprecated flavors of performRequest have been removed

We deprecated the flavors of `performRequest` and `performRequestAsync` that
do not take `Request` objects in 6.4.0, in favor of the flavors that take
`Request` objects, because those methods can be extended without breaking
backwards compatibility.
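A minimal before/after sketch (the endpoint and parameter are illustrative only):

["source","java"]
--------------------------------------------------
// 6.3 and earlier (removed in 7.0):
// Response response = restClient.performRequest("GET", "/", singletonMap("pretty", "true"));

// 6.4.0 and later:
Request request = new Request("GET", "/");
request.addParameter("pretty", "true");
Response response = restClient.performRequest(request);
--------------------------------------------------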
==== Removed setHosts

We deprecated `setHosts` in 6.4.0 in favor of `setNodes` because it supports
host metadata used by the `NodeSelector`.
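For example, a hedged sketch of the replacement call (the hosts are illustrative):

["source","java"]
--------------------------------------------------
restClient.setNodes(Arrays.asList(
    new Node(new HttpHost("host-1", 9200)),
    new Node(new HttpHost("host-2", 9200))));
--------------------------------------------------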
@ -1,4 +1,5 @@
|
||||||
[role="xpack"]
|
[role="xpack"]
|
||||||
|
[testenv="platinum"]
|
||||||
[[ml-calendar-resource]]
|
[[ml-calendar-resource]]
|
||||||
=== Calendar Resources
|
=== Calendar Resources
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
[role="xpack"]
|
[role="xpack"]
|
||||||
|
[testenv="platinum"]
|
||||||
[[ml-close-job]]
|
[[ml-close-job]]
|
||||||
=== Close Jobs API
|
=== Close Jobs API
|
||||||
++++
|
++++
|
||||||
|
@ -80,7 +81,7 @@ The following example closes the `total-requests` job:
|
||||||
POST _xpack/ml/anomaly_detectors/total-requests/_close
|
POST _xpack/ml/anomaly_detectors/total-requests/_close
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[setup:server_metrics_openjob]
|
// TEST[skip:setup:server_metrics_openjob]
|
||||||
|
|
||||||
When the job is closed, you receive the following results:
|
When the job is closed, you receive the following results:
|
||||||
[source,js]
|
[source,js]
|
|
@ -1,4 +1,5 @@
|
||||||
[role="xpack"]
|
[role="xpack"]
|
||||||
|
[testenv="platinum"]
|
||||||
[[ml-datafeed-resource]]
|
[[ml-datafeed-resource]]
|
||||||
=== {dfeed-cap} Resources
|
=== {dfeed-cap} Resources
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
[role="xpack"]
|
[role="xpack"]
|
||||||
|
[testenv="platinum"]
|
||||||
[[ml-delete-calendar-event]]
|
[[ml-delete-calendar-event]]
|
||||||
=== Delete Events from Calendar API
|
=== Delete Events from Calendar API
|
||||||
++++
|
++++
|
||||||
|
@ -44,7 +45,7 @@ calendar:
|
||||||
DELETE _xpack/ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st
|
DELETE _xpack/ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[catch:missing]
|
// TEST[skip:catch:missing]
|
||||||
|
|
||||||
When the event is removed, you receive the following results:
|
When the event is removed, you receive the following results:
|
||||||
[source,js]
|
[source,js]
|
||||||
|
@ -53,4 +54,3 @@ When the event is removed, you receive the following results:
|
||||||
"acknowledged": true
|
"acknowledged": true
|
||||||
}
|
}
|
||||||
----
|
----
|
||||||
// NOTCONSOLE
|
|
|
@ -1,4 +1,5 @@
|
||||||
[role="xpack"]
|
[role="xpack"]
|
||||||
|
[testenv="platinum"]
|
||||||
[[ml-delete-calendar-job]]
|
[[ml-delete-calendar-job]]
|
||||||
=== Delete Jobs from Calendar API
|
=== Delete Jobs from Calendar API
|
||||||
++++
|
++++
|
||||||
|
@ -38,7 +39,7 @@ calendar and `total-requests` job:
|
||||||
DELETE _xpack/ml/calendars/planned-outages/jobs/total-requests
|
DELETE _xpack/ml/calendars/planned-outages/jobs/total-requests
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[setup:calendar_outages_addjob]
|
// TEST[skip:setup:calendar_outages_addjob]
|
||||||
|
|
||||||
When the job is removed from the calendar, you receive the following
|
When the job is removed from the calendar, you receive the following
|
||||||
results:
|
results:
|
||||||
|
@ -50,4 +51,4 @@ results:
|
||||||
"job_ids": []
|
"job_ids": []
|
||||||
}
|
}
|
||||||
----
|
----
|
||||||
//TESTRESPONSE
|
// TESTRESPONSE
|
|
@ -1,4 +1,5 @@
|
||||||
[role="xpack"]
|
[role="xpack"]
|
||||||
|
[testenv="platinum"]
|
||||||
[[ml-delete-calendar]]
|
[[ml-delete-calendar]]
|
||||||
=== Delete Calendar API
|
=== Delete Calendar API
|
||||||
++++
|
++++
|
||||||
|
@ -40,7 +41,7 @@ The following example deletes the `planned-outages` calendar:
|
||||||
DELETE _xpack/ml/calendars/planned-outages
|
DELETE _xpack/ml/calendars/planned-outages
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[setup:calendar_outages]
|
// TEST[skip:setup:calendar_outages]
|
||||||
|
|
||||||
When the calendar is deleted, you receive the following results:
|
When the calendar is deleted, you receive the following results:
|
||||||
[source,js]
|
[source,js]
|
||||||
|
@@ -49,4 +50,4 @@ When the calendar is deleted, you receive the following results:
   "acknowledged": true
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-delete-datafeed]]
 === Delete {dfeeds-cap} API
 ++++

@@ -47,7 +48,7 @@ The following example deletes the `datafeed-total-requests` {dfeed}:
 DELETE _xpack/ml/datafeeds/datafeed-total-requests
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
 
 When the {dfeed} is deleted, you receive the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-delete-filter]]
 === Delete Filter API
 ++++

@@ -41,7 +42,7 @@ The following example deletes the `safe_domains` filter:
 DELETE _xpack/ml/filters/safe_domains
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
 
 When the filter is deleted, you receive the following results:
 [source,js]

@@ -50,4 +51,4 @@ When the filter is deleted, you receive the following results:
   "acknowledged": true
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-delete-job]]
 === Delete Jobs API
 ++++

@@ -56,7 +57,7 @@ The following example deletes the `total-requests` job:
 DELETE _xpack/ml/anomaly_detectors/total-requests
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
 
 When the job is deleted, you receive the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-delete-snapshot]]
 === Delete Model Snapshots API
 ++++

@@ -32,7 +33,6 @@ the `model_snapshot_id` in the results from the get jobs API.
 
 You must have `manage_ml`, or `manage` cluster privileges to use this API.
 For more information, see {xpack-ref}/security-privileges.html[Security Privileges].
-//<<privileges-list-cluster>>.
 
 
 ==== Examples

@@ -53,3 +53,4 @@ When the snapshot is deleted, you receive the following results:
   "acknowledged": true
 }
 ----
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-event-resource]]
 === Scheduled Event Resources
 

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-filter-resource]]
 === Filter Resources
 

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-flush-job]]
 === Flush Jobs API
 ++++

@@ -74,7 +75,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_flush
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
 
 When the operation succeeds, you receive the following results:
 [source,js]

@@ -84,7 +85,7 @@ When the operation succeeds, you receive the following results:
   "last_finalized_bucket_end": 1455234900000
 }
 ----
-// TESTRESPONSE[s/"last_finalized_bucket_end": 1455234900000/"last_finalized_bucket_end": $body.last_finalized_bucket_end/]
+//TESTRESPONSE[s/"last_finalized_bucket_end": 1455234900000/"last_finalized_bucket_end": $body.last_finalized_bucket_end/]
 
 The `last_finalized_bucket_end` provides the timestamp (in
 milliseconds-since-the-epoch) of the end of the last bucket that was processed.

@@ -101,7 +102,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_flush
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
 
 When the operation succeeds, you receive the following results:
 [source,js]
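Both `_flush` examples above drive the same endpoint and differ only in body parameters. As a rough sketch of issuing the documented call from Java with the low-level REST client (a hypothetical helper, not part of this change; it assumes an already-configured `RestClient` and reuses the `total-requests` job id from the docs):

[source,java]
----
import org.apache.http.util.EntityUtils;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public final class FlushJobExample {

    // Sketch: POST _xpack/ml/anomaly_detectors/<job_id>/_flush with calc_interim,
    // returning the raw JSON body (which carries last_finalized_bucket_end).
    static String flushJob(RestClient client, String jobId) throws Exception {
        Request request = new Request("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_flush");
        request.setJsonEntity("{ \"calc_interim\": true }");
        Response response = client.performRequest(request);
        return EntityUtils.toString(response.getEntity());
    }
}
----
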
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-forecast]]
 === Forecast Jobs API
 ++++

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-bucket]]
 === Get Buckets API
 ++++

@@ -81,7 +82,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
 roles provide these privileges. For more information, see
 {xpack-ref}/security-privileges.html[Security Privileges] and
 {xpack-ref}/built-in-roles.html[Built-in Roles].
-//<<security-privileges>> and <<built-in-roles>>.
 
 
 ==== Examples

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-calendar-event]]
 === Get Scheduled Events API
 ++++

@@ -66,7 +67,7 @@ The following example gets information about the scheduled events in the
 GET _xpack/ml/calendars/planned-outages/events
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:calendar_outages_addevent]
+// TEST[skip:setup:calendar_outages_addevent]
 
 The API returns the following results:
 

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-calendar]]
 === Get Calendars API
 ++++

@@ -62,7 +63,7 @@ calendar:
 GET _xpack/ml/calendars/planned-outages
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
 
 The API returns the following results:
 [source,js]

@@ -79,4 +80,4 @@ The API returns the following results:
   ]
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-category]]
 === Get Categories API
 ++++

@@ -18,7 +19,6 @@ Retrieves job results for one or more categories.
 
 For more information about categories, see
 {xpack-ref}/ml-configuring-categories.html[Categorizing Log Messages].
-//<<ml-configuring-categories>>.
 
 ==== Path Parameters
 

@@ -56,7 +56,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
 roles provide these privileges. For more information, see
 {xpack-ref}/security-privileges.html[Security Privileges] and
 {xpack-ref}/built-in-roles.html[Built-in Roles].
-//<<security-privileges>> and <<built-in-roles>>.
 
 
 ==== Examples

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-datafeed-stats]]
 === Get {dfeed-cap} Statistics API
 ++++

@@ -66,7 +67,7 @@ The following example gets usage information for the
 GET _xpack/ml/datafeeds/datafeed-total-requests/_stats
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_startdf]
+// TEST[skip:setup:server_metrics_startdf]
 
 The API returns the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-datafeed]]
 === Get {dfeeds-cap} API
 ++++

@@ -60,7 +61,7 @@ The following example gets configuration information for the
 GET _xpack/ml/datafeeds/datafeed-total-requests
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
 
 The API returns the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-filter]]
 === Get Filters API
 ++++

@@ -62,7 +63,7 @@ filter:
 GET _xpack/ml/filters/safe_domains
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
 
 The API returns the following results:
 [source,js]

@@ -81,4 +82,4 @@ The API returns the following results:
   ]
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-influencer]]
 === Get Influencers API
 ++++

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-job-stats]]
 === Get Job Statistics API
 ++++
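The Get Job Statistics page receives the same `testenv` treatment. For orientation, a minimal low-level sketch of the call that page documents (an assumption, since the endpoint itself is not shown in this hunk: the standard 6.x path `GET _xpack/ml/anomaly_detectors/<job_id>/_stats`, where `_all` can stand in for the job id):

[source,java]
----
import org.apache.http.util.EntityUtils;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public final class GetJobStatsExample {

    // Sketch: fetch usage statistics for one job (or "_all") as raw JSON.
    static String getJobStats(RestClient client, String jobId) throws Exception {
        Request request = new Request("GET", "/_xpack/ml/anomaly_detectors/" + jobId + "/_stats");
        Response response = client.performRequest(request);
        return EntityUtils.toString(response.getEntity());
    }
}
----
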
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-job]]
 === Get Jobs API
 ++++

@@ -59,7 +60,7 @@ The following example gets configuration information for the `total-requests` jo
 GET _xpack/ml/anomaly_detectors/total-requests
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
 
 The API returns the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-overall-buckets]]
 === Get Overall Buckets API
 ++++

@@ -93,7 +94,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
 roles provide these privileges. For more information, see
 {xpack-ref}/security-privileges.html[Security Privileges] and
 {xpack-ref}/built-in-roles.html[Built-in Roles].
-//<<security-privileges>> and <<built-in-roles>>.
 
 
 ==== Examples

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-record]]
 === Get Records API
 ++++

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-get-snapshot]]
 === Get Model Snapshots API
 ++++

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-jobstats]]
 === Job Statistics
 

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-job-resource]]
 === Job Resources
 

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-apis]]
 == Machine Learning APIs
 

@@ -70,57 +71,57 @@ machine learning APIs and in advanced job configuration options in Kibana.
 * <<ml-get-record,Get records>>
 
 //ADD
-include::ml/post-calendar-event.asciidoc[]
-include::ml/put-calendar-job.asciidoc[]
+include::post-calendar-event.asciidoc[]
+include::put-calendar-job.asciidoc[]
 //CLOSE
-include::ml/close-job.asciidoc[]
+include::close-job.asciidoc[]
 //CREATE
-include::ml/put-calendar.asciidoc[]
-include::ml/put-datafeed.asciidoc[]
-include::ml/put-filter.asciidoc[]
-include::ml/put-job.asciidoc[]
+include::put-calendar.asciidoc[]
+include::put-datafeed.asciidoc[]
+include::put-filter.asciidoc[]
+include::put-job.asciidoc[]
 //DELETE
-include::ml/delete-calendar.asciidoc[]
-include::ml/delete-datafeed.asciidoc[]
-include::ml/delete-calendar-event.asciidoc[]
-include::ml/delete-filter.asciidoc[]
-include::ml/delete-job.asciidoc[]
-include::ml/delete-calendar-job.asciidoc[]
-include::ml/delete-snapshot.asciidoc[]
+include::delete-calendar.asciidoc[]
+include::delete-datafeed.asciidoc[]
+include::delete-calendar-event.asciidoc[]
+include::delete-filter.asciidoc[]
+include::delete-job.asciidoc[]
+include::delete-calendar-job.asciidoc[]
+include::delete-snapshot.asciidoc[]
 //FLUSH
-include::ml/flush-job.asciidoc[]
+include::flush-job.asciidoc[]
 //FORECAST
-include::ml/forecast.asciidoc[]
+include::forecast.asciidoc[]
 //GET
-include::ml/get-calendar.asciidoc[]
-include::ml/get-bucket.asciidoc[]
-include::ml/get-overall-buckets.asciidoc[]
-include::ml/get-category.asciidoc[]
-include::ml/get-datafeed.asciidoc[]
-include::ml/get-datafeed-stats.asciidoc[]
-include::ml/get-influencer.asciidoc[]
-include::ml/get-job.asciidoc[]
-include::ml/get-job-stats.asciidoc[]
-include::ml/get-snapshot.asciidoc[]
-include::ml/get-calendar-event.asciidoc[]
-include::ml/get-filter.asciidoc[]
-include::ml/get-record.asciidoc[]
+include::get-calendar.asciidoc[]
+include::get-bucket.asciidoc[]
+include::get-overall-buckets.asciidoc[]
+include::get-category.asciidoc[]
+include::get-datafeed.asciidoc[]
+include::get-datafeed-stats.asciidoc[]
+include::get-influencer.asciidoc[]
+include::get-job.asciidoc[]
+include::get-job-stats.asciidoc[]
+include::get-snapshot.asciidoc[]
+include::get-calendar-event.asciidoc[]
+include::get-filter.asciidoc[]
+include::get-record.asciidoc[]
 //OPEN
-include::ml/open-job.asciidoc[]
+include::open-job.asciidoc[]
 //POST
-include::ml/post-data.asciidoc[]
+include::post-data.asciidoc[]
 //PREVIEW
-include::ml/preview-datafeed.asciidoc[]
+include::preview-datafeed.asciidoc[]
 //REVERT
-include::ml/revert-snapshot.asciidoc[]
+include::revert-snapshot.asciidoc[]
 //START/STOP
-include::ml/start-datafeed.asciidoc[]
-include::ml/stop-datafeed.asciidoc[]
+include::start-datafeed.asciidoc[]
+include::stop-datafeed.asciidoc[]
 //UPDATE
-include::ml/update-datafeed.asciidoc[]
-include::ml/update-filter.asciidoc[]
-include::ml/update-job.asciidoc[]
-include::ml/update-snapshot.asciidoc[]
+include::update-datafeed.asciidoc[]
+include::update-filter.asciidoc[]
+include::update-job.asciidoc[]
+include::update-snapshot.asciidoc[]
 //VALIDATE
-//include::ml/validate-detector.asciidoc[]
-//include::ml/validate-job.asciidoc[]
+//include::validate-detector.asciidoc[]
+//include::validate-job.asciidoc[]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-open-job]]
 === Open Jobs API
 ++++

@@ -56,7 +57,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_open
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
 
 When the job opens, you receive the following results:
 [source,js]

@@ -65,5 +66,4 @@ When the job opens, you receive the following results:
   "opened": true
 }
 ----
-//CONSOLE
 // TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-post-calendar-event]]
 === Add Events to Calendar API
 ++++

@@ -52,7 +53,7 @@ POST _xpack/ml/calendars/planned-outages/events
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
 
 The API returns the following results:
 

@@ -81,7 +82,7 @@ The API returns the following results:
   ]
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE
 
 For more information about these properties, see
 <<ml-event-resource,Scheduled Event Resources>>.

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-post-data]]
 === Post Data to Jobs API
 ++++

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-preview-datafeed]]
 === Preview {dfeeds-cap} API
 ++++

@@ -53,7 +54,7 @@ The following example obtains a preview of the `datafeed-farequote` {dfeed}:
 GET _xpack/ml/datafeeds/datafeed-farequote/_preview
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:farequote_datafeed]
+// TEST[skip:setup:farequote_datafeed]
 
 The data that is returned for this example is as follows:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-put-calendar-job]]
 === Add Jobs to Calendar API
 ++++

@@ -38,7 +39,7 @@ The following example associates the `planned-outages` calendar with the
 PUT _xpack/ml/calendars/planned-outages/jobs/total-requests
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:calendar_outages_openjob]
+// TEST[skip:setup:calendar_outages_openjob]
 
 The API returns the following results:
 

@@ -51,4 +52,4 @@ The API returns the following results:
   ]
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-put-calendar]]
 === Create Calendar API
 ++++

@@ -44,6 +45,7 @@ The following example creates the `planned-outages` calendar:
 PUT _xpack/ml/calendars/planned-outages
 --------------------------------------------------
 // CONSOLE
+// TEST[skip:need-license]
 
 When the calendar is created, you receive the following results:
 [source,js]

@@ -53,4 +55,4 @@ When the calendar is created, you receive the following results:
   "job_ids": []
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-put-datafeed]]
 === Create {dfeeds-cap} API
 ++++

@@ -107,7 +108,7 @@ PUT _xpack/ml/datafeeds/datafeed-total-requests
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
 
 When the {dfeed} is created, you receive the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-put-filter]]
 === Create Filter API
 ++++

@@ -55,6 +56,7 @@ PUT _xpack/ml/filters/safe_domains
 }
 --------------------------------------------------
 // CONSOLE
+// TEST[skip:need-licence]
 
 When the filter is created, you receive the following response:
 [source,js]

@@ -65,4 +67,4 @@ When the filter is created, you receive the following response:
   "items": ["*.google.com", "wikipedia.org"]
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-put-job]]
 === Create Jobs API
 ++++

@@ -104,6 +105,7 @@ PUT _xpack/ml/anomaly_detectors/total-requests
 }
 --------------------------------------------------
 // CONSOLE
+// TEST[skip:need-licence]
 
 When the job is created, you receive the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-results-resource]]
 === Results Resources
 

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-revert-snapshot]]
 === Revert Model Snapshots API
 ++++

@@ -22,33 +23,6 @@ then it might be appropriate to reset the model state to a time before this
 event. For example, you might consider reverting to a saved snapshot after Black
 Friday or a critical system failure.
 
-////
-To revert to a saved snapshot, you must follow this sequence:
-. Close the job
-. Revert to a snapshot
-. Open the job
-. Send new data to the job
-
-When reverting to a snapshot, there is a choice to make about whether or not
-you want to keep the results that were created between the time of the snapshot
-and the current time. In the case of Black Friday for instance, you might want
-to keep the results and carry on processing data from the current time,
-though without the models learning the one-off behavior and compensating for it.
-However, say in the event of a critical system failure and you decide to reset
-and models to a previous known good state and process data from that time,
-it makes sense to delete the intervening results for the known bad period and
-resend data from that earlier time.
-
-Any gaps in data since the snapshot time will be treated as nulls and not modeled.
-If there is a partial bucket at the end of the snapshot and/or at the beginning
-of the new input data, then this will be ignored and treated as a gap.
-
-For jobs with many entities, the model state may be very large.
-If a model state is several GB, this could take 10-20 mins to revert depending
-upon machine spec and resources. If this is the case, please ensure this time
-is planned for.
-Model size (in bytes) is available as part of the Job Resource Model Size Stats.
-////
 IMPORTANT: Before you revert to a saved snapshot, you must close the job.
 
 
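The commented-out block removed above spelled out the revert sequence: close the job, revert to a snapshot, reopen the job, then resend data. A hedged sketch of that sequence against the documented 6.x endpoints (a hypothetical helper, assuming a configured `RestClient`; it is not part of this change):

[source,java]
----
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public final class RevertSnapshotExample {

    // Sketch of the sequence the removed text described:
    // close the job, revert to a snapshot, reopen the job, resend data.
    static void revertAndResume(RestClient client, String jobId, String snapshotId, String ndjsonData)
            throws Exception {
        client.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close"));
        client.performRequest(new Request("POST",
                "/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_revert"));
        client.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_open"));
        Request postData = new Request("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_data");
        postData.setJsonEntity(ndjsonData); // one JSON document per line
        client.performRequest(postData);
    }
}
----
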
@@ -77,7 +51,6 @@ If you want to resend data, then delete the intervening results.
 You must have `manage_ml`, or `manage` cluster privileges to use this API.
 For more information, see
 {xpack-ref}/security-privileges.html[Security Privileges].
-//<<privileges-list-cluster>>.
 
 
 ==== Examples

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-snapshot-resource]]
 === Model Snapshot Resources
 

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-start-datafeed]]
 === Start {dfeeds-cap} API
 ++++

@@ -79,7 +80,6 @@ of the latest processed record.
 You must have `manage_ml`, or `manage` cluster privileges to use this API.
 For more information, see
 {xpack-ref}/security-privileges.html[Security Privileges].
-//<<privileges-list-cluster>>.
 
 
 ==== Security Integration

@@ -101,7 +101,7 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_start
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
 
 When the {dfeed} starts, you receive the following results:
 [source,js]

@@ -110,5 +110,4 @@ When the {dfeed} starts, you receive the following results:
   "started": true
 }
 ----
-// CONSOLE
 // TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-stop-datafeed]]
 === Stop {dfeeds-cap} API
 ++++

@@ -18,7 +19,6 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle.
 
 `POST _xpack/ml/datafeeds/_all/_stop`
 
-//TBD: Can there be spaces between the items in the list?
 
 ===== Description
 

@@ -63,14 +63,14 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_stop
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_startdf]
+// TEST[skip:setup:server_metrics_startdf]
 
 When the {dfeed} stops, you receive the following results:
 
 [source,js]
 ----
 {
   "stopped": true
 }
 ----
-// CONSOLE
 // TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-update-datafeed]]
 === Update {dfeeds-cap} API
 ++++

@@ -106,7 +107,7 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_update
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
 
 When the {dfeed} is updated, you receive the full {dfeed} configuration with
 with the updated values:

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-update-filter]]
 === Update Filter API
 ++++

@@ -52,7 +53,7 @@ POST _xpack/ml/filters/safe_domains/_update
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
 
 The API returns the following results:
 

@@ -64,4 +65,4 @@ The API returns the following results:
   "items": ["*.google.com", "*.myorg.com"]
 }
 ----
-//TESTRESPONSE
+// TESTRESPONSE

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-update-job]]
 === Update Jobs API
 ++++

@@ -121,7 +122,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_update
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
 
 When the job is updated, you receive a summary of the job configuration
 information, including the updated property values. For example:

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-update-snapshot]]
 === Update Model Snapshots API
 ++++

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-valid-detector]]
 === Validate Detectors API
 ++++

@@ -44,6 +45,7 @@ POST _xpack/ml/anomaly_detectors/_validate/detector
 }
 --------------------------------------------------
 // CONSOLE
+// TEST[skip:needs-licence]
 
 When the validation completes, you receive the following results:
 [source,js]

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="platinum"]
 [[ml-valid-job]]
 === Validate Jobs API
 ++++

@@ -55,6 +56,7 @@ POST _xpack/ml/anomaly_detectors/_validate
 }
 --------------------------------------------------
 // CONSOLE
+// TEST[skip:needs-licence]
 
 When the validation is complete, you receive the following results:
 [source,js]

@@ -549,3 +549,9 @@ See <<security-api-has-privileges>>.
 === X-Pack commands
 
 See <<commands>>.
+
+[role="exclude",id="ml-api-definitions"]
+=== Machine learning API definitions
+
+See <<api-definitions>>.
+

@@ -0,0 +1,27 @@
+[role="xpack"]
+[[api-definitions]]
+== Definitions
+
+These resource definitions are used in {ml} and {security} APIs and in {kib}
+advanced {ml} job configuration options.
+
+* <<ml-calendar-resource,Calendars>>
+* <<ml-datafeed-resource,{dfeeds-cap}>>
+* <<ml-datafeed-counts,{dfeed-cap} counts>>
+* <<ml-filter-resource,Filters>>
+* <<ml-job-resource,Jobs>>
+* <<ml-jobstats,Job statistics>>
+* <<ml-snapshot-resource,Model snapshots>>
+* <<ml-results-resource,Results>>
+* <<role-mapping-resources,Role mappings>>
+* <<ml-event-resource,Scheduled Events>>
+
+include::{es-repo-dir}/ml/apis/calendarresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/datafeedresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/filterresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/jobresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/jobcounts.asciidoc[]
+include::{es-repo-dir}/ml/apis/snapshotresource.asciidoc[]
+include::{xes-repo-dir}/rest-api/security/role-mapping-resources.asciidoc[]
+include::{es-repo-dir}/ml/apis/resultsresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/eventresource.asciidoc[]

@@ -22,8 +22,8 @@ include::info.asciidoc[]
 include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[]
 include::{es-repo-dir}/licensing/index.asciidoc[]
 include::{es-repo-dir}/migration/migration.asciidoc[]
-include::{xes-repo-dir}/rest-api/ml-api.asciidoc[]
+include::{es-repo-dir}/ml/apis/ml-api.asciidoc[]
 include::{es-repo-dir}/rollup/rollup-api.asciidoc[]
 include::{xes-repo-dir}/rest-api/security.asciidoc[]
 include::{xes-repo-dir}/rest-api/watcher.asciidoc[]
-include::{xes-repo-dir}/rest-api/defs.asciidoc[]
+include::defs.asciidoc[]

@@ -125,6 +125,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
         closeShards(shard);
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33330")
     public void testSyncerOnClosingShard() throws Exception {
         IndexShard shard = newStartedShard(true);
         AtomicBoolean syncActionCalled = new AtomicBoolean();

@@ -14,17 +14,6 @@ buildRestTests.expectedUnconvertedCandidates = [
   'en/security/authorization/run-as-privilege.asciidoc',
   'en/security/ccs-clients-integrations/http.asciidoc',
   'en/security/authorization/custom-roles-provider.asciidoc',
-  'en/rest-api/ml/delete-snapshot.asciidoc',
-  'en/rest-api/ml/get-bucket.asciidoc',
-  'en/rest-api/ml/get-job-stats.asciidoc',
-  'en/rest-api/ml/get-overall-buckets.asciidoc',
-  'en/rest-api/ml/get-category.asciidoc',
-  'en/rest-api/ml/get-record.asciidoc',
-  'en/rest-api/ml/get-influencer.asciidoc',
-  'en/rest-api/ml/get-snapshot.asciidoc',
-  'en/rest-api/ml/post-data.asciidoc',
-  'en/rest-api/ml/revert-snapshot.asciidoc',
-  'en/rest-api/ml/update-snapshot.asciidoc',
   'en/rest-api/watcher/stats.asciidoc',
   'en/watcher/example-watches/watching-time-series-data.asciidoc',
 ]

@@ -1,36 +0,0 @@
-[role="xpack"]
-[[ml-api-definitions]]
-== Definitions
-
-These resource definitions are used in {ml} and {security} APIs and in {kib}
-advanced {ml} job configuration options.
-
-* <<ml-calendar-resource,Calendars>>
-* <<ml-datafeed-resource,{dfeeds-cap}>>
-* <<ml-datafeed-counts,{dfeed-cap} counts>>
-* <<ml-filter-resource,Filters>>
-* <<ml-job-resource,Jobs>>
-* <<ml-jobstats,Job statistics>>
-* <<ml-snapshot-resource,Model snapshots>>
-* <<ml-results-resource,Results>>
-* <<role-mapping-resources,Role mappings>>
-* <<ml-event-resource,Scheduled Events>>
-
-[role="xpack"]
-include::ml/calendarresource.asciidoc[]
-[role="xpack"]
-include::ml/datafeedresource.asciidoc[]
-[role="xpack"]
-include::ml/filterresource.asciidoc[]
-[role="xpack"]
-include::ml/jobresource.asciidoc[]
-[role="xpack"]
-include::ml/jobcounts.asciidoc[]
-[role="xpack"]
-include::security/role-mapping-resources.asciidoc[]
-[role="xpack"]
-include::ml/snapshotresource.asciidoc[]
-[role="xpack"]
-include::ml/resultsresource.asciidoc[]
-[role="xpack"]
-include::ml/eventresource.asciidoc[]