Merge remote-tracking branch 'es/7.x' into enrich-7.x

This commit is contained in:
Martijn van Groningen 2019-06-27 08:35:37 +02:00
commit 683e116601
No known key found for this signature in database
GPG Key ID: AB236F4FCF2AF12A
450 changed files with 28489 additions and 4541 deletions

View File

@@ -217,11 +217,19 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
@Override
public void start() {
String nodeNames = nodes.stream().map(ElasticsearchNode::getName).collect(Collectors.joining(","));
final String nodeNames;
if (nodes.stream().map(ElasticsearchNode::getName).anyMatch( name -> name == null)) {
nodeNames = null;
} else {
nodeNames = nodes.stream().map(ElasticsearchNode::getName).collect(Collectors.joining(","));
};
for (ElasticsearchNode node : nodes) {
if (Version.fromString(node.getVersion()).getMajor() >= 7) {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
node.defaultConfig.put("discovery.seed_providers", "file");
if (nodeNames != null) {
// Can only configure master nodes if we have node names defined
if (Version.fromString(node.getVersion()).getMajor() >= 7) {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
node.defaultConfig.put("discovery.seed_providers", "file");
}
}
node.start();
}

View File

@ -732,7 +732,10 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private void createConfiguration() {
defaultConfig.put("node.name", nameCustomization.apply(safeName(name)));
String nodeName = nameCustomization.apply(safeName(name));
if (nodeName != null) {
defaultConfig.put("node.name", nodeName);
}
defaultConfig.put("path.repo", confPathRepo.toAbsolutePath().toString());
defaultConfig.put("path.data", confPathData.toAbsolutePath().toString());
defaultConfig.put("path.logs", confPathLogs.toAbsolutePath().toString());

View File

@ -37,6 +37,7 @@ import java.io.IOException;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
import static org.elasticsearch.client.dataframe.GetDataFrameTransformRequest.ALLOW_NO_MATCH;
final class DataFrameRequestConverters {
@ -64,6 +65,9 @@ final class DataFrameRequestConverters {
if (getRequest.getPageParams() != null && getRequest.getPageParams().getSize() != null) {
request.addParameter(PageParams.SIZE.getPreferredName(), getRequest.getPageParams().getSize().toString());
}
if (getRequest.getAllowNoMatch() != null) {
request.addParameter(ALLOW_NO_MATCH, getRequest.getAllowNoMatch().toString());
}
return request;
}
@ -91,21 +95,24 @@ final class DataFrameRequestConverters {
}
static Request stopDataFrameTransform(StopDataFrameTransformRequest stopRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(stopRequest.getId())
.addPathPartAsIs("_stop")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
if (stopRequest.getWaitForCompletion() != null) {
params.withWaitForCompletion(stopRequest.getWaitForCompletion());
}
if (stopRequest.getTimeout() != null) {
params.withTimeout(stopRequest.getTimeout());
}
request.addParameters(params.asMap());
return request;
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(stopRequest.getId())
.addPathPartAsIs("_stop")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
if (stopRequest.getWaitForCompletion() != null) {
params.withWaitForCompletion(stopRequest.getWaitForCompletion());
}
if (stopRequest.getTimeout() != null) {
params.withTimeout(stopRequest.getTimeout());
}
if (stopRequest.getAllowNoMatch() != null) {
request.addParameter(ALLOW_NO_MATCH, stopRequest.getAllowNoMatch().toString());
}
request.addParameters(params.asMap());
return request;
}
static Request previewDataFrameTransform(PreviewDataFrameTransformRequest previewRequest) throws IOException {
@ -130,6 +137,9 @@ final class DataFrameRequestConverters {
if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getSize() != null) {
request.addParameter(PageParams.SIZE.getPreferredName(), statsRequest.getPageParams().getSize().toString());
}
if (statsRequest.getAllowNoMatch() != null) {
request.addParameter(ALLOW_NO_MATCH, statsRequest.getAllowNoMatch().toString());
}
return request;
}
}

View File

@ -32,12 +32,14 @@ import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataRequest;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.FindFileStructureRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.ForecastJobRequest;
@ -45,6 +47,8 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetFiltersRequest;
@ -61,12 +65,15 @@ import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.SetUpgradeModeRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
@ -581,6 +588,115 @@ final class MLRequestConverters {
return new Request(HttpDelete.METHOD_NAME, endpoint);
}
/**
 * Converts a {@link PutDataFrameAnalyticsRequest} into a PUT request against
 * the {@code _ml/data_frame/analytics/{id}} endpoint, serializing the config
 * as the request body.
 */
static Request putDataFrameAnalytics(PutDataFrameAnalyticsRequest putRequest) throws IOException {
    EndpointBuilder endpointBuilder = new EndpointBuilder()
        .addPathPartAsIs("_ml", "data_frame", "analytics")
        .addPathPart(putRequest.getConfig().getId());
    Request request = new Request(HttpPut.METHOD_NAME, endpointBuilder.build());
    request.setEntity(createEntity(putRequest, REQUEST_BODY_CONTENT_TYPE));
    return request;
}
/**
 * Converts a {@link GetDataFrameAnalyticsRequest} into a GET request for
 * {@code _ml/data_frame/analytics/{ids}}. Paging and allow_no_match settings
 * are only emitted as query parameters when the caller supplied them.
 */
static Request getDataFrameAnalytics(GetDataFrameAnalyticsRequest getRequest) {
    String endpoint = new EndpointBuilder()
        .addPathPartAsIs("_ml", "data_frame", "analytics")
        .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getIds()))
        .build();
    Request request = new Request(HttpGet.METHOD_NAME, endpoint);
    RequestConverters.Params params = new RequestConverters.Params();
    PageParams pageParams = getRequest.getPageParams();
    if (pageParams != null) {
        // from/size are independently optional within the page params.
        if (pageParams.getFrom() != null) {
            params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString());
        }
        if (pageParams.getSize() != null) {
            params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString());
        }
    }
    Boolean allowNoMatch = getRequest.getAllowNoMatch();
    if (allowNoMatch != null) {
        params.putParam(GetDataFrameAnalyticsRequest.ALLOW_NO_MATCH.getPreferredName(), Boolean.toString(allowNoMatch));
    }
    request.addParameters(params.asMap());
    return request;
}
/**
 * Converts a {@link GetDataFrameAnalyticsStatsRequest} into a GET request for
 * {@code _ml/data_frame/analytics/{ids}/_stats}. Paging and allow_no_match
 * settings are only emitted as query parameters when the caller supplied them.
 */
static Request getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest getStatsRequest) {
    String endpoint = new EndpointBuilder()
        .addPathPartAsIs("_ml", "data_frame", "analytics")
        .addPathPart(Strings.collectionToCommaDelimitedString(getStatsRequest.getIds()))
        .addPathPartAsIs("_stats")
        .build();
    Request request = new Request(HttpGet.METHOD_NAME, endpoint);
    RequestConverters.Params params = new RequestConverters.Params();
    PageParams pageParams = getStatsRequest.getPageParams();
    if (pageParams != null) {
        // from/size are independently optional within the page params.
        if (pageParams.getFrom() != null) {
            params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString());
        }
        if (pageParams.getSize() != null) {
            params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString());
        }
    }
    Boolean allowNoMatch = getStatsRequest.getAllowNoMatch();
    if (allowNoMatch != null) {
        params.putParam(GetDataFrameAnalyticsStatsRequest.ALLOW_NO_MATCH.getPreferredName(),
            Boolean.toString(allowNoMatch));
    }
    request.addParameters(params.asMap());
    return request;
}
/**
 * Converts a {@link StartDataFrameAnalyticsRequest} into a POST request for
 * {@code _ml/data_frame/analytics/{id}/_start}, adding the optional timeout
 * as a query parameter.
 */
static Request startDataFrameAnalytics(StartDataFrameAnalyticsRequest startRequest) {
    Request request = new Request(HttpPost.METHOD_NAME,
        new EndpointBuilder()
            .addPathPartAsIs("_ml", "data_frame", "analytics")
            .addPathPart(startRequest.getId())
            .addPathPartAsIs("_start")
            .build());
    RequestConverters.Params params = new RequestConverters.Params();
    if (startRequest.getTimeout() != null) {
        params.withTimeout(startRequest.getTimeout());
    }
    request.addParameters(params.asMap());
    return request;
}
/**
 * Converts a {@link StopDataFrameAnalyticsRequest} into a POST request for
 * {@code _ml/data_frame/analytics/{id}/_stop}, adding the optional timeout
 * and allow_no_match settings as query parameters.
 */
static Request stopDataFrameAnalytics(StopDataFrameAnalyticsRequest stopRequest) {
    Request request = new Request(HttpPost.METHOD_NAME,
        new EndpointBuilder()
            .addPathPartAsIs("_ml", "data_frame", "analytics")
            .addPathPart(stopRequest.getId())
            .addPathPartAsIs("_stop")
            .build());
    RequestConverters.Params params = new RequestConverters.Params();
    if (stopRequest.getTimeout() != null) {
        params.withTimeout(stopRequest.getTimeout());
    }
    Boolean allowNoMatch = stopRequest.getAllowNoMatch();
    if (allowNoMatch != null) {
        params.putParam(StopDataFrameAnalyticsRequest.ALLOW_NO_MATCH.getPreferredName(),
            Boolean.toString(allowNoMatch));
    }
    request.addParameters(params.asMap());
    return request;
}
/**
 * Converts a {@link DeleteDataFrameAnalyticsRequest} into a DELETE request
 * against {@code _ml/data_frame/analytics/{id}}. No body or query parameters.
 */
static Request deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest deleteRequest) {
    EndpointBuilder endpointBuilder = new EndpointBuilder()
        .addPathPartAsIs("_ml", "data_frame", "analytics")
        .addPathPart(deleteRequest.getId());
    return new Request(HttpDelete.METHOD_NAME, endpointBuilder.build());
}
/**
 * Converts an {@link EvaluateDataFrameRequest} into a POST request against
 * {@code _ml/data_frame/_evaluate}, serializing the evaluation spec as the
 * request body.
 */
static Request evaluateDataFrame(EvaluateDataFrameRequest evaluateRequest) throws IOException {
    Request request = new Request(HttpPost.METHOD_NAME,
        new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "_evaluate").build());
    request.setEntity(createEntity(evaluateRequest, REQUEST_BODY_CONTENT_TYPE));
    return request;
}
static Request putFilter(PutFilterRequest putFilterRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_ml")

View File

@ -25,6 +25,7 @@ import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataResponse;
@ -33,6 +34,8 @@ import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
import org.elasticsearch.client.ml.FindFileStructureRequest;
import org.elasticsearch.client.ml.FindFileStructureResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
@ -47,6 +50,10 @@ import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetCategoriesResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
@ -78,6 +85,8 @@ import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.PutFilterRequest;
@ -87,8 +96,11 @@ import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.SetUpgradeModeRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
@ -1877,4 +1889,286 @@ public final class MachineLearningClient {
listener,
Collections.emptySet());
}
/**
* Creates a new Data Frame Analytics config
* <p>
* For additional info
* see <a href="https://www.TODO.com">PUT Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link PutDataFrameAnalyticsRequest} containing the
* {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return The {@link PutDataFrameAnalyticsResponse} containing the created
* {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PutDataFrameAnalyticsResponse putDataFrameAnalytics(PutDataFrameAnalyticsRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::putDataFrameAnalytics,
options,
PutDataFrameAnalyticsResponse::fromXContent,
Collections.emptySet());
}
/**
* Creates a new Data Frame Analytics config asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">PUT Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link PutDataFrameAnalyticsRequest} containing the
* {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void putDataFrameAnalyticsAsync(PutDataFrameAnalyticsRequest request, RequestOptions options,
ActionListener<PutDataFrameAnalyticsResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::putDataFrameAnalytics,
options,
PutDataFrameAnalyticsResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Gets a single or multiple Data Frame Analytics configs
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link GetDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link GetDataFrameAnalyticsResponse} response object containing the
* {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} objects
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetDataFrameAnalyticsResponse getDataFrameAnalytics(GetDataFrameAnalyticsRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::getDataFrameAnalytics,
options,
GetDataFrameAnalyticsResponse::fromXContent,
Collections.emptySet());
}
/**
* Gets a single or multiple Data Frame Analytics configs asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link GetDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void getDataFrameAnalyticsAsync(GetDataFrameAnalyticsRequest request, RequestOptions options,
ActionListener<GetDataFrameAnalyticsResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::getDataFrameAnalytics,
options,
GetDataFrameAnalyticsResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Gets the running statistics of a Data Frame Analytics
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics Stats documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link GetDataFrameAnalyticsStatsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link GetDataFrameAnalyticsStatsResponse} response object
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetDataFrameAnalyticsStatsResponse getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::getDataFrameAnalyticsStats,
options,
GetDataFrameAnalyticsStatsResponse::fromXContent,
Collections.emptySet());
}
/**
* Gets the running statistics of a Data Frame Analytics asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">GET Data Frame Analytics Stats documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link GetDataFrameAnalyticsStatsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void getDataFrameAnalyticsStatsAsync(GetDataFrameAnalyticsStatsRequest request, RequestOptions options,
ActionListener<GetDataFrameAnalyticsStatsResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::getDataFrameAnalyticsStats,
options,
GetDataFrameAnalyticsStatsResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Starts Data Frame Analytics
* <p>
* For additional info
* see <a href="https://www.TODO.com">Start Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link StartDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return action acknowledgement
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse startDataFrameAnalytics(StartDataFrameAnalyticsRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::startDataFrameAnalytics,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
}
/**
* Starts Data Frame Analytics asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">Start Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link StartDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void startDataFrameAnalyticsAsync(StartDataFrameAnalyticsRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::startDataFrameAnalytics,
options,
AcknowledgedResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Stops Data Frame Analytics
* <p>
* For additional info
* see <a href="https://www.TODO.com">Stop Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link StopDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link StopDataFrameAnalyticsResponse}
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public StopDataFrameAnalyticsResponse stopDataFrameAnalytics(StopDataFrameAnalyticsRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::stopDataFrameAnalytics,
options,
StopDataFrameAnalyticsResponse::fromXContent,
Collections.emptySet());
}
/**
* Stops Data Frame Analytics asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">Stop Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link StopDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void stopDataFrameAnalyticsAsync(StopDataFrameAnalyticsRequest request, RequestOptions options,
ActionListener<StopDataFrameAnalyticsResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::stopDataFrameAnalytics,
options,
StopDataFrameAnalyticsResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Deletes the given Data Frame Analytics config
* <p>
* For additional info
* see <a href="https://www.TODO.com">DELETE Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link DeleteDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return action acknowledgement
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::deleteDataFrameAnalytics,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
}
/**
* Deletes the given Data Frame Analytics config asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">DELETE Data Frame Analytics documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link DeleteDataFrameAnalyticsRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void deleteDataFrameAnalyticsAsync(DeleteDataFrameAnalyticsRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::deleteDataFrameAnalytics,
options,
AcknowledgedResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Evaluates the given Data Frame
* <p>
* For additional info
* see <a href="https://www.TODO.com">Evaluate Data Frame documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link EvaluateDataFrameRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link EvaluateDataFrameResponse} response object
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public EvaluateDataFrameResponse evaluateDataFrame(EvaluateDataFrameRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::evaluateDataFrame,
options,
EvaluateDataFrameResponse::fromXContent,
Collections.emptySet());
}
/**
* Evaluates the given Data Frame asynchronously and notifies listener upon completion
* <p>
* For additional info
* see <a href="https://www.TODO.com">Evaluate Data Frame documentation</a>
* NOTE(review): link above is a placeholder — replace with the published reference URL before release.
*
* @param request The {@link EvaluateDataFrameRequest}
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void evaluateDataFrameAsync(EvaluateDataFrameRequest request, RequestOptions options,
ActionListener<EvaluateDataFrameResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::evaluateDataFrame,
options,
EvaluateDataFrameResponse::fromXContent,
listener,
Collections.emptySet());
}
}

View File

@@ -1041,13 +1041,6 @@ final class RequestConverters {
return this;
}
/**
 * Adds the {@code verify} query parameter (as {@code "true"}) when
 * verification is requested; when {@code verify} is false the parameter
 * is omitted entirely.
 */
Params withVerify(boolean verify) {
    return verify ? putParam("verify", Boolean.TRUE.toString()) : this;
}
Params withWaitForStatus(ClusterHealthStatus status) {
if (status != null) {
return putParam("wait_for_status", status.name().toLowerCase(Locale.ROOT));

View File

@ -60,7 +60,9 @@ final class SnapshotRequestConverters {
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
parameters.withTimeout(putRepositoryRequest.timeout());
parameters.withVerify(putRepositoryRequest.verify());
if (putRepositoryRequest.verify() == false) {
parameters.putParam("verify", "false");
}
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(putRepositoryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;

View File

@ -21,12 +21,36 @@ package org.elasticsearch.client;
import org.elasticsearch.common.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Encapsulates an accumulation of validation errors
*/
public class ValidationException extends IllegalArgumentException {
/**
* Creates {@link ValidationException} instance initialized with given error messages.
* @param error the error messages to seed the exception with, in order
* @return {@link ValidationException} instance
*/
public static ValidationException withError(String... error) {
// Delegates to withErrors so both factories share the same accumulation logic.
return withErrors(Arrays.asList(error));
}
/**
 * Creates a {@link ValidationException} initialized with the given error messages.
 *
 * @param errors the list of errors to add
 * @return a new {@link ValidationException} carrying every entry of {@code errors}
 */
public static ValidationException withErrors(List<String> errors) {
    ValidationException validationException = new ValidationException();
    errors.forEach(validationException::addValidationError);
    return validationException;
}
private final List<String> validationErrors = new ArrayList<>();
/**

View File

@ -35,8 +35,8 @@ import static java.util.Collections.emptySet;
* default distribution of Elasticsearch. All of these APIs will 404 if run
* against the OSS distribution of Elasticsearch.
* <p>
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/xpack-api.html">
* X-Pack APIs on elastic.co</a> for more information.
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rest-apis.html">
* REST APIs on elastic.co</a> for more information.
*/
public final class XPackClient {

View File

@@ -0,0 +1,41 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe;
import org.elasticsearch.client.dataframe.transforms.SyncConfig;
import org.elasticsearch.client.dataframe.transforms.TimeSyncConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import java.util.Arrays;
import java.util.List;
// SPI provider contributing the data frame transform named-XContent parsers
// to the NamedXContentRegistry machinery.
public class DataFrameNamedXContentProvider implements NamedXContentProvider {
/**
* @return the registry entries this client contributes: currently just the
* {@link TimeSyncConfig} parser, registered under the {@link SyncConfig} category.
*/
@Override
public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
return Arrays.asList(
new NamedXContentRegistry.Entry(SyncConfig.class,
new ParseField(TimeSyncConfig.NAME),
TimeSyncConfig::fromXContent));
}
}

View File

@ -30,6 +30,7 @@ import java.util.Optional;
public class GetDataFrameTransformRequest implements Validatable {
public static final String ALLOW_NO_MATCH = "allow_no_match";
/**
* Helper method to create a request that will get ALL Data Frame Transforms
* @return new {@link GetDataFrameTransformRequest} object for the id "_all"
@ -40,6 +41,7 @@ public class GetDataFrameTransformRequest implements Validatable {
private final List<String> ids;
private PageParams pageParams;
private Boolean allowNoMatch;
public GetDataFrameTransformRequest(String... ids) {
this.ids = Arrays.asList(ids);
@ -57,6 +59,14 @@ public class GetDataFrameTransformRequest implements Validatable {
this.pageParams = pageParams;
}
public Boolean getAllowNoMatch() {
return allowNoMatch;
}
public void setAllowNoMatch(Boolean allowNoMatch) {
this.allowNoMatch = allowNoMatch;
}
@Override
public Optional<ValidationException> validate() {
if (ids == null || ids.isEmpty()) {
@ -70,7 +80,7 @@ public class GetDataFrameTransformRequest implements Validatable {
@Override
public int hashCode() {
return Objects.hash(ids, pageParams);
return Objects.hash(ids, pageParams, allowNoMatch);
}
@Override
@ -83,6 +93,8 @@ public class GetDataFrameTransformRequest implements Validatable {
return false;
}
GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj;
return Objects.equals(ids, other.ids) && Objects.equals(pageParams, other.pageParams);
return Objects.equals(ids, other.ids)
&& Objects.equals(pageParams, other.pageParams)
&& Objects.equals(allowNoMatch, other.allowNoMatch);
}
}

View File

@ -29,6 +29,7 @@ import java.util.Optional;
public class GetDataFrameTransformStatsRequest implements Validatable {
private final String id;
private PageParams pageParams;
private Boolean allowNoMatch;
public GetDataFrameTransformStatsRequest(String id) {
this.id = id;
@ -46,6 +47,14 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
this.pageParams = pageParams;
}
public Boolean getAllowNoMatch() {
return allowNoMatch;
}
public void setAllowNoMatch(Boolean allowNoMatch) {
this.allowNoMatch = allowNoMatch;
}
@Override
public Optional<ValidationException> validate() {
if (id == null) {
@ -59,7 +68,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
@Override
public int hashCode() {
return Objects.hash(id, pageParams);
return Objects.hash(id, pageParams, allowNoMatch);
}
@Override
@ -72,6 +81,8 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
return false;
}
GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj;
return Objects.equals(id, other.id) && Objects.equals(pageParams, other.pageParams);
return Objects.equals(id, other.id)
&& Objects.equals(pageParams, other.pageParams)
&& Objects.equals(allowNoMatch, other.allowNoMatch);
}
}

View File

@ -31,6 +31,7 @@ public class StopDataFrameTransformRequest implements Validatable {
private final String id;
private Boolean waitForCompletion;
private TimeValue timeout;
private Boolean allowNoMatch;
public StopDataFrameTransformRequest(String id) {
this.id = id;
@ -64,6 +65,14 @@ public class StopDataFrameTransformRequest implements Validatable {
return timeout;
}
public Boolean getAllowNoMatch() {
return allowNoMatch;
}
public void setAllowNoMatch(Boolean allowNoMatch) {
this.allowNoMatch = allowNoMatch;
}
@Override
public Optional<ValidationException> validate() {
if (id == null) {
@ -77,7 +86,7 @@ public class StopDataFrameTransformRequest implements Validatable {
@Override
public int hashCode() {
return Objects.hash(id, waitForCompletion, timeout);
return Objects.hash(id, waitForCompletion, timeout, allowNoMatch);
}
@Override
@ -92,7 +101,8 @@ public class StopDataFrameTransformRequest implements Validatable {
StopDataFrameTransformRequest other = (StopDataFrameTransformRequest) obj;
return Objects.equals(this.id, other.id)
&& Objects.equals(this.waitForCompletion, other.waitForCompletion)
&& Objects.equals(this.timeout, other.timeout);
&& Objects.equals(this.timeout, other.timeout)
&& Objects.equals(this.allowNoMatch, other.allowNoMatch);
}
}

View File

@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.time.Instant;
@ -44,6 +45,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
public static final ParseField SOURCE = new ParseField("source");
public static final ParseField DEST = new ParseField("dest");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField SYNC = new ParseField("sync");
public static final ParseField VERSION = new ParseField("version");
public static final ParseField CREATE_TIME = new ParseField("create_time");
// types of transforms
@ -52,6 +54,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
private final String id;
private final SourceConfig source;
private final DestConfig dest;
private final SyncConfig syncConfig;
private final PivotConfig pivotConfig;
private final String description;
private final Version transformVersion;
@ -63,17 +66,26 @@ public class DataFrameTransformConfig implements ToXContentObject {
String id = (String) args[0];
SourceConfig source = (SourceConfig) args[1];
DestConfig dest = (DestConfig) args[2];
PivotConfig pivotConfig = (PivotConfig) args[3];
String description = (String)args[4];
Instant createTime = (Instant)args[5];
String transformVersion = (String)args[6];
return new DataFrameTransformConfig(id, source, dest, pivotConfig, description, createTime, transformVersion);
SyncConfig syncConfig = (SyncConfig) args[3];
PivotConfig pivotConfig = (PivotConfig) args[4];
String description = (String)args[5];
Instant createTime = (Instant)args[6];
String transformVersion = (String)args[7];
return new DataFrameTransformConfig(id,
source,
dest,
syncConfig,
pivotConfig,
description,
createTime,
transformVersion);
});
static {
PARSER.declareString(constructorArg(), ID);
PARSER.declareObject(constructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), SOURCE);
PARSER.declareObject(constructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DEST);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), SYNC);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p), PIVOT_TRANSFORM);
PARSER.declareString(optionalConstructorArg(), DESCRIPTION);
PARSER.declareField(optionalConstructorArg(),
@ -81,6 +93,15 @@ public class DataFrameTransformConfig implements ToXContentObject {
PARSER.declareString(optionalConstructorArg(), VERSION);
}
private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
SyncConfig syncConfig = parser.namedObject(SyncConfig.class, parser.currentName(), true);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
return syncConfig;
}
public static DataFrameTransformConfig fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
@ -97,12 +118,13 @@ public class DataFrameTransformConfig implements ToXContentObject {
* @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
*/
public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
return new DataFrameTransformConfig(null, source, null, pivotConfig, null, null, null);
return new DataFrameTransformConfig(null, source, null, null, pivotConfig, null, null, null);
}
DataFrameTransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final SyncConfig syncConfig,
final PivotConfig pivotConfig,
final String description,
final Instant createTime,
@ -110,6 +132,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
this.id = id;
this.source = source;
this.dest = dest;
this.syncConfig = syncConfig;
this.pivotConfig = pivotConfig;
this.description = description;
this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli());
@ -128,6 +151,10 @@ public class DataFrameTransformConfig implements ToXContentObject {
return dest;
}
public SyncConfig getSyncConfig() {
return syncConfig;
}
public PivotConfig getPivotConfig() {
return pivotConfig;
}
@ -157,6 +184,11 @@ public class DataFrameTransformConfig implements ToXContentObject {
if (dest != null) {
builder.field(DEST.getPreferredName(), dest);
}
if (syncConfig != null) {
builder.startObject(SYNC.getPreferredName());
builder.field(syncConfig.getName(), syncConfig);
builder.endObject();
}
if (pivotConfig != null) {
builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig);
}
@ -189,6 +221,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
&& Objects.equals(this.source, that.source)
&& Objects.equals(this.dest, that.dest)
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.syncConfig, that.syncConfig)
&& Objects.equals(this.transformVersion, that.transformVersion)
&& Objects.equals(this.createTime, that.createTime)
&& Objects.equals(this.pivotConfig, that.pivotConfig);
@ -196,7 +229,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
@Override
public int hashCode() {
return Objects.hash(id, source, dest, pivotConfig, description, createTime, transformVersion);
return Objects.hash(id, source, dest, syncConfig, pivotConfig, description);
}
@Override
@ -213,6 +246,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
private String id;
private SourceConfig source;
private DestConfig dest;
private SyncConfig syncConfig;
private PivotConfig pivotConfig;
private String description;
@ -231,6 +265,11 @@ public class DataFrameTransformConfig implements ToXContentObject {
return this;
}
public Builder setSyncConfig(SyncConfig syncConfig) {
this.syncConfig = syncConfig;
return this;
}
public Builder setPivotConfig(PivotConfig pivotConfig) {
this.pivotConfig = pivotConfig;
return this;
@ -242,7 +281,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
}
public DataFrameTransformConfig build() {
return new DataFrameTransformConfig(id, source, dest, pivotConfig, description, null, null);
return new DataFrameTransformConfig(id, source, dest, syncConfig, pivotConfig, description, null, null);
}
}
}

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.common.xcontent.ToXContentObject;
/**
 * Describes how a data frame transform keeps its destination in sync with its
 * source indices. Concrete implementations (e.g. {@link TimeSyncConfig}) are
 * serialized as a named object nested under the transform config's {@code sync}
 * field, keyed by {@link #getName()}.
 */
public interface SyncConfig extends ToXContentObject {

    /**
     * Returns the name this sync configuration is registered under; it is used
     * as the wrapper field name when the config is rendered to XContent
     * (e.g. {@code "time"} for {@link TimeSyncConfig}).
     */
    String getName();
}

View File

@ -0,0 +1,108 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
 * Time-based {@link SyncConfig}: change detection is driven by a timestamp
 * {@code field} in the source documents, optionally offset by a {@code delay}
 * to tolerate ingest latency.
 */
public class TimeSyncConfig implements SyncConfig {

    public static final String NAME = "time";

    private static final ParseField FIELD = new ParseField("field");
    private static final ParseField DELAY = new ParseField("delay");

    // Timestamp field used for change detection.
    private final String field;
    // How far behind "now" to look; the parser defaults a missing value to TimeValue.ZERO.
    private final TimeValue delay;

    private static final ConstructingObjectParser<TimeSyncConfig, Void> PARSER = new ConstructingObjectParser<>("time_sync_config", true,
            args -> new TimeSyncConfig((String) args[0], args[1] != null ? (TimeValue) args[1] : TimeValue.ZERO));

    static {
        PARSER.declareString(constructorArg(), FIELD);
        PARSER.declareField(optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), DELAY.getPreferredName()), DELAY,
            ObjectParser.ValueType.STRING_OR_NULL);
    }

    public static TimeSyncConfig fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /**
     * @param field name of the timestamp field used to detect changes
     * @param delay offset behind "now"; may be {@code null}, treated as no delay
     */
    public TimeSyncConfig(String field, TimeValue delay) {
        this.field = field;
        this.delay = delay;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FIELD.getPreferredName(), field);
        // Null-guard: the public constructor accepts a null delay (only the
        // parser path defaults it to ZERO), so avoid an NPE on serialization.
        // A zero/negative delay is omitted, matching the parser's default.
        if (delay != null && delay.duration() > 0) {
            builder.field(DELAY.getPreferredName(), delay.getStringRep());
        }
        builder.endObject();
        return builder;
    }

    public String getField() {
        return field;
    }

    public TimeValue getDelay() {
        return delay;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final TimeSyncConfig that = (TimeSyncConfig) other;
        return Objects.equals(this.field, that.field)
            && Objects.equals(this.delay, that.delay);
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, delay);
    }

    @Override
    public String getName() {
        return NAME;
    }
}

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import java.util.Objects;
import java.util.Optional;
/**
* Request to delete a data frame analytics config
*/
/**
 * Request to delete a data frame analytics config, identified by id.
 */
public class DeleteDataFrameAnalyticsRequest implements Validatable {

    private final String id;

    public DeleteDataFrameAnalyticsRequest(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    @Override
    public Optional<ValidationException> validate() {
        // The id is the only required field.
        return id != null
            ? Optional.empty()
            : Optional.of(ValidationException.withError("data frame analytics id must not be null"));
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        DeleteDataFrameAnalyticsRequest that = (DeleteDataFrameAnalyticsRequest) o;
        return Objects.equals(id, that.id);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id);
    }
}

View File

@ -0,0 +1,136 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Request to run an {@link Evaluation} over the documents of one or more
 * indices.
 */
public class EvaluateDataFrameRequest implements ToXContentObject, Validatable {

    private static final ParseField INDEX = new ParseField("index");
    private static final ParseField EVALUATION = new ParseField("evaluation");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<EvaluateDataFrameRequest, Void> PARSER =
        new ConstructingObjectParser<>(
            "evaluate_data_frame_request", true, args -> new EvaluateDataFrameRequest((List<String>) args[0], (Evaluation) args[1]));

    static {
        PARSER.declareStringArray(constructorArg(), INDEX);
        PARSER.declareObject(constructorArg(), (p, c) -> parseEvaluation(p), EVALUATION);
    }

    private static Evaluation parseEvaluation(XContentParser parser) throws IOException {
        // The evaluation is serialized as a single-field object whose field
        // name identifies the concrete evaluation type.
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
        ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
        Evaluation result = parser.namedObject(Evaluation.class, parser.currentName(), null);
        ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
        return result;
    }

    public static EvaluateDataFrameRequest fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private List<String> indices;
    private Evaluation evaluation;

    public EvaluateDataFrameRequest(String index, Evaluation evaluation) {
        this(Arrays.asList(index), evaluation);
    }

    public EvaluateDataFrameRequest(List<String> indices, Evaluation evaluation) {
        setIndices(indices);
        setEvaluation(evaluation);
    }

    public List<String> getIndices() {
        // Expose a read-only view; the backing list is a private copy.
        return Collections.unmodifiableList(indices);
    }

    public final void setIndices(List<String> indices) {
        Objects.requireNonNull(indices);
        this.indices = new ArrayList<>(indices);
    }

    public Evaluation getEvaluation() {
        return evaluation;
    }

    public final void setEvaluation(Evaluation evaluation) {
        this.evaluation = evaluation;
    }

    @Override
    public Optional<ValidationException> validate() {
        List<String> problems = new ArrayList<>();
        if (indices.isEmpty()) {
            problems.add("At least one index must be specified");
        }
        if (evaluation == null) {
            problems.add("evaluation must not be null");
        }
        if (problems.isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(ValidationException.withErrors(problems));
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.array(INDEX.getPreferredName(), indices.toArray());
        // Nest the evaluation under its own name, mirroring parseEvaluation.
        builder.startObject(EVALUATION.getPreferredName());
        builder.field(evaluation.getName(), evaluation);
        builder.endObject();
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        EvaluateDataFrameRequest that = (EvaluateDataFrameRequest) o;
        return Objects.equals(indices, that.indices)
            && Objects.equals(evaluation, that.evaluation);
    }

    @Override
    public int hashCode() {
        return Objects.hash(indices, evaluation);
    }
}

View File

@ -0,0 +1,119 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.NamedObjectNotFoundException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Response of the evaluate data frame API: the name of the evaluation that was
 * run plus the per-metric results it produced, keyed by metric name.
 */
public class EvaluateDataFrameResponse implements ToXContentObject {

    /**
     * Parses a response of the shape {@code {"<evaluation_name>": {"<metric>": {...}, ...}}}.
     * Metrics whose names are not registered are skipped rather than failing the parse.
     */
    public static EvaluateDataFrameResponse fromXContent(XContentParser parser) throws IOException {
        if (parser.currentToken() == null) {
            parser.nextToken();
        }
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
        ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
        // The single top-level field name is the evaluation's name.
        String evaluationName = parser.currentName();
        parser.nextToken();
        // LinkedHashMap keeps the metrics in the order the server returned them.
        Map<String, EvaluationMetric.Result> metrics = parser.map(LinkedHashMap::new, EvaluateDataFrameResponse::parseMetric);
        List<EvaluationMetric.Result> knownMetrics =
            metrics.values().stream()
                .filter(Objects::nonNull) // Filter out null values returned by {@link EvaluateDataFrameResponse::parseMetric}.
                .collect(Collectors.toList());
        ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
        return new EvaluateDataFrameResponse(evaluationName, knownMetrics);
    }

    /**
     * Parses one metric entry; returns {@code null} (instead of throwing) when
     * the metric name is not registered, so unknown metrics can be ignored.
     */
    private static EvaluationMetric.Result parseMetric(XContentParser parser) throws IOException {
        String metricName = parser.currentName();
        try {
            return parser.namedObject(EvaluationMetric.Result.class, metricName, null);
        } catch (NamedObjectNotFoundException e) {
            parser.skipChildren();
            // Metric name not recognized. Return {@code null} value here and filter it out later.
            return null;
        }
    }

    private final String evaluationName;
    private final Map<String, EvaluationMetric.Result> metrics;

    public EvaluateDataFrameResponse(String evaluationName, List<EvaluationMetric.Result> metrics) {
        this.evaluationName = Objects.requireNonNull(evaluationName);
        // Index the results by metric name; Collectors.toMap throws on duplicate names.
        this.metrics = Collections.unmodifiableMap(Objects.requireNonNull(metrics)
            .stream().collect(Collectors.toMap(m -> m.getMetricName(), m -> m)));
    }

    public String getEvaluationName() {
        return evaluationName;
    }

    public List<EvaluationMetric.Result> getMetrics() {
        return metrics.values().stream().collect(Collectors.toList());
    }

    /**
     * Returns the result for the given metric name, or {@code null} if absent.
     * The caller chooses the result type; an unchecked cast is performed.
     */
    @SuppressWarnings("unchecked")
    public <T extends EvaluationMetric.Result> T getMetricByName(String metricName) {
        Objects.requireNonNull(metricName);
        return (T) metrics.get(metricName);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        return builder
            .startObject()
            .field(evaluationName, metrics)
            .endObject();
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) return true;
        if (o == null || getClass() != o.getClass()) return false;
        EvaluateDataFrameResponse that = (EvaluateDataFrameResponse) o;
        return Objects.equals(evaluationName, that.evaluationName)
            && Objects.equals(metrics, that.metrics);
    }

    @Override
    public int hashCode() {
        return Objects.hash(evaluationName, metrics);
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }
}

View File

@ -0,0 +1,104 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
/**
 * Request to retrieve one or more data frame analytics configs by id.
 */
public class GetDataFrameAnalyticsRequest implements Validatable {

    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");

    private final List<String> ids;
    private Boolean allowNoMatch;
    private PageParams pageParams;

    /**
     * Helper method to create a request that will get ALL Data Frame Analytics
     * @return new {@link GetDataFrameAnalyticsRequest} object for the id "_all"
     */
    public static GetDataFrameAnalyticsRequest getAllDataFrameAnalyticsRequest() {
        return new GetDataFrameAnalyticsRequest("_all");
    }

    public GetDataFrameAnalyticsRequest(String... ids) {
        this.ids = Arrays.asList(ids);
    }

    public List<String> getIds() {
        return ids;
    }

    public Boolean getAllowNoMatch() {
        return allowNoMatch;
    }

    /**
     * Whether to ignore if a wildcard expression matches no data frame analytics.
     *
     * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all})
     *                     does not match any data frame analytics
     */
    public GetDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) {
        this.allowNoMatch = allowNoMatch;
        return this;
    }

    public PageParams getPageParams() {
        return pageParams;
    }

    public GetDataFrameAnalyticsRequest setPageParams(@Nullable PageParams pageParams) {
        this.pageParams = pageParams;
        return this;
    }

    @Override
    public Optional<ValidationException> validate() {
        // At least one id is required.
        boolean missingIds = ids == null || ids.isEmpty();
        return missingIds
            ? Optional.of(ValidationException.withError("data frame analytics id must not be null"))
            : Optional.empty();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        GetDataFrameAnalyticsRequest that = (GetDataFrameAnalyticsRequest) o;
        return Objects.equals(ids, that.ids)
            && Objects.equals(allowNoMatch, that.allowNoMatch)
            && Objects.equals(pageParams, that.pageParams);
    }

    @Override
    public int hashCode() {
        return Objects.hash(ids, allowNoMatch, pageParams);
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Response holding the data frame analytics configs returned by the get API.
 */
public class GetDataFrameAnalyticsResponse {

    public static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics");

    @SuppressWarnings("unchecked")
    static final ConstructingObjectParser<GetDataFrameAnalyticsResponse, Void> PARSER =
        new ConstructingObjectParser<>(
            "get_data_frame_analytics",
            true,
            args -> new GetDataFrameAnalyticsResponse((List<DataFrameAnalyticsConfig>) args[0]));

    static {
        PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsConfig.fromXContent(p), DATA_FRAME_ANALYTICS);
    }

    public static GetDataFrameAnalyticsResponse fromXContent(final XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private List<DataFrameAnalyticsConfig> analytics;

    public GetDataFrameAnalyticsResponse(List<DataFrameAnalyticsConfig> analytics) {
        this.analytics = analytics;
    }

    public List<DataFrameAnalyticsConfig> getAnalytics() {
        return analytics;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        GetDataFrameAnalyticsResponse that = (GetDataFrameAnalyticsResponse) o;
        return Objects.equals(this.analytics, that.analytics);
    }

    @Override
    public int hashCode() {
        return Objects.hash(analytics);
    }
}

View File

@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
/**
* Request to get data frame analytics stats
*/
/**
 * Request to get data frame analytics stats
 */
public class GetDataFrameAnalyticsStatsRequest implements Validatable {

    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");

    private final List<String> ids;
    private Boolean allowNoMatch;
    private PageParams pageParams;

    public GetDataFrameAnalyticsStatsRequest(String... ids) {
        this.ids = Arrays.asList(ids);
    }

    public List<String> getIds() {
        return ids;
    }

    public Boolean getAllowNoMatch() {
        return allowNoMatch;
    }

    /**
     * Whether to ignore if a wildcard expression matches no data frame analytics.
     *
     * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all})
     *                     does not match any data frame analytics
     */
    public GetDataFrameAnalyticsStatsRequest setAllowNoMatch(boolean allowNoMatch) {
        this.allowNoMatch = allowNoMatch;
        return this;
    }

    public PageParams getPageParams() {
        return pageParams;
    }

    public GetDataFrameAnalyticsStatsRequest setPageParams(@Nullable PageParams pageParams) {
        this.pageParams = pageParams;
        return this;
    }

    @Override
    public Optional<ValidationException> validate() {
        // At least one id is required.
        boolean missingIds = ids == null || ids.isEmpty();
        return missingIds
            ? Optional.of(ValidationException.withError("data frame analytics id must not be null"))
            : Optional.empty();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        GetDataFrameAnalyticsStatsRequest that = (GetDataFrameAnalyticsStatsRequest) o;
        return Objects.equals(ids, that.ids)
            && Objects.equals(allowNoMatch, that.allowNoMatch)
            && Objects.equals(pageParams, that.pageParams);
    }

    @Override
    public int hashCode() {
        return Objects.hash(ids, allowNoMatch, pageParams);
    }
}

View File

@ -0,0 +1,102 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.client.dataframe.AcknowledgedTasksResponse;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
 * A response holding stats for the requested data frame analytics jobs, along with
 * any task-level or node-level failures encountered while collecting them.
 */
public class GetDataFrameAnalyticsStatsResponse {

    private static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<GetDataFrameAnalyticsStatsResponse, Void> PARSER =
        new ConstructingObjectParser<>(
            "get_data_frame_analytics_stats_response", true,
            args -> new GetDataFrameAnalyticsStatsResponse(
                (List<DataFrameAnalyticsStats>) args[0],
                (List<TaskOperationFailure>) args[1],
                (List<ElasticsearchException>) args[2]));

    static {
        PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsStats.fromXContent(p), DATA_FRAME_ANALYTICS);
        PARSER.declareObjectArray(
            optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), AcknowledgedTasksResponse.TASK_FAILURES);
        PARSER.declareObjectArray(
            optionalConstructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), AcknowledgedTasksResponse.NODE_FAILURES);
    }

    public static GetDataFrameAnalyticsStatsResponse fromXContent(XContentParser parser) {
        return GetDataFrameAnalyticsStatsResponse.PARSER.apply(parser, null);
    }

    private final List<DataFrameAnalyticsStats> analyticsStats;
    private final List<TaskOperationFailure> taskFailures;
    private final List<ElasticsearchException> nodeFailures;

    public GetDataFrameAnalyticsStatsResponse(List<DataFrameAnalyticsStats> analyticsStats,
                                              @Nullable List<TaskOperationFailure> taskFailures,
                                              @Nullable List<? extends ElasticsearchException> nodeFailures) {
        this.analyticsStats = analyticsStats;
        // Failure lists are optional in the response body; normalize null to an immutable empty list.
        this.taskFailures = unmodifiableOrEmpty(taskFailures);
        this.nodeFailures = unmodifiableOrEmpty(nodeFailures);
    }

    // Wraps the given list so callers cannot mutate it, treating null as "no failures".
    private static <T> List<T> unmodifiableOrEmpty(@Nullable List<? extends T> list) {
        return list == null ? Collections.emptyList() : Collections.unmodifiableList(list);
    }

    public List<DataFrameAnalyticsStats> getAnalyticsStats() {
        return analyticsStats;
    }

    public List<ElasticsearchException> getNodeFailures() {
        return nodeFailures;
    }

    public List<TaskOperationFailure> getTaskFailures() {
        return taskFailures;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        GetDataFrameAnalyticsStatsResponse that = (GetDataFrameAnalyticsStatsResponse) o;
        return Objects.equals(analyticsStats, that.analyticsStats)
            && Objects.equals(nodeFailures, that.nodeFailures)
            && Objects.equals(taskFailures, that.taskFailures);
    }

    @Override
    public int hashCode() {
        return Objects.hash(analyticsStats, nodeFailures, taskFailures);
    }
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.client.ml;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -147,4 +148,9 @@ public class NodeAttributes implements ToXContentObject {
Objects.equals(transportAddress, that.transportAddress) &&
Objects.equals(attributes, that.attributes);
}
/**
 * Returns the JSON representation of these node attributes, built via
 * {@link Strings#toString}. Useful for logging and debugging.
 */
@Override
public String toString() {
return Strings.toString(this);
}
}

View File

@ -0,0 +1,70 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
/**
 * A request to create a new data frame analytics job from the given configuration.
 */
public class PutDataFrameAnalyticsRequest implements ToXContentObject, Validatable {

    private final DataFrameAnalyticsConfig config;

    public PutDataFrameAnalyticsRequest(DataFrameAnalyticsConfig config) {
        this.config = config;
    }

    /** @return the configuration of the data frame analytics job to create */
    public DataFrameAnalyticsConfig getConfig() {
        return config;
    }

    @Override
    public Optional<ValidationException> validate() {
        // A configuration object is mandatory for a put request.
        return config == null
            ? Optional.of(ValidationException.withError("put requires a non-null data frame analytics config"))
            : Optional.empty();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // The request body is exactly the serialized config.
        return config.toXContent(builder, params);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return Objects.equals(config, ((PutDataFrameAnalyticsRequest) o).config);
    }

    @Override
    public int hashCode() {
        return Objects.hash(config);
    }
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
 * A response carrying the data frame analytics configuration that was created.
 */
public class PutDataFrameAnalyticsResponse {

    public static PutDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException {
        return new PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig.fromXContent(parser));
    }

    private final DataFrameAnalyticsConfig config;

    public PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig config) {
        this.config = config;
    }

    /** @return the configuration returned in the response body */
    public DataFrameAnalyticsConfig getConfig() {
        return config;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return Objects.equals(config, ((PutDataFrameAnalyticsResponse) o).config);
    }

    @Override
    public int hashCode() {
        return Objects.hash(config);
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
import java.util.Objects;
import java.util.Optional;
/**
 * A request to start a data frame analytics job identified by id.
 */
public class StartDataFrameAnalyticsRequest implements Validatable {

    private final String id;
    private TimeValue timeout;

    public StartDataFrameAnalyticsRequest(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public TimeValue getTimeout() {
        return timeout;
    }

    /**
     * Sets an optional timeout for the start operation; {@code null} clears it.
     */
    public StartDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) {
        this.timeout = timeout;
        return this;
    }

    @Override
    public Optional<ValidationException> validate() {
        // An id is required to address the job.
        return id == null
            ? Optional.of(ValidationException.withError("data frame analytics id must not be null"))
            : Optional.empty();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        StartDataFrameAnalyticsRequest that = (StartDataFrameAnalyticsRequest) o;
        return Objects.equals(id, that.id) && Objects.equals(timeout, that.timeout);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, timeout);
    }
}

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import java.util.Objects;
import java.util.Optional;
/**
 * A request to stop a data frame analytics job identified by id.
 */
public class StopDataFrameAnalyticsRequest implements Validatable {

    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");

    private final String id;
    private TimeValue timeout;
    private Boolean allowNoMatch;

    public StopDataFrameAnalyticsRequest(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public TimeValue getTimeout() {
        return timeout;
    }

    /**
     * Sets an optional timeout for the stop operation; {@code null} clears it.
     */
    public StopDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) {
        this.timeout = timeout;
        return this;
    }

    public Boolean getAllowNoMatch() {
        return allowNoMatch;
    }

    /**
     * Sets whether a wildcard (or {@code _all}) expression that matches nothing is allowed.
     */
    public StopDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) {
        this.allowNoMatch = allowNoMatch;
        return this;
    }

    @Override
    public Optional<ValidationException> validate() {
        // An id is required to address the job.
        return id == null
            ? Optional.of(ValidationException.withError("data frame analytics id must not be null"))
            : Optional.empty();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        StopDataFrameAnalyticsRequest that = (StopDataFrameAnalyticsRequest) o;
        return Objects.equals(id, that.id)
            && Objects.equals(timeout, that.timeout)
            && Objects.equals(allowNoMatch, that.allowNoMatch);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, timeout, allowNoMatch);
    }
}

View File

@ -0,0 +1,87 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* Response indicating if the Machine Learning Data Frame Analytics is now stopped or not
*/
/**
 * Response indicating if the Machine Learning Data Frame Analytics is now stopped or not
 */
public class StopDataFrameAnalyticsResponse implements ToXContentObject {

    private static final ParseField STOPPED = new ParseField("stopped");

    public static final ConstructingObjectParser<StopDataFrameAnalyticsResponse, Void> PARSER =
        new ConstructingObjectParser<>(
            "stop_data_frame_analytics_response",
            true,
            args -> new StopDataFrameAnalyticsResponse((Boolean) args[0]));

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED);
    }

    public static StopDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final boolean stopped;

    public StopDataFrameAnalyticsResponse(boolean stopped) {
        this.stopped = stopped;
    }

    /**
     * Has the Data Frame Analytics stopped or not
     *
     * @return boolean value indicating the Data Frame Analytics stopped status
     */
    public boolean isStopped() {
        return stopped;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(STOPPED.getPreferredName(), stopped);
        return builder.endObject();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return stopped == ((StopDataFrameAnalyticsResponse) o).stopped;
    }

    @Override
    public int hashCode() {
        return Objects.hash(stopped);
    }
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.xcontent.ToXContentObject;
/**
 * An analysis that a data frame analytics job can run. Implementations are
 * serialized as the body of an object keyed by {@link #getName()} inside the
 * config's "analysis" object.
 */
public interface DataFrameAnalysis extends ToXContentObject {
// The key under which this analysis is written/read in the "analysis" object.
String getName();
}

View File

@ -0,0 +1,208 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING;
import static org.elasticsearch.common.xcontent.ObjectParser.ValueType.VALUE;
/**
 * The configuration of a data frame analytics job: its id, where the data comes from,
 * where the results are written, which analysis to run, and the optional analyzed-fields
 * filter and model memory limit.
 */
public class DataFrameAnalyticsConfig implements ToXContentObject {

    public static DataFrameAnalyticsConfig fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null).build();
    }

    public static Builder builder(String id) {
        return new Builder().setId(id);
    }

    private static final ParseField ID = new ParseField("id");
    private static final ParseField SOURCE = new ParseField("source");
    private static final ParseField DEST = new ParseField("dest");
    private static final ParseField ANALYSIS = new ParseField("analysis");
    private static final ParseField ANALYZED_FIELDS = new ParseField("analyzed_fields");
    private static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit");

    // final: the parser is fully configured in the static initializer and must never be reassigned
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("data_frame_analytics_config", true, Builder::new);

    static {
        PARSER.declareString(Builder::setId, ID);
        PARSER.declareObject(Builder::setSource, (p, c) -> DataFrameAnalyticsSource.fromXContent(p), SOURCE);
        PARSER.declareObject(Builder::setDest, (p, c) -> DataFrameAnalyticsDest.fromXContent(p), DEST);
        PARSER.declareObject(Builder::setAnalysis, (p, c) -> parseAnalysis(p), ANALYSIS);
        PARSER.declareField(Builder::setAnalyzedFields,
            (p, c) -> FetchSourceContext.fromXContent(p),
            ANALYZED_FIELDS,
            OBJECT_ARRAY_BOOLEAN_OR_STRING);
        PARSER.declareField(Builder::setModelMemoryLimit,
            (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()), MODEL_MEMORY_LIMIT, VALUE);
    }

    /**
     * Parses the single-entry "analysis" object, dispatching on the entry's field name
     * to the registered {@link DataFrameAnalysis} named-object parser.
     */
    private static DataFrameAnalysis parseAnalysis(XContentParser parser) throws IOException {
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
        DataFrameAnalysis analysis = parser.namedObject(DataFrameAnalysis.class, parser.currentName(), true);
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
        return analysis;
    }

    private final String id;
    private final DataFrameAnalyticsSource source;
    private final DataFrameAnalyticsDest dest;
    private final DataFrameAnalysis analysis;
    // Optional: if null, the field is omitted from the serialized config.
    private final FetchSourceContext analyzedFields;
    // Optional: if null, the field is omitted from the serialized config.
    private final ByteSizeValue modelMemoryLimit;

    private DataFrameAnalyticsConfig(String id, DataFrameAnalyticsSource source, DataFrameAnalyticsDest dest, DataFrameAnalysis analysis,
                                     @Nullable FetchSourceContext analyzedFields, @Nullable ByteSizeValue modelMemoryLimit) {
        this.id = Objects.requireNonNull(id);
        this.source = Objects.requireNonNull(source);
        this.dest = Objects.requireNonNull(dest);
        this.analysis = Objects.requireNonNull(analysis);
        this.analyzedFields = analyzedFields;
        this.modelMemoryLimit = modelMemoryLimit;
    }

    public String getId() {
        return id;
    }

    public DataFrameAnalyticsSource getSource() {
        return source;
    }

    public DataFrameAnalyticsDest getDest() {
        return dest;
    }

    public DataFrameAnalysis getAnalysis() {
        return analysis;
    }

    public FetchSourceContext getAnalyzedFields() {
        return analyzedFields;
    }

    public ByteSizeValue getModelMemoryLimit() {
        return modelMemoryLimit;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ID.getPreferredName(), id);
        builder.field(SOURCE.getPreferredName(), source);
        builder.field(DEST.getPreferredName(), dest);
        // The analysis is nested one level deeper, keyed by its name.
        builder.startObject(ANALYSIS.getPreferredName());
        builder.field(analysis.getName(), analysis);
        builder.endObject();
        if (analyzedFields != null) {
            builder.field(ANALYZED_FIELDS.getPreferredName(), analyzedFields);
        }
        if (modelMemoryLimit != null) {
            builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep());
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) return true;
        if (o == null || getClass() != o.getClass()) return false;
        DataFrameAnalyticsConfig other = (DataFrameAnalyticsConfig) o;
        return Objects.equals(id, other.id)
            && Objects.equals(source, other.source)
            && Objects.equals(dest, other.dest)
            && Objects.equals(analysis, other.analysis)
            && Objects.equals(analyzedFields, other.analyzedFields)
            && Objects.equals(modelMemoryLimit, other.modelMemoryLimit);
    }

    @Override
    public int hashCode() {
        // Use the field directly (not getModelMemoryLimit()) for consistency with the other terms.
        return Objects.hash(id, source, dest, analysis, analyzedFields, modelMemoryLimit);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    /** Fluent builder; {@code id}, {@code source}, {@code dest} and {@code analysis} are required. */
    public static class Builder {

        private String id;
        private DataFrameAnalyticsSource source;
        private DataFrameAnalyticsDest dest;
        private DataFrameAnalysis analysis;
        private FetchSourceContext analyzedFields;
        private ByteSizeValue modelMemoryLimit;

        private Builder() {}

        public Builder setId(String id) {
            this.id = Objects.requireNonNull(id);
            return this;
        }

        public Builder setSource(DataFrameAnalyticsSource source) {
            this.source = Objects.requireNonNull(source);
            return this;
        }

        public Builder setDest(DataFrameAnalyticsDest dest) {
            this.dest = Objects.requireNonNull(dest);
            return this;
        }

        public Builder setAnalysis(DataFrameAnalysis analysis) {
            this.analysis = Objects.requireNonNull(analysis);
            return this;
        }

        public Builder setAnalyzedFields(FetchSourceContext fields) {
            this.analyzedFields = fields;
            return this;
        }

        public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) {
            this.modelMemoryLimit = modelMemoryLimit;
            return this;
        }

        public DataFrameAnalyticsConfig build() {
            return new DataFrameAnalyticsConfig(id, source, dest, analysis, analyzedFields, modelMemoryLimit);
        }
    }
}

View File

@ -0,0 +1,123 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static java.util.Objects.requireNonNull;
/**
 * The destination of a data frame analytics job: the index results are written to and,
 * optionally, the name of the field under which results are stored.
 */
public class DataFrameAnalyticsDest implements ToXContentObject {

    public static DataFrameAnalyticsDest fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null).build();
    }

    public static Builder builder() {
        return new Builder();
    }

    private static final ParseField INDEX = new ParseField("index");
    private static final ParseField RESULTS_FIELD = new ParseField("results_field");

    // final: the parser is fully configured in the static initializer and must never be reassigned
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("data_frame_analytics_dest", true, Builder::new);

    static {
        PARSER.declareString(Builder::setIndex, INDEX);
        PARSER.declareString(Builder::setResultsField, RESULTS_FIELD);
    }

    private final String index;
    // Optional: if null, the field is omitted from the serialized object.
    private final String resultsField;

    private DataFrameAnalyticsDest(String index, @Nullable String resultsField) {
        this.index = requireNonNull(index);
        this.resultsField = resultsField;
    }

    /** @return the name of the destination index */
    public String getIndex() {
        return index;
    }

    /** @return the results field name, or {@code null} if not set */
    public String getResultsField() {
        return resultsField;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(INDEX.getPreferredName(), index);
        if (resultsField != null) {
            builder.field(RESULTS_FIELD.getPreferredName(), resultsField);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) return true;
        if (o == null || getClass() != o.getClass()) return false;
        DataFrameAnalyticsDest other = (DataFrameAnalyticsDest) o;
        return Objects.equals(index, other.index)
            && Objects.equals(resultsField, other.resultsField);
    }

    @Override
    public int hashCode() {
        return Objects.hash(index, resultsField);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    /** Fluent builder; {@code index} is required, {@code resultsField} is optional. */
    public static class Builder {

        private String index;
        private String resultsField;

        private Builder() {}

        public Builder setIndex(String index) {
            this.index = index;
            return this;
        }

        public Builder setResultsField(String resultsField) {
            this.resultsField = resultsField;
            return this;
        }

        public DataFrameAnalyticsDest build() {
            return new DataFrameAnalyticsDest(index, resultsField);
        }
    }
}

View File

@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
 * The source of a data frame analytics job: the index the data is read from and,
 * optionally, a query to filter the documents that take part in the analysis.
 */
public class DataFrameAnalyticsSource implements ToXContentObject {

    public static DataFrameAnalyticsSource fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null).build();
    }

    public static Builder builder() {
        return new Builder();
    }

    private static final ParseField INDEX = new ParseField("index");
    private static final ParseField QUERY = new ParseField("query");

    // final: the parser is fully configured in the static initializer and must never be reassigned
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("data_frame_analytics_source", true, Builder::new);

    static {
        PARSER.declareString(Builder::setIndex, INDEX);
        PARSER.declareObject(Builder::setQueryConfig, (p, c) -> QueryConfig.fromXContent(p), QUERY);
    }

    private final String index;
    // Optional: if null, the "query" field is omitted from the serialized object.
    private final QueryConfig queryConfig;

    private DataFrameAnalyticsSource(String index, @Nullable QueryConfig queryConfig) {
        this.index = Objects.requireNonNull(index);
        this.queryConfig = queryConfig;
    }

    /** @return the name of the source index */
    public String getIndex() {
        return index;
    }

    /** @return the query configuration, or {@code null} if not set */
    public QueryConfig getQueryConfig() {
        return queryConfig;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(INDEX.getPreferredName(), index);
        if (queryConfig != null) {
            // Serialize the wrapped query directly under "query", not the wrapper itself.
            builder.field(QUERY.getPreferredName(), queryConfig.getQuery());
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) return true;
        if (o == null || getClass() != o.getClass()) return false;
        DataFrameAnalyticsSource other = (DataFrameAnalyticsSource) o;
        return Objects.equals(index, other.index)
            && Objects.equals(queryConfig, other.queryConfig);
    }

    @Override
    public int hashCode() {
        return Objects.hash(index, queryConfig);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    /** Fluent builder; {@code index} is required, {@code queryConfig} is optional. */
    public static class Builder {

        private String index;
        private QueryConfig queryConfig;

        private Builder() {}

        public Builder setIndex(String index) {
            this.index = index;
            return this;
        }

        public Builder setQueryConfig(QueryConfig queryConfig) {
            this.queryConfig = queryConfig;
            return this;
        }

        public DataFrameAnalyticsSource build() {
            return new DataFrameAnalyticsSource(index, queryConfig);
        }
    }
}

View File

@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import java.util.Locale;
/**
 * The lifecycle states a data frame analytics job can be in.
 */
public enum DataFrameAnalyticsState {
    STARTED, REINDEXING, ANALYZING, STOPPING, STOPPED;

    /**
     * Parses a state from its string representation, ignoring surrounding
     * whitespace and letter case (locale-independent).
     */
    public static DataFrameAnalyticsState fromString(String name) {
        String normalized = name.trim().toUpperCase(Locale.ROOT);
        return valueOf(normalized);
    }

    /** @return the lowercase wire name of this state */
    public String value() {
        return name().toLowerCase(Locale.ROOT);
    }
}

View File

@ -0,0 +1,133 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.client.ml.NodeAttributes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.internal.ToStringBuilder;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
 * Client-side view of the statistics for a single data frame analytics job.
 * Immutable; typically created by parsing a get-stats API response via
 * {@link #fromXContent}.
 */
public class DataFrameAnalyticsStats {
public static DataFrameAnalyticsStats fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
static final ParseField ID = new ParseField("id");
static final ParseField STATE = new ParseField("state");
static final ParseField PROGRESS_PERCENT = new ParseField("progress_percent");
static final ParseField NODE = new ParseField("node");
static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
// The boolean `true` makes the parser lenient: unknown fields from newer servers are ignored.
private static final ConstructingObjectParser<DataFrameAnalyticsStats, Void> PARSER =
new ConstructingObjectParser<>("data_frame_analytics_stats", true,
args -> new DataFrameAnalyticsStats(
(String) args[0],
(DataFrameAnalyticsState) args[1],
(Integer) args[2],
(NodeAttributes) args[3],
(String) args[4]));
static {
PARSER.declareString(constructorArg(), ID);
// "state" must be a string token; anything else is rejected outright.
PARSER.declareField(constructorArg(), p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
return DataFrameAnalyticsState.fromString(p.text());
}
throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
}, STATE, ObjectParser.ValueType.STRING);
PARSER.declareInt(optionalConstructorArg(), PROGRESS_PERCENT);
PARSER.declareObject(optionalConstructorArg(), NodeAttributes.PARSER, NODE);
PARSER.declareString(optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
}
private final String id;
private final DataFrameAnalyticsState state;
// The remaining fields are optional in the response and may therefore be null.
private final Integer progressPercent;
private final NodeAttributes node;
private final String assignmentExplanation;
public DataFrameAnalyticsStats(String id, DataFrameAnalyticsState state, @Nullable Integer progressPercent,
@Nullable NodeAttributes node, @Nullable String assignmentExplanation) {
this.id = id;
this.state = state;
this.progressPercent = progressPercent;
this.node = node;
this.assignmentExplanation = assignmentExplanation;
}
public String getId() {
return id;
}
public DataFrameAnalyticsState getState() {
return state;
}
// May return null when the server reported no progress value.
public Integer getProgressPercent() {
return progressPercent;
}
// May return null when the job is not assigned to a node.
public NodeAttributes getNode() {
return node;
}
// May return null when there is no assignment explanation.
public String getAssignmentExplanation() {
return assignmentExplanation;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DataFrameAnalyticsStats other = (DataFrameAnalyticsStats) o;
return Objects.equals(id, other.id)
&& Objects.equals(state, other.state)
&& Objects.equals(progressPercent, other.progressPercent)
&& Objects.equals(node, other.node)
&& Objects.equals(assignmentExplanation, other.assignmentExplanation);
}
@Override
public int hashCode() {
return Objects.hash(id, state, progressPercent, node, assignmentExplanation);
}
@Override
public String toString() {
return new ToStringBuilder(getClass())
.add("id", id)
.add("state", state)
.add("progressPercent", progressPercent)
.add("node", node)
.add("assignmentExplanation", assignmentExplanation)
.toString();
}
}

View File

@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import java.util.Arrays;
import java.util.List;
/**
 * SPI provider registering the named X-Content parsers for data frame analyses.
 * Currently only outlier detection is registered.
 */
public class MlDataFrameAnalysisNamedXContentProvider implements NamedXContentProvider {

    @Override
    public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
        NamedXContentRegistry.Entry outlierDetection = new NamedXContentRegistry.Entry(
            DataFrameAnalysis.class,
            OutlierDetection.NAME,
            (p, c) -> OutlierDetection.fromXContent(p));
        return Arrays.asList(outlierDetection);
    }
}

View File

@ -0,0 +1,176 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
/**
 * Configuration for the outlier detection data frame analysis.
 * All parameters are optional; values left unset are omitted from the request so the
 * backend can apply its dynamic defaults.
 */
public class OutlierDetection implements DataFrameAnalysis {

    public static OutlierDetection fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null).build();
    }

    /** Creates a configuration with every parameter left to its dynamic default. */
    public static OutlierDetection createDefault() {
        return builder().build();
    }

    public static Builder builder() {
        return new Builder();
    }

    public static final ParseField NAME = new ParseField("outlier_detection");
    static final ParseField N_NEIGHBORS = new ParseField("n_neighbors");
    static final ParseField METHOD = new ParseField("method");
    public static final ParseField MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE =
        new ParseField("minimum_score_to_write_feature_influence");

    // Declared `final` (fix): the parser is fully configured in the static initializer and
    // never reassigned; this also matches every other parser constant in this package.
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME.getPreferredName(), true, Builder::new);

    static {
        PARSER.declareInt(Builder::setNNeighbors, N_NEIGHBORS);
        // "method" must arrive as a string token; anything else is rejected.
        PARSER.declareField(Builder::setMethod, p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return Method.fromString(p.text());
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, METHOD, ObjectParser.ValueType.STRING);
        PARSER.declareDouble(Builder::setMinScoreToWriteFeatureInfluence, MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE);
    }

    private final Integer nNeighbors;
    private final Method method;
    private final Double minScoreToWriteFeatureInfluence;

    /**
     * Constructs the outlier detection configuration
     * @param nNeighbors The number of neighbors. Leave unspecified for dynamic detection.
     * @param method The method. Leave unspecified for a dynamic mixture of methods.
     * @param minScoreToWriteFeatureInfluence The min outlier score required to calculate feature influence. Defaults to 0.1.
     */
    private OutlierDetection(@Nullable Integer nNeighbors, @Nullable Method method, @Nullable Double minScoreToWriteFeatureInfluence) {
        this.nNeighbors = nNeighbors;
        this.method = method;
        this.minScoreToWriteFeatureInfluence = minScoreToWriteFeatureInfluence;
    }

    @Override
    public String getName() {
        return NAME.getPreferredName();
    }

    public Integer getNNeighbors() {
        return nNeighbors;
    }

    public Method getMethod() {
        return method;
    }

    public Double getMinScoreToWriteFeatureInfluence() {
        return minScoreToWriteFeatureInfluence;
    }

    /**
     * Serializes only the explicitly-set parameters; unset (null) values are omitted
     * so the backend's dynamic defaults stay in effect.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (nNeighbors != null) {
            builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors);
        }
        if (method != null) {
            builder.field(METHOD.getPreferredName(), method);
        }
        if (minScoreToWriteFeatureInfluence != null) {
            builder.field(MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName(), minScoreToWriteFeatureInfluence);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        OutlierDetection other = (OutlierDetection) o;
        return Objects.equals(nNeighbors, other.nNeighbors)
            && Objects.equals(method, other.method)
            && Objects.equals(minScoreToWriteFeatureInfluence, other.minScoreToWriteFeatureInfluence);
    }

    @Override
    public int hashCode() {
        return Objects.hash(nNeighbors, method, minScoreToWriteFeatureInfluence);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    /** The available outlier detection methods; serialized in lowercase. */
    public enum Method {
        LOF, LDOF, DISTANCE_KTH_NN, DISTANCE_KNN;

        public static Method fromString(String value) {
            return Method.valueOf(value.toUpperCase(Locale.ROOT));
        }

        @Override
        public String toString() {
            return name().toLowerCase(Locale.ROOT);
        }
    }

    /** Fluent builder; every parameter is optional. */
    public static class Builder {

        private Integer nNeighbors;
        private Method method;
        private Double minScoreToWriteFeatureInfluence;

        private Builder() {}

        public Builder setNNeighbors(Integer nNeighbors) {
            this.nNeighbors = nNeighbors;
            return this;
        }

        public Builder setMethod(Method method) {
            this.method = method;
            return this;
        }

        public Builder setMinScoreToWriteFeatureInfluence(Double minScoreToWriteFeatureInfluence) {
            this.minScoreToWriteFeatureInfluence = minScoreToWriteFeatureInfluence;
            return this;
        }

        public OutlierDetection build() {
            return new OutlierDetection(nNeighbors, method, minScoreToWriteFeatureInfluence);
        }
    }
}

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.util.Objects;
import static java.util.Objects.requireNonNull;
/**
* Object for encapsulating the desired Query for a DataFrameAnalysis
*/
/**
 * Object for encapsulating the desired Query for a DataFrameAnalysis
 */
public class QueryConfig implements ToXContentObject {

    private final QueryBuilder query;

    public static QueryConfig fromXContent(XContentParser parser) throws IOException {
        return new QueryConfig(AbstractQueryBuilder.parseInnerQueryBuilder(parser));
    }

    public QueryConfig(QueryBuilder query) {
        this.query = requireNonNull(query);
    }

    /** Copy constructor: shares the underlying query builder reference of the source. */
    public QueryConfig(QueryConfig queryConfig) {
        this(requireNonNull(queryConfig).query);
    }

    public QueryBuilder getQuery() {
        return query;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // The wrapped query renders itself as a complete object; no extra wrapping here.
        query.toXContent(builder, params);
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        QueryConfig that = (QueryConfig) o;
        return Objects.equals(query, that.query);
    }

    @Override
    public int hashCode() {
        return Objects.hash(query);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }
}

View File

@ -0,0 +1,32 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation;
import org.elasticsearch.common.xcontent.ToXContentObject;
/**
* Defines an evaluation
*/
public interface Evaluation extends ToXContentObject {
/**
 * Returns the name identifying this evaluation type (e.g. "binary_soft_classification"),
 * which is the key used to register the implementation for named X-Content parsing.
 */
String getName();
}

View File

@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation;
import org.elasticsearch.common.xcontent.ToXContentObject;
/**
* Defines an evaluation metric
*/
public interface EvaluationMetric extends ToXContentObject {
/**
 * Returns the name identifying this metric (e.g. "auc_roc"), which is the key
 * used to register the metric for named X-Content parsing.
 */
String getName();
/**
 * The computed result of an evaluation metric.
 */
interface Result extends ToXContentObject {
/**
 * Returns the name of the metric this result was produced by.
 */
String getMetricName();
}
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import java.util.Arrays;
import java.util.List;
/**
 * SPI provider registering the named X-Content parsers for ML data frame evaluations:
 * the evaluation types themselves, their metrics, and the metric results.
 */
public class MlEvaluationNamedXContentProvider implements NamedXContentProvider {
@Override
public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
return Arrays.asList(
// Evaluations
new NamedXContentRegistry.Entry(
Evaluation.class, new ParseField(BinarySoftClassification.NAME), BinarySoftClassification::fromXContent),
// Evaluation metrics
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(AucRocMetric.NAME), AucRocMetric::fromXContent),
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(PrecisionMetric.NAME), PrecisionMetric::fromXContent),
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(RecallMetric.NAME), RecallMetric::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.class, new ParseField(ConfusionMatrixMetric.NAME), ConfusionMatrixMetric::fromXContent),
// Evaluation metrics results
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(AucRocMetric.NAME), AucRocMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(PrecisionMetric.NAME), PrecisionMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(RecallMetric.NAME), RecallMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(ConfusionMatrixMetric.NAME), ConfusionMatrixMetric.Result::fromXContent));
}
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.softclassification;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
/**
 * Base class for soft-classification metrics that are evaluated at a list of
 * score thresholds (serialized under the "at" field).
 */
abstract class AbstractConfusionMatrixMetric implements EvaluationMetric {

    protected static final ParseField AT = new ParseField("at");

    protected final double[] thresholds;

    protected AbstractConfusionMatrixMetric(List<Double> at) {
        // Unbox the threshold list into a primitive array; the list itself must be non-null
        // and each element is unboxed (a null element would throw, as before).
        List<Double> values = Objects.requireNonNull(at);
        double[] unboxed = new double[values.size()];
        for (int i = 0; i < unboxed.length; i++) {
            unboxed[i] = values.get(i);
        }
        this.thresholds = unboxed;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(AT.getPreferredName(), thresholds);
        return builder.endObject();
    }
}

View File

@ -0,0 +1,241 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.softclassification;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Area under the curve (AUC) of the receiver operating characteristic (ROC).
* The ROC curve is a plot of the TPR (true positive rate) against
* the FPR (false positive rate) over a varying threshold.
*/
/**
 * Area under the curve (AUC) of the receiver operating characteristic (ROC).
 * The ROC curve is a plot of the TPR (true positive rate) against
 * the FPR (false positive rate) over a varying threshold.
 */
public class AucRocMetric implements EvaluationMetric {

    public static final String NAME = "auc_roc";

    public static final ParseField INCLUDE_CURVE = new ParseField("include_curve");

    // No unchecked casts here, so no @SuppressWarnings("unchecked") is needed.
    public static final ConstructingObjectParser<AucRocMetric, Void> PARSER =
        new ConstructingObjectParser<>(NAME, args -> new AucRocMetric((Boolean) args[0]));

    static {
        PARSER.declareBoolean(optionalConstructorArg(), INCLUDE_CURVE);
    }

    public static AucRocMetric fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /** Convenience factory for a metric that also reports the ROC curve points. */
    public static AucRocMetric withCurve() {
        return new AucRocMetric(true);
    }

    private final boolean includeCurve;

    public AucRocMetric(Boolean includeCurve) {
        // A null (absent "include_curve" field) defaults to false.
        this.includeCurve = includeCurve == null ? false : includeCurve;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        return builder
            .startObject()
            .field(INCLUDE_CURVE.getPreferredName(), includeCurve)
            .endObject();
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        AucRocMetric that = (AucRocMetric) o;
        // Primitive comparison; equivalent to (and cheaper than) boxing via Objects.equals.
        return includeCurve == that.includeCurve;
    }

    @Override
    public int hashCode() {
        return Objects.hash(includeCurve);
    }

    /** The AUC ROC score, plus the curve points when they were requested. */
    public static class Result implements EvaluationMetric.Result {

        public static Result fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);
        }

        private static final ParseField SCORE = new ParseField("score");
        private static final ParseField CURVE = new ParseField("curve");

        @SuppressWarnings("unchecked")
        private static final ConstructingObjectParser<Result, Void> PARSER =
            new ConstructingObjectParser<>("auc_roc_result", true, args -> new Result((double) args[0], (List<AucRocPoint>) args[1]));

        static {
            PARSER.declareDouble(constructorArg(), SCORE);
            PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AucRocPoint.fromXContent(p), CURVE);
        }

        private final double score;
        private final List<AucRocPoint> curve;

        public Result(double score, @Nullable List<AucRocPoint> curve) {
            this.score = score;
            this.curve = curve;
        }

        @Override
        public String getMetricName() {
            return NAME;
        }

        public double getScore() {
            return score;
        }

        /** Returns an unmodifiable view of the curve, or null when no curve was reported. */
        public List<AucRocPoint> getCurve() {
            return curve == null ? null : Collections.unmodifiableList(curve);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
            builder.startObject();
            builder.field(SCORE.getPreferredName(), score);
            if (curve != null && curve.isEmpty() == false) {
                builder.field(CURVE.getPreferredName(), curve);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Result that = (Result) o;
            return Objects.equals(score, that.score)
                && Objects.equals(curve, that.curve);
        }

        @Override
        public int hashCode() {
            return Objects.hash(score, curve);
        }

        @Override
        public String toString() {
            return Strings.toString(this);
        }
    }

    /** A single (tpr, fpr, threshold) point on the ROC curve. */
    public static final class AucRocPoint implements ToXContentObject {

        public static AucRocPoint fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);
        }

        private static final ParseField TPR = new ParseField("tpr");
        private static final ParseField FPR = new ParseField("fpr");
        private static final ParseField THRESHOLD = new ParseField("threshold");

        private static final ConstructingObjectParser<AucRocPoint, Void> PARSER =
            new ConstructingObjectParser<>(
                "auc_roc_point",
                true,
                args -> new AucRocPoint((double) args[0], (double) args[1], (double) args[2]));

        static {
            PARSER.declareDouble(constructorArg(), TPR);
            PARSER.declareDouble(constructorArg(), FPR);
            PARSER.declareDouble(constructorArg(), THRESHOLD);
        }

        private final double tpr;
        private final double fpr;
        private final double threshold;

        public AucRocPoint(double tpr, double fpr, double threshold) {
            this.tpr = tpr;
            this.fpr = fpr;
            this.threshold = threshold;
        }

        public double getTruePositiveRate() {
            return tpr;
        }

        public double getFalsePositiveRate() {
            return fpr;
        }

        public double getThreshold() {
            return threshold;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            return builder
                .startObject()
                .field(TPR.getPreferredName(), tpr)
                .field(FPR.getPreferredName(), fpr)
                .field(THRESHOLD.getPreferredName(), threshold)
                .endObject();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            AucRocPoint that = (AucRocPoint) o;
            // Fix: use Double.compare instead of == so equals stays consistent with
            // hashCode (Objects.hash uses Double.hashCode's bitwise form): 0.0 vs -0.0
            // previously compared equal yet hashed differently, violating the contract.
            return Double.compare(tpr, that.tpr) == 0
                && Double.compare(fpr, that.fpr) == 0
                && Double.compare(threshold, that.threshold) == 0;
        }

        @Override
        public int hashCode() {
            return Objects.hash(tpr, fpr, threshold);
        }

        @Override
        public String toString() {
            return Strings.toString(this);
        }
    }
}

View File

@ -0,0 +1,129 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.softclassification;
import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Evaluation of binary soft classification methods, e.g. outlier detection.
* This is useful to evaluate problems where a model outputs a probability of whether
* a data frame row belongs to one of two groups.
*/
/**
 * Evaluation of binary soft classification methods, e.g. outlier detection.
 * This is useful to evaluate problems where a model outputs a probability of whether
 * a data frame row belongs to one of two groups.
 */
public class BinarySoftClassification implements Evaluation {

    public static final String NAME = "binary_soft_classification";

    private static final ParseField ACTUAL_FIELD = new ParseField("actual_field");
    private static final ParseField PREDICTED_PROBABILITY_FIELD = new ParseField("predicted_probability_field");
    private static final ParseField METRICS = new ParseField("metrics");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<BinarySoftClassification, Void> PARSER =
        new ConstructingObjectParser<>(
            NAME,
            args -> new BinarySoftClassification((String) args[0], (String) args[1], (List<EvaluationMetric>) args[2]));

    static {
        PARSER.declareString(constructorArg(), ACTUAL_FIELD);
        PARSER.declareString(constructorArg(), PREDICTED_PROBABILITY_FIELD);
        PARSER.declareNamedObjects(optionalConstructorArg(), (p, c, n) -> p.namedObject(EvaluationMetric.class, n, null), METRICS);
    }

    public static BinarySoftClassification fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /**
     * The field where the actual class is marked up.
     * The value of this field is assumed to either be 1 or 0, or true or false.
     */
    private final String actualField;

    /**
     * The field of the predicted probability in [0.0, 1.0].
     */
    private final String predictedProbabilityField;

    /**
     * The list of metrics to calculate, or null to leave the choice to the server.
     */
    private final List<EvaluationMetric> metrics;

    public BinarySoftClassification(String actualField, String predictedProbabilityField, EvaluationMetric... metric) {
        this(actualField, predictedProbabilityField, Arrays.asList(metric));
    }

    public BinarySoftClassification(String actualField, String predictedProbabilityField,
                                    @Nullable List<EvaluationMetric> metrics) {
        this.actualField = Objects.requireNonNull(actualField);
        this.predictedProbabilityField = Objects.requireNonNull(predictedProbabilityField);
        // Fix: metrics is genuinely optional — the PARSER declares it via
        // optionalConstructorArg(), so parsing a document without "metrics" passes null
        // here. The previous Objects.requireNonNull(metrics) made that valid input throw.
        this.metrics = metrics;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
        builder.field(PREDICTED_PROBABILITY_FIELD.getPreferredName(), predictedProbabilityField);
        // Omit "metrics" entirely when unset, mirroring the optional parser declaration.
        if (metrics != null) {
            builder.startObject(METRICS.getPreferredName());
            for (EvaluationMetric metric : metrics) {
                builder.field(metric.getName(), metric);
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        BinarySoftClassification that = (BinarySoftClassification) o;
        return Objects.equals(actualField, that.actualField)
            && Objects.equals(predictedProbabilityField, that.predictedProbabilityField)
            && Objects.equals(metrics, that.metrics);
    }

    @Override
    public int hashCode() {
        return Objects.hash(actualField, predictedProbabilityField, metrics);
    }
}

View File

@ -0,0 +1,206 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.softclassification;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Soft-classification evaluation metric reporting a full confusion matrix
 * (tp/fp/tn/fn) at each requested probability threshold (inherited {@code AT} list).
 */
public class ConfusionMatrixMetric extends AbstractConfusionMatrixMetric {

    public static final String NAME = "confusion_matrix";

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<ConfusionMatrixMetric, Void> PARSER =
        new ConstructingObjectParser<>(NAME, args -> new ConfusionMatrixMetric((List<Double>) args[0]));

    // NOTE: static initializers run in textual order — PARSER must be assigned above.
    static {
        PARSER.declareDoubleArray(constructorArg(), AT);
    }

    /** Parses a {@code ConfusionMatrixMetric} from XContent. */
    public static ConfusionMatrixMetric fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /** Convenience factory: build a metric for the given thresholds. */
    public static ConfusionMatrixMetric at(Double... at) {
        return new ConfusionMatrixMetric(Arrays.asList(at));
    }

    public ConfusionMatrixMetric(List<Double> at) {
        super(at);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ConfusionMatrixMetric that = (ConfusionMatrixMetric) o;
        return Arrays.equals(thresholds, that.thresholds);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(thresholds);
    }

    /** Parsed result: one {@link ConfusionMatrix} per threshold, keyed by the threshold string. */
    public static class Result implements EvaluationMetric.Result {

        public static Result fromXContent(XContentParser parser) throws IOException {
            // LinkedHashMap keeps the server-side threshold order.
            return new Result(parser.map(LinkedHashMap::new, ConfusionMatrix::fromXContent));
        }

        private final Map<String, ConfusionMatrix> results;

        public Result(Map<String, ConfusionMatrix> results) {
            this.results = Objects.requireNonNull(results);
        }

        @Override
        public String getMetricName() {
            return NAME;
        }

        /** @return the confusion matrix at the given threshold, or {@code null} if absent */
        public ConfusionMatrix getScoreByThreshold(String threshold) {
            return results.get(threshold);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
            return builder.map(results);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Result that = (Result) o;
            return Objects.equals(results, that.results);
        }

        @Override
        public int hashCode() {
            return Objects.hash(results);
        }

        @Override
        public String toString() {
            return Strings.toString(this);
        }
    }

    /** Immutable tp/fp/tn/fn counts for a single threshold. */
    public static final class ConfusionMatrix implements ToXContentObject {

        public static ConfusionMatrix fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);
        }

        private static final ParseField TP = new ParseField("tp");
        private static final ParseField FP = new ParseField("fp");
        private static final ParseField TN = new ParseField("tn");
        private static final ParseField FN = new ParseField("fn");

        // Lenient parser (true) so unknown fields from newer servers are ignored.
        @SuppressWarnings("unchecked")
        private static final ConstructingObjectParser<ConfusionMatrix, Void> PARSER =
            new ConstructingObjectParser<>(
                "confusion_matrix", true, args -> new ConfusionMatrix((long) args[0], (long) args[1], (long) args[2], (long) args[3]));

        static {
            PARSER.declareLong(constructorArg(), TP);
            PARSER.declareLong(constructorArg(), FP);
            PARSER.declareLong(constructorArg(), TN);
            PARSER.declareLong(constructorArg(), FN);
        }

        private final long tp;
        private final long fp;
        private final long tn;
        private final long fn;

        public ConfusionMatrix(long tp, long fp, long tn, long fn) {
            this.tp = tp;
            this.fp = fp;
            this.tn = tn;
            this.fn = fn;
        }

        public long getTruePositives() {
            return tp;
        }

        public long getFalsePositives() {
            return fp;
        }

        public long getTrueNegatives() {
            return tn;
        }

        public long getFalseNegatives() {
            return fn;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            return builder
                .startObject()
                .field(TP.getPreferredName(), tp)
                .field(FP.getPreferredName(), fp)
                .field(TN.getPreferredName(), tn)
                .field(FN.getPreferredName(), fn)
                .endObject();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            ConfusionMatrix that = (ConfusionMatrix) o;
            return tp == that.tp && fp == that.fp && tn == that.tn && fn == that.fn;
        }

        @Override
        public int hashCode() {
            return Objects.hash(tp, fp, tn, fn);
        }

        @Override
        public String toString() {
            return Strings.toString(this);
        }
    }
}

View File

@ -0,0 +1,123 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.softclassification;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Soft-classification evaluation metric reporting precision at one or more
 * probability thresholds (supplied via the inherited {@code AT} parameter).
 */
public class PrecisionMetric extends AbstractConfusionMatrixMetric {

    public static final String NAME = "precision";

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<PrecisionMetric, Void> PARSER =
        new ConstructingObjectParser<>(NAME, args -> new PrecisionMetric((List<Double>) args[0]));

    // Static initializers run in textual order; PARSER is assigned above.
    static {
        PARSER.declareDoubleArray(constructorArg(), AT);
    }

    /** Parses a {@code PrecisionMetric} from XContent. */
    public static PrecisionMetric fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /** Convenience factory: build a metric for the given thresholds. */
    public static PrecisionMetric at(Double... at) {
        return new PrecisionMetric(Arrays.asList(at));
    }

    public PrecisionMetric(List<Double> at) {
        super(at);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        PrecisionMetric other = (PrecisionMetric) o;
        return Arrays.equals(thresholds, other.thresholds);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(thresholds);
    }

    /** Parsed result: precision score keyed by the threshold it was computed at. */
    public static class Result implements EvaluationMetric.Result {

        public static Result fromXContent(XContentParser parser) throws IOException {
            // LinkedHashMap keeps the server-side threshold order.
            return new Result(parser.map(LinkedHashMap::new, XContentParser::doubleValue));
        }

        private final Map<String, Double> results;

        public Result(Map<String, Double> results) {
            this.results = Objects.requireNonNull(results);
        }

        @Override
        public String getMetricName() {
            return NAME;
        }

        /** @return the precision at the given threshold, or {@code null} if absent */
        public Double getScoreByThreshold(String threshold) {
            return results.get(threshold);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
            return builder.map(results);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Result other = (Result) o;
            return Objects.equals(results, other.results);
        }

        @Override
        public int hashCode() {
            return Objects.hash(results);
        }

        @Override
        public String toString() {
            return Strings.toString(this);
        }
    }
}

View File

@ -0,0 +1,123 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.softclassification;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Soft-classification evaluation metric reporting recall at one or more
 * probability thresholds (supplied via the inherited {@code AT} parameter).
 */
public class RecallMetric extends AbstractConfusionMatrixMetric {

    public static final String NAME = "recall";

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<RecallMetric, Void> PARSER =
        new ConstructingObjectParser<>(NAME, args -> new RecallMetric((List<Double>) args[0]));

    // Static initializers run in textual order; PARSER is assigned above.
    static {
        PARSER.declareDoubleArray(constructorArg(), AT);
    }

    /** Parses a {@code RecallMetric} from XContent. */
    public static RecallMetric fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /** Convenience factory: build a metric for the given thresholds. */
    public static RecallMetric at(Double... at) {
        return new RecallMetric(Arrays.asList(at));
    }

    public RecallMetric(List<Double> at) {
        super(at);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        RecallMetric other = (RecallMetric) o;
        return Arrays.equals(thresholds, other.thresholds);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(thresholds);
    }

    /** Parsed result: recall score keyed by the threshold it was computed at. */
    public static class Result implements EvaluationMetric.Result {

        public static Result fromXContent(XContentParser parser) throws IOException {
            // LinkedHashMap keeps the server-side threshold order.
            return new Result(parser.map(LinkedHashMap::new, XContentParser::doubleValue));
        }

        private final Map<String, Double> results;

        public Result(Map<String, Double> results) {
            this.results = Objects.requireNonNull(results);
        }

        @Override
        public String getMetricName() {
            return NAME;
        }

        /** @return the recall at the given threshold, or {@code null} if absent */
        public Double getScoreByThreshold(String threshold) {
            return results.get(threshold);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
            return builder.map(results);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Result other = (Result) o;
            return Objects.equals(results, other.results);
        }

        @Override
        public int hashCode() {
            return Objects.hash(results);
        }

        @Override
        public String toString() {
            return Strings.toString(this);
        }
    }
}

View File

@ -1 +1,4 @@
org.elasticsearch.client.indexlifecycle.IndexLifecycleNamedXContentProvider
org.elasticsearch.client.dataframe.DataFrameNamedXContentProvider
org.elasticsearch.client.indexlifecycle.IndexLifecycleNamedXContentProvider
org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider
org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider

View File

@ -24,6 +24,7 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.dataframe.DataFrameNamedXContentProvider;
import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
@ -43,7 +44,9 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.client.dataframe.GetDataFrameTransformRequest.ALLOW_NO_MATCH;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
@ -53,7 +56,9 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
public void testPutDataFrameTransform() throws IOException {
@ -110,7 +115,6 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
}
StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, waitForCompletion, timeValue);
Request request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + stopRequest.getId() + "/_stop"));
@ -128,6 +132,11 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
} else {
assertFalse(request.getParameters().containsKey("timeout"));
}
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
stopRequest.setAllowNoMatch(randomBoolean());
request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest);
assertEquals(stopRequest.getAllowNoMatch(), Boolean.parseBoolean(request.getParameters().get(ALLOW_NO_MATCH)));
}
public void testPreviewDataFrameTransform() throws IOException {
@ -153,6 +162,7 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
assertFalse(request.getParameters().containsKey("from"));
assertFalse(request.getParameters().containsKey("size"));
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
getStatsRequest.setPageParams(new PageParams(0, null));
request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
@ -167,6 +177,10 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
getStatsRequest.setPageParams(new PageParams(0, 10));
request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
getStatsRequest.setAllowNoMatch(false);
request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
}
public void testGetDataFrameTransform() {
@ -178,6 +192,7 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
assertFalse(request.getParameters().containsKey("from"));
assertFalse(request.getParameters().containsKey("size"));
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
getRequest.setPageParams(new PageParams(0, null));
request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
@ -192,6 +207,10 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
getRequest.setPageParams(new PageParams(0, 10));
request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
getRequest.setAllowNoMatch(false);
request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
}
public void testGetDataFrameTransform_givenMulitpleIds() {

View File

@ -66,6 +66,7 @@ import java.util.Map;
import java.util.Optional;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
@ -186,9 +187,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
DataFrameClient client = highLevelClient().dataFrame();
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
client::putDataFrameTransformAsync);
assertTrue(ack.isAcknowledged());
putTransform(transform);
GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest(id);
GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform,
@ -205,14 +204,10 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
DataFrameClient client = highLevelClient().dataFrame();
DataFrameTransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1");
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
client::putDataFrameTransformAsync);
assertTrue(ack.isAcknowledged());
putTransform(transform);
transform = validDataFrameTransformConfig("test-get-all-2", sourceIndex, "pivot-dest-2");
ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
client::putDataFrameTransformAsync);
assertTrue(ack.isAcknowledged());
putTransform(transform);
GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("_all");
GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform,
@ -251,10 +246,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
DataFrameClient client = highLevelClient().dataFrame();
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
client::putDataFrameTransformAsync);
assertTrue(ack.isAcknowledged());
transformsToClean.add(id);
putTransform(transform);
StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id);
StartDataFrameTransformResponse startResponse =
@ -318,7 +310,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
.build();
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/43324")
// TODO add tests to cover continuous situations
public void testGetStats() throws Exception {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
@ -340,10 +332,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
.build();
DataFrameClient client = highLevelClient().dataFrame();
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
client::putDataFrameTransformAsync);
assertTrue(ack.isAcknowledged());
transformsToClean.add(id);
putTransform(transform);
GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
@ -365,15 +354,25 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
GetDataFrameTransformStatsResponse response = execute(new GetDataFrameTransformStatsRequest(id),
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
DataFrameTransformStateAndStats stateAndStats = response.getTransformsStateAndStats().get(0);
assertEquals(IndexerState.STARTED, stateAndStats.getTransformState().getIndexerState());
assertEquals(DataFrameTransformTaskState.STARTED, stateAndStats.getTransformState().getTaskState());
assertEquals(null, stateAndStats.getTransformState().getReason());
assertNotEquals(zeroIndexerStats, stateAndStats.getTransformStats());
assertNotNull(stateAndStats.getTransformState().getProgress());
assertThat(stateAndStats.getTransformState().getTaskState(),
is(oneOf(DataFrameTransformTaskState.STARTED, DataFrameTransformTaskState.STOPPED)));
assertThat(stateAndStats.getTransformState().getIndexerState(),
is(oneOf(IndexerState.STARTED, IndexerState.STOPPED)));
assertThat(stateAndStats.getTransformState().getProgress().getPercentComplete(), equalTo(100.0));
assertThat(stateAndStats.getTransformState().getProgress().getTotalDocs(), greaterThan(0L));
assertThat(stateAndStats.getTransformState().getProgress().getRemainingDocs(), equalTo(0L));
assertThat(stateAndStats.getTransformState().getReason(), is(nullValue()));
});
}
/**
 * Registers the given transform via the high-level client, asserts the PUT was
 * acknowledged, and queues the transform id for cleanup after the test.
 */
void putTransform(DataFrameTransformConfig config) throws IOException {
    DataFrameClient client = highLevelClient().dataFrame();
    AcknowledgedResponse response = execute(new PutDataFrameTransformRequest(config),
        client::putDataFrameTransform, client::putDataFrameTransformAsync);
    assertTrue(response.isAcknowledged());
    transformsToClean.add(config.getId());
}
}

View File

@ -28,12 +28,14 @@ import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataRequest;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.FindFileStructureRequest;
import org.elasticsearch.client.ml.FindFileStructureRequestTests;
import org.elasticsearch.client.ml.FlushJobRequest;
@ -42,6 +44,8 @@ import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetFiltersRequest;
@ -58,13 +62,16 @@ import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.SetUpgradeModeRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedRequestTests;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
@ -75,6 +82,12 @@ import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider;
import org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.client.ml.filestructurefinder.FileStructure;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.Detector;
@ -84,23 +97,30 @@ import org.elasticsearch.client.ml.job.config.JobUpdateTests;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.client.ml.job.config.MlFilterTests;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigTests.randomDataFrameAnalyticsConfig;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.nullValue;
@ -154,7 +174,6 @@ public class MLRequestConvertersTests extends ESTestCase {
assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
}
public void testOpenJob() throws Exception {
String jobId = "some-job-id";
OpenJobRequest openJobRequest = new OpenJobRequest(jobId);
@ -669,6 +688,109 @@ public class MLRequestConvertersTests extends ESTestCase {
assertEquals("/_ml/calendars/" + calendarId + "/events/" + eventId, request.getEndpoint());
}
/**
 * PUT analytics config should convert to {@code PUT /_ml/data_frame/analytics/{id}}
 * with the config serialized as the request body.
 */
public void testPutDataFrameAnalytics() throws IOException {
    PutDataFrameAnalyticsRequest putRequest = new PutDataFrameAnalyticsRequest(randomDataFrameAnalyticsConfig());
    Request request = MLRequestConverters.putDataFrameAnalytics(putRequest);
    assertEquals(HttpPut.METHOD_NAME, request.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + putRequest.getConfig().getId(), request.getEndpoint());
    // Round-trip the entity through the parser to prove the body is the config itself.
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
        DataFrameAnalyticsConfig parsedConfig = DataFrameAnalyticsConfig.fromXContent(parser);
        assertThat(parsedConfig, equalTo(putRequest.getConfig()));
    }
}
/**
 * GET analytics should hit the comma-joined ids endpoint and carry the paging
 * and allow_no_match parameters, with no request body.
 */
public void testGetDataFrameAnalytics() {
    String id1 = randomAlphaOfLength(10);
    String id2 = randomAlphaOfLength(10);
    String id3 = randomAlphaOfLength(10);
    GetDataFrameAnalyticsRequest getRequest = new GetDataFrameAnalyticsRequest(id1, id2, id3)
        .setAllowNoMatch(false)
        .setPageParams(new PageParams(100, 300));

    Request request = MLRequestConverters.getDataFrameAnalytics(getRequest);
    assertEquals(HttpGet.METHOD_NAME, request.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + id1 + "," + id2 + "," + id3, request.getEndpoint());
    assertThat(request.getParameters(), allOf(hasEntry("from", "100"), hasEntry("size", "300"), hasEntry("allow_no_match", "false")));
    assertNull(request.getEntity());
}
/**
 * GET analytics stats should hit {@code .../{ids}/_stats} with paging and
 * allow_no_match parameters, and no request body.
 */
public void testGetDataFrameAnalyticsStats() {
    String id1 = randomAlphaOfLength(10);
    String id2 = randomAlphaOfLength(10);
    String id3 = randomAlphaOfLength(10);
    GetDataFrameAnalyticsStatsRequest statsRequest = new GetDataFrameAnalyticsStatsRequest(id1, id2, id3)
        .setAllowNoMatch(false)
        .setPageParams(new PageParams(100, 300));

    Request request = MLRequestConverters.getDataFrameAnalyticsStats(statsRequest);
    assertEquals(HttpGet.METHOD_NAME, request.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + id1 + "," + id2 + "," + id3 + "/_stats", request.getEndpoint());
    assertThat(request.getParameters(), allOf(hasEntry("from", "100"), hasEntry("size", "300"), hasEntry("allow_no_match", "false")));
    assertNull(request.getEntity());
}
/** START analytics maps to {@code POST .../{id}/_start} with an empty body. */
public void testStartDataFrameAnalytics() {
    String id = randomAlphaOfLength(10);
    StartDataFrameAnalyticsRequest startRequest = new StartDataFrameAnalyticsRequest(id);

    Request request = MLRequestConverters.startDataFrameAnalytics(startRequest);
    assertEquals(HttpPost.METHOD_NAME, request.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + id + "/_start", request.getEndpoint());
    assertNull(request.getEntity());
}
/** START analytics with a timeout adds the {@code timeout} URL parameter. */
public void testStartDataFrameAnalytics_WithTimeout() {
    String id = randomAlphaOfLength(10);
    StartDataFrameAnalyticsRequest startRequest =
        new StartDataFrameAnalyticsRequest(id).setTimeout(TimeValue.timeValueMinutes(1));

    Request request = MLRequestConverters.startDataFrameAnalytics(startRequest);
    assertEquals(HttpPost.METHOD_NAME, request.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + id + "/_start", request.getEndpoint());
    assertThat(request.getParameters(), hasEntry("timeout", "1m"));
    assertNull(request.getEntity());
}
/** STOP analytics maps to {@code POST .../{id}/_stop} with an empty body. */
public void testStopDataFrameAnalytics() {
    String id = randomAlphaOfLength(10);
    StopDataFrameAnalyticsRequest stopRequest = new StopDataFrameAnalyticsRequest(id);

    Request request = MLRequestConverters.stopDataFrameAnalytics(stopRequest);
    assertEquals(HttpPost.METHOD_NAME, request.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + id + "/_stop", request.getEndpoint());
    assertNull(request.getEntity());
}
/** STOP analytics with options adds the {@code timeout} and {@code allow_no_match} parameters. */
public void testStopDataFrameAnalytics_WithParams() {
    String id = randomAlphaOfLength(10);
    StopDataFrameAnalyticsRequest stopRequest = new StopDataFrameAnalyticsRequest(id)
        .setTimeout(TimeValue.timeValueMinutes(1))
        .setAllowNoMatch(false);

    Request request = MLRequestConverters.stopDataFrameAnalytics(stopRequest);
    assertEquals(HttpPost.METHOD_NAME, request.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + id + "/_stop", request.getEndpoint());
    assertThat(request.getParameters(), allOf(hasEntry("timeout", "1m"), hasEntry("allow_no_match", "false")));
    assertNull(request.getEntity());
}
/** Verifies the delete-analytics converter: DELETE to .../analytics/{id} with no request body. */
public void testDeleteDataFrameAnalytics() {
    String analyticsId = randomAlphaOfLength(10);
    DeleteDataFrameAnalyticsRequest mlRequest = new DeleteDataFrameAnalyticsRequest(analyticsId);
    Request httpRequest = MLRequestConverters.deleteDataFrameAnalytics(mlRequest);
    assertEquals(HttpDelete.METHOD_NAME, httpRequest.getMethod());
    assertEquals("/_ml/data_frame/analytics/" + analyticsId, httpRequest.getEndpoint());
    assertNull(httpRequest.getEntity());
}
/**
 * Verifies the evaluate-data-frame converter: POST to /_ml/data_frame/_evaluate with the
 * request serialized as the HTTP entity. The entity is round-tripped through the parser
 * to prove it is the request's XContent representation.
 */
public void testEvaluateDataFrame() throws IOException {
    BinarySoftClassification evaluation = new BinarySoftClassification(
        randomAlphaOfLengthBetween(1, 10),
        randomAlphaOfLengthBetween(1, 10),
        PrecisionMetric.at(0.5), RecallMetric.at(0.6, 0.7));
    EvaluateDataFrameRequest evaluateRequest =
        new EvaluateDataFrameRequest(Arrays.asList(generateRandomStringArray(1, 10, false, false)), evaluation);
    Request httpRequest = MLRequestConverters.evaluateDataFrame(evaluateRequest);
    assertEquals(HttpPost.METHOD_NAME, httpRequest.getMethod());
    assertEquals("/_ml/data_frame/_evaluate", httpRequest.getEndpoint());
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, httpRequest.getEntity().getContent())) {
        assertThat(EvaluateDataFrameRequest.fromXContent(parser), equalTo(evaluateRequest));
    }
}
public void testPutFilter() throws IOException {
MlFilter filter = MlFilterTests.createRandomBuilder("foo").build();
PutFilterRequest putFilterRequest = new PutFilterRequest(filter);
@ -835,6 +957,15 @@ public class MLRequestConvertersTests extends ESTestCase {
assertThat(request.getParameters().get(SetUpgradeModeRequest.TIMEOUT.getPreferredName()), is("1h"));
}
@Override
protected NamedXContentRegistry xContentRegistry() {
    // The registry needs the search-module parsers plus the ML analysis and evaluation
    // parsers so that named objects inside the converted requests can be parsed back.
    List<NamedXContentRegistry.Entry> entries =
        new ArrayList<>(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents());
    entries.addAll(new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers());
    entries.addAll(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
    return new NamedXContentRegistry(entries);
}
private static Job createValidJob(String jobId) {
AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
Detector.builder().setFunction("count").build()));

View File

@ -29,11 +29,13 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataResponse;
@ -42,6 +44,8 @@ import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
import org.elasticsearch.client.ml.FindFileStructureRequest;
import org.elasticsearch.client.ml.FindFileStructureResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
@ -52,6 +56,10 @@ import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsResponse;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
@ -77,6 +85,8 @@ import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.PutFilterRequest;
@ -86,8 +96,11 @@ import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.SetUpgradeModeRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
@ -103,6 +116,18 @@ import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedState;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsState;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.QueryConfig;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.client.ml.filestructurefinder.FileStructure;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.DataDescription;
@ -113,9 +138,12 @@ import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.junit.After;
@ -136,6 +164,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@ -528,18 +557,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
String indexName = "start_data_1";
// Set up the index and docs
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName, defaultMappingForTest());
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
long now = (System.currentTimeMillis()/1000)*1000;
@ -611,18 +629,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
String indexName = "stop_data_1";
// Set up the index
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName, defaultMappingForTest());
// create the job and the datafeed
Job job1 = buildJob(jobId1);
@ -684,18 +691,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
String indexName = "datafeed_stats_data_1";
// Set up the index
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName, defaultMappingForTest());
// create the job and the datafeed
Job job1 = buildJob(jobId1);
@ -762,18 +758,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
String indexName = "preview_data_1";
// Set up the index and docs
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName, defaultMappingForTest());
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
long now = (System.currentTimeMillis()/1000)*1000;
@ -826,21 +811,9 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
}
private String createExpiredData(String jobId) throws Exception {
String indexId = jobId + "-data";
String indexName = jobId + "-data";
// Set up the index and docs
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexId);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.field("format", "epoch_millis")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName, defaultMappingForTest());
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@ -853,7 +826,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
long timestamp = nowMillis - TimeValue.timeValueHours(totalBuckets - bucket).getMillis();
int bucketRate = bucket == anomalousBucket ? anomalousRate : normalRate;
for (int point = 0; point < bucketRate; point++) {
IndexRequest indexRequest = new IndexRequest(indexId);
IndexRequest indexRequest = new IndexRequest(indexName);
indexRequest.source(XContentType.JSON, "timestamp", timestamp, "total", randomInt(1000));
bulk.add(indexRequest);
}
@ -872,7 +845,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
Job job = buildJobForExpiredDataTests(jobId);
putJob(job);
openJob(job);
String datafeedId = createAndPutDatafeed(jobId, indexId);
String datafeedId = createAndPutDatafeed(jobId, indexName);
startDatafeed(datafeedId, String.valueOf(0), String.valueOf(nowMillis - TimeValue.timeValueHours(24).getMillis()));
@ -1230,6 +1203,418 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(remainingIds, not(hasItem(deletedEvent)));
}
/**
 * PUTs a minimal outlier-detection analytics config and checks that the response echoes
 * it back with server-side defaults filled in (query config, results field, model memory limit).
 */
public void testPutDataFrameAnalyticsConfig() throws Exception {
    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configId = "put-test-config";
    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder(configId)
        .setSource(DataFrameAnalyticsSource.builder()
            .setIndex("put-test-source-index")
            .build())
        .setDest(DataFrameAnalyticsDest.builder()
            .setIndex("put-test-dest-index")
            .build())
        .setAnalysis(OutlierDetection.createDefault())
        .build();

    // The source index must exist before the config referencing it is created.
    createIndex("put-test-source-index", defaultMappingForTest());

    PutDataFrameAnalyticsResponse putDataFrameAnalyticsResponse = execute(
        new PutDataFrameAnalyticsRequest(config),
        machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);
    DataFrameAnalyticsConfig createdConfig = putDataFrameAnalyticsResponse.getConfig();
    // Fields we set explicitly must round-trip unchanged ...
    assertThat(createdConfig.getId(), equalTo(config.getId()));
    assertThat(createdConfig.getSource().getIndex(), equalTo(config.getSource().getIndex()));
    assertThat(createdConfig.getSource().getQueryConfig(), equalTo(new QueryConfig(new MatchAllQueryBuilder()))); // default value
    assertThat(createdConfig.getDest().getIndex(), equalTo(config.getDest().getIndex()));
    assertThat(createdConfig.getDest().getResultsField(), equalTo("ml")); // default value
    assertThat(createdConfig.getAnalysis(), equalTo(config.getAnalysis()));
    assertThat(createdConfig.getAnalyzedFields(), equalTo(config.getAnalyzedFields()));
    // ... while unset fields come back with server defaults.
    assertThat(createdConfig.getModelMemoryLimit(), equalTo(ByteSizeValue.parseBytesSizeValue("1gb", ""))); // default value
}
/**
 * Creates one analytics config, then fetches it by exact id and checks the GET
 * response contains exactly the config the PUT call returned.
 */
public void testGetDataFrameAnalyticsConfig_SingleConfig() throws Exception {
    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configId = "get-test-config";
    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder(configId)
        .setSource(DataFrameAnalyticsSource.builder()
            .setIndex("get-test-source-index")
            .build())
        .setDest(DataFrameAnalyticsDest.builder()
            .setIndex("get-test-dest-index")
            .build())
        .setAnalysis(OutlierDetection.createDefault())
        .build();

    // The source index must exist before the config referencing it is created.
    createIndex("get-test-source-index", defaultMappingForTest());

    PutDataFrameAnalyticsResponse putDataFrameAnalyticsResponse = execute(
        new PutDataFrameAnalyticsRequest(config),
        machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);
    // Compare against the PUT response (with defaults filled in), not the local builder copy.
    DataFrameAnalyticsConfig createdConfig = putDataFrameAnalyticsResponse.getConfig();

    GetDataFrameAnalyticsResponse getDataFrameAnalyticsResponse = execute(
        new GetDataFrameAnalyticsRequest(configId),
        machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
    assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(1));
    assertThat(getDataFrameAnalyticsResponse.getAnalytics(), contains(createdConfig));
}
/**
 * Creates ten analytics configs and exercises the different ways of fetching them:
 * get-all, wildcard id, an explicit id list, and wildcard combined with pagination.
 */
public void testGetDataFrameAnalyticsConfig_MultipleConfigs() throws Exception {
    createIndex("get-test-source-index", defaultMappingForTest());

    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configIdPrefix = "get-test-config-";
    int numberOfConfigs = 10;
    // Keep the PUT responses (defaults filled in) so the GET results can be compared to them.
    List<DataFrameAnalyticsConfig> createdConfigs = new ArrayList<>();
    for (int i = 0; i < numberOfConfigs; ++i) {
        String configId = configIdPrefix + i;
        DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder(configId)
            .setSource(DataFrameAnalyticsSource.builder()
                .setIndex("get-test-source-index")
                .build())
            .setDest(DataFrameAnalyticsDest.builder()
                .setIndex("get-test-dest-index")
                .build())
            .setAnalysis(OutlierDetection.createDefault())
            .build();

        PutDataFrameAnalyticsResponse putDataFrameAnalyticsResponse = execute(
            new PutDataFrameAnalyticsRequest(config),
            machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);
        DataFrameAnalyticsConfig createdConfig = putDataFrameAnalyticsResponse.getConfig();
        createdConfigs.add(createdConfig);
    }

    {
        // Fetch everything with the get-all request.
        GetDataFrameAnalyticsResponse getDataFrameAnalyticsResponse = execute(
            GetDataFrameAnalyticsRequest.getAllDataFrameAnalyticsRequest(),
            machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
        assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(numberOfConfigs));
        assertThat(getDataFrameAnalyticsResponse.getAnalytics(), containsInAnyOrder(createdConfigs.toArray()));
    }
    {
        // Fetch everything via a wildcard on the shared id prefix.
        GetDataFrameAnalyticsResponse getDataFrameAnalyticsResponse = execute(
            new GetDataFrameAnalyticsRequest(configIdPrefix + "*"),
            machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
        assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(numberOfConfigs));
        assertThat(getDataFrameAnalyticsResponse.getAnalytics(), containsInAnyOrder(createdConfigs.toArray()));
    }
    {
        // Fetch a subset by listing explicit ids; response order is unspecified.
        GetDataFrameAnalyticsResponse getDataFrameAnalyticsResponse = execute(
            new GetDataFrameAnalyticsRequest(configIdPrefix + "9", configIdPrefix + "1", configIdPrefix + "4"),
            machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
        assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(3));
        assertThat(
            getDataFrameAnalyticsResponse.getAnalytics(),
            containsInAnyOrder(createdConfigs.get(1), createdConfigs.get(4), createdConfigs.get(9)));
    }
    {
        // Paginate the wildcard match: skip 3, take 4 (configs 3..6 in id order).
        GetDataFrameAnalyticsRequest getDataFrameAnalyticsRequest = new GetDataFrameAnalyticsRequest(configIdPrefix + "*");
        getDataFrameAnalyticsRequest.setPageParams(new PageParams(3, 4));
        GetDataFrameAnalyticsResponse getDataFrameAnalyticsResponse = execute(
            getDataFrameAnalyticsRequest,
            machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
        assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(4));
        assertThat(
            getDataFrameAnalyticsResponse.getAnalytics(),
            containsInAnyOrder(createdConfigs.get(3), createdConfigs.get(4), createdConfigs.get(5), createdConfigs.get(6)));
    }
}
/** Fetching a non-existent analytics config id must fail with HTTP 404. */
public void testGetDataFrameAnalyticsConfig_ConfigNotFound() {
    MachineLearningClient mlClient = highLevelClient().machineLearning();
    GetDataFrameAnalyticsRequest getRequest = new GetDataFrameAnalyticsRequest("config_that_does_not_exist");
    ElasticsearchStatusException e = expectThrows(
        ElasticsearchStatusException.class,
        () -> execute(getRequest, mlClient::getDataFrameAnalytics, mlClient::getDataFrameAnalyticsAsync));
    assertThat(e.status().getStatus(), equalTo(404));
}
/**
 * Creates (but does not start) an analytics config and checks its stats: state is
 * STOPPED, no progress/node/assignment information, and no node or task failures.
 */
public void testGetDataFrameAnalyticsStats() throws Exception {
    String sourceIndex = "get-stats-test-source-index";
    String destIndex = "get-stats-test-dest-index";
    createIndex(sourceIndex, defaultMappingForTest());
    highLevelClient().index(new IndexRequest(sourceIndex).source(XContentType.JSON, "total", 10000), RequestOptions.DEFAULT);

    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configId = "get-stats-test-config";
    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder(configId)
        .setSource(DataFrameAnalyticsSource.builder()
            .setIndex(sourceIndex)
            .build())
        .setDest(DataFrameAnalyticsDest.builder()
            .setIndex(destIndex)
            .build())
        .setAnalysis(OutlierDetection.createDefault())
        .build();

    execute(
        new PutDataFrameAnalyticsRequest(config),
        machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);

    GetDataFrameAnalyticsStatsResponse statsResponse = execute(
        new GetDataFrameAnalyticsStatsRequest(configId),
        machineLearningClient::getDataFrameAnalyticsStats, machineLearningClient::getDataFrameAnalyticsStatsAsync);

    assertThat(statsResponse.getAnalyticsStats(), hasSize(1));
    DataFrameAnalyticsStats stats = statsResponse.getAnalyticsStats().get(0);
    assertThat(stats.getId(), equalTo(configId));
    // Never started, so the task-related stats fields are absent.
    assertThat(stats.getState(), equalTo(DataFrameAnalyticsState.STOPPED));
    assertNull(stats.getProgressPercent());
    assertNull(stats.getNode());
    assertNull(stats.getAssignmentExplanation());
    assertThat(statsResponse.getNodeFailures(), hasSize(0));
    assertThat(statsResponse.getTaskFailures(), hasSize(0));
}
/**
 * End-to-end start test: put a config, start it, wait for it to run to completion
 * (state returns to STOPPED), and verify the destination index was created by the job.
 */
public void testStartDataFrameAnalyticsConfig() throws Exception {
    String sourceIndex = "start-test-source-index";
    String destIndex = "start-test-dest-index";
    createIndex(sourceIndex, defaultMappingForTest());
    highLevelClient().index(new IndexRequest(sourceIndex).source(XContentType.JSON, "total", 10000)
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);

    // Verify that the destination index does not exist. Otherwise, analytics' reindexing step would fail.
    assertFalse(highLevelClient().indices().exists(new GetIndexRequest(destIndex), RequestOptions.DEFAULT));

    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configId = "start-test-config";
    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder(configId)
        .setSource(DataFrameAnalyticsSource.builder()
            .setIndex(sourceIndex)
            .build())
        .setDest(DataFrameAnalyticsDest.builder()
            .setIndex(destIndex)
            .build())
        .setAnalysis(OutlierDetection.createDefault())
        .build();

    execute(
        new PutDataFrameAnalyticsRequest(config),
        machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);
    assertThat(getAnalyticsState(configId), equalTo(DataFrameAnalyticsState.STOPPED));

    AcknowledgedResponse startDataFrameAnalyticsResponse = execute(
        new StartDataFrameAnalyticsRequest(configId),
        machineLearningClient::startDataFrameAnalytics, machineLearningClient::startDataFrameAnalyticsAsync);
    assertTrue(startDataFrameAnalyticsResponse.isAcknowledged());

    // Wait for the analytics to stop, i.e. for the job to finish — STOPPED here means "completed".
    assertBusy(() -> assertThat(getAnalyticsState(configId), equalTo(DataFrameAnalyticsState.STOPPED)), 30, TimeUnit.SECONDS);

    // Verify that the destination index got created.
    assertTrue(highLevelClient().indices().exists(new GetIndexRequest(destIndex), RequestOptions.DEFAULT));
}
/**
 * End-to-end stop test: put a config, start it, verify it reports STARTED,
 * then stop it and verify it reports STOPPED.
 */
public void testStopDataFrameAnalyticsConfig() throws Exception {
    String sourceIndex = "stop-test-source-index";
    String destIndex = "stop-test-dest-index";
    createIndex(sourceIndex, mappingForClassification());
    highLevelClient().index(new IndexRequest(sourceIndex).source(XContentType.JSON, "total", 10000)
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);

    // Verify that the destination index does not exist. Otherwise, analytics' reindexing step would fail.
    assertFalse(highLevelClient().indices().exists(new GetIndexRequest(destIndex), RequestOptions.DEFAULT));

    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configId = "stop-test-config";
    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder(configId)
        .setSource(DataFrameAnalyticsSource.builder()
            .setIndex(sourceIndex)
            .build())
        .setDest(DataFrameAnalyticsDest.builder()
            .setIndex(destIndex)
            .build())
        .setAnalysis(OutlierDetection.createDefault())
        .build();

    execute(
        new PutDataFrameAnalyticsRequest(config),
        machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);
    assertThat(getAnalyticsState(configId), equalTo(DataFrameAnalyticsState.STOPPED));

    AcknowledgedResponse startDataFrameAnalyticsResponse = execute(
        new StartDataFrameAnalyticsRequest(configId),
        machineLearningClient::startDataFrameAnalytics, machineLearningClient::startDataFrameAnalyticsAsync);
    assertTrue(startDataFrameAnalyticsResponse.isAcknowledged());
    // NOTE(review): this asserts STARTED immediately after the start call; if the job
    // completes very quickly the state could already be back to STOPPED — potential flakiness.
    assertThat(getAnalyticsState(configId), equalTo(DataFrameAnalyticsState.STARTED));

    StopDataFrameAnalyticsResponse stopDataFrameAnalyticsResponse = execute(
        new StopDataFrameAnalyticsRequest(configId),
        machineLearningClient::stopDataFrameAnalytics, machineLearningClient::stopDataFrameAnalyticsAsync);
    assertTrue(stopDataFrameAnalyticsResponse.isStopped());
    assertThat(getAnalyticsState(configId), equalTo(DataFrameAnalyticsState.STOPPED));
}
/**
 * Returns the current state of the single analytics job identified by {@code configId},
 * asserting that exactly one stats entry is returned for it.
 */
private DataFrameAnalyticsState getAnalyticsState(String configId) throws IOException {
    GetDataFrameAnalyticsStatsResponse response = highLevelClient().machineLearning()
        .getDataFrameAnalyticsStats(new GetDataFrameAnalyticsStatsRequest(configId), RequestOptions.DEFAULT);
    assertThat(response.getAnalyticsStats(), hasSize(1));
    return response.getAnalyticsStats().get(0).getState();
}
/**
 * Delete lifecycle test: verifies via wildcard GETs that the config is absent
 * before creation, present after PUT, and absent again after DELETE.
 */
public void testDeleteDataFrameAnalyticsConfig() throws Exception {
    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configId = "delete-test-config";
    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder(configId)
        .setSource(DataFrameAnalyticsSource.builder()
            .setIndex("delete-test-source-index")
            .build())
        .setDest(DataFrameAnalyticsDest.builder()
            .setIndex("delete-test-dest-index")
            .build())
        .setAnalysis(OutlierDetection.createDefault())
        .build();

    createIndex("delete-test-source-index", defaultMappingForTest());

    // Sanity check: nothing matching the id exists yet.
    GetDataFrameAnalyticsResponse getDataFrameAnalyticsResponse = execute(
        new GetDataFrameAnalyticsRequest(configId + "*"),
        machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
    assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(0));

    execute(
        new PutDataFrameAnalyticsRequest(config),
        machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);

    // The config is now visible.
    getDataFrameAnalyticsResponse = execute(
        new GetDataFrameAnalyticsRequest(configId + "*"),
        machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
    assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(1));

    AcknowledgedResponse deleteDataFrameAnalyticsResponse = execute(
        new DeleteDataFrameAnalyticsRequest(configId),
        machineLearningClient::deleteDataFrameAnalytics, machineLearningClient::deleteDataFrameAnalyticsAsync);
    assertTrue(deleteDataFrameAnalyticsResponse.isAcknowledged());

    // And gone again after the delete.
    getDataFrameAnalyticsResponse = execute(
        new GetDataFrameAnalyticsRequest(configId + "*"),
        machineLearningClient::getDataFrameAnalytics, machineLearningClient::getDataFrameAnalyticsAsync);
    assertThat(getDataFrameAnalyticsResponse.getAnalytics(), hasSize(0));
}
/** Deleting a non-existent analytics config id must fail with HTTP 404. */
public void testDeleteDataFrameAnalyticsConfig_ConfigNotFound() {
    MachineLearningClient mlClient = highLevelClient().machineLearning();
    DeleteDataFrameAnalyticsRequest deleteRequest = new DeleteDataFrameAnalyticsRequest("config_that_does_not_exist");
    ElasticsearchStatusException e = expectThrows(
        ElasticsearchStatusException.class,
        () -> execute(deleteRequest, mlClient::deleteDataFrameAnalytics, mlClient::deleteDataFrameAnalyticsAsync));
    assertThat(e.status().getStatus(), equalTo(404));
}
/**
 * End-to-end evaluation test: indexes 10 labeled documents (5 true, 5 false, each with a
 * predicted probability), runs a binary soft-classification evaluation over them, and
 * checks precision/recall at several thresholds, a confusion matrix, and the AUC-ROC curve.
 * The inline comments derive each expected value from the doc numbers (#0–#9) above.
 */
public void testEvaluateDataFrame() throws IOException {
    String indexName = "evaluate-test-index";
    createIndex(indexName, mappingForClassification());
    BulkRequest bulk = new BulkRequest()
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
        .add(docForClassification(indexName, false, 0.1)) // #0
        .add(docForClassification(indexName, false, 0.2)) // #1
        .add(docForClassification(indexName, false, 0.3)) // #2
        .add(docForClassification(indexName, false, 0.4)) // #3
        .add(docForClassification(indexName, false, 0.7)) // #4
        .add(docForClassification(indexName, true, 0.2)) // #5
        .add(docForClassification(indexName, true, 0.3)) // #6
        .add(docForClassification(indexName, true, 0.4)) // #7
        .add(docForClassification(indexName, true, 0.8)) // #8
        .add(docForClassification(indexName, true, 0.9)); // #9
    highLevelClient().bulk(bulk, RequestOptions.DEFAULT);

    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    EvaluateDataFrameRequest evaluateDataFrameRequest =
        new EvaluateDataFrameRequest(
            indexName,
            new BinarySoftClassification(
                actualField,
                probabilityField,
                PrecisionMetric.at(0.4, 0.5, 0.6), RecallMetric.at(0.5, 0.7), ConfusionMatrixMetric.at(0.5), AucRocMetric.withCurve()));

    EvaluateDataFrameResponse evaluateDataFrameResponse =
        execute(evaluateDataFrameRequest, machineLearningClient::evaluateDataFrame, machineLearningClient::evaluateDataFrameAsync);
    assertThat(evaluateDataFrameResponse.getEvaluationName(), equalTo(BinarySoftClassification.NAME));
    // One result per requested metric: precision, recall, confusion matrix, AUC-ROC.
    assertThat(evaluateDataFrameResponse.getMetrics().size(), equalTo(4));

    PrecisionMetric.Result precisionResult = evaluateDataFrameResponse.getMetricByName(PrecisionMetric.NAME);
    assertThat(precisionResult.getMetricName(), equalTo(PrecisionMetric.NAME));
    // Precision is 3/5=0.6 as there were 3 true examples (#7, #8, #9) among the 5 positive examples (#3, #4, #7, #8, #9)
    assertThat(precisionResult.getScoreByThreshold("0.4"), closeTo(0.6, 1e-9));
    // Precision is 2/3=0.(6) as there were 2 true examples (#8, #9) among the 3 positive examples (#4, #8, #9)
    assertThat(precisionResult.getScoreByThreshold("0.5"), closeTo(0.666666666, 1e-9));
    // Precision is 2/3=0.(6) as there were 2 true examples (#8, #9) among the 3 positive examples (#4, #8, #9)
    assertThat(precisionResult.getScoreByThreshold("0.6"), closeTo(0.666666666, 1e-9));
    // Thresholds not asked for yield no score.
    assertNull(precisionResult.getScoreByThreshold("0.1"));

    RecallMetric.Result recallResult = evaluateDataFrameResponse.getMetricByName(RecallMetric.NAME);
    assertThat(recallResult.getMetricName(), equalTo(RecallMetric.NAME));
    // Recall is 2/5=0.4 as there were 2 true positive examples (#8, #9) among the 5 true examples (#5, #6, #7, #8, #9)
    assertThat(recallResult.getScoreByThreshold("0.5"), closeTo(0.4, 1e-9));
    // Recall is 2/5=0.4 as there were 2 true positive examples (#8, #9) among the 5 true examples (#5, #6, #7, #8, #9)
    assertThat(recallResult.getScoreByThreshold("0.7"), closeTo(0.4, 1e-9));
    assertNull(recallResult.getScoreByThreshold("0.1"));

    ConfusionMatrixMetric.Result confusionMatrixResult = evaluateDataFrameResponse.getMetricByName(ConfusionMatrixMetric.NAME);
    assertThat(confusionMatrixResult.getMetricName(), equalTo(ConfusionMatrixMetric.NAME));
    ConfusionMatrixMetric.ConfusionMatrix confusionMatrix = confusionMatrixResult.getScoreByThreshold("0.5");
    assertThat(confusionMatrix.getTruePositives(), equalTo(2L)); // docs #8 and #9
    assertThat(confusionMatrix.getFalsePositives(), equalTo(1L)); // doc #4
    assertThat(confusionMatrix.getTrueNegatives(), equalTo(4L)); // docs #0, #1, #2 and #3
    assertThat(confusionMatrix.getFalseNegatives(), equalTo(3L)); // docs #5, #6 and #7
    assertNull(confusionMatrixResult.getScoreByThreshold("0.1"));

    AucRocMetric.Result aucRocResult = evaluateDataFrameResponse.getMetricByName(AucRocMetric.NAME);
    assertThat(aucRocResult.getMetricName(), equalTo(AucRocMetric.NAME));
    assertThat(aucRocResult.getScore(), closeTo(0.70025, 1e-9));
    assertNotNull(aucRocResult.getCurve());
    List<AucRocMetric.AucRocPoint> curve = aucRocResult.getCurve();
    // At threshold 0 everything is classified positive: TPR = FPR = 1.
    AucRocMetric.AucRocPoint curvePointAtThreshold0 = curve.stream().filter(p -> p.getThreshold() == 0.0).findFirst().get();
    assertThat(curvePointAtThreshold0.getTruePositiveRate(), equalTo(1.0));
    assertThat(curvePointAtThreshold0.getFalsePositiveRate(), equalTo(1.0));
    assertThat(curvePointAtThreshold0.getThreshold(), equalTo(0.0));
    // At threshold 1 everything is classified negative: TPR = FPR = 0.
    AucRocMetric.AucRocPoint curvePointAtThreshold1 = curve.stream().filter(p -> p.getThreshold() == 1.0).findFirst().get();
    assertThat(curvePointAtThreshold1.getTruePositiveRate(), equalTo(0.0));
    assertThat(curvePointAtThreshold1.getFalsePositiveRate(), equalTo(0.0));
    assertThat(curvePointAtThreshold1.getThreshold(), equalTo(1.0));
}
/**
 * Builds the default index mapping used by these tests: a {@code date} field
 * named "timestamp" and a {@code long} field named "total".
 */
private static XContentBuilder defaultMappingForTest() throws IOException {
    XContentBuilder mapping = XContentFactory.jsonBuilder();
    mapping.startObject();
    mapping.startObject("properties");
    mapping.startObject("timestamp").field("type", "date").endObject();
    mapping.startObject("total").field("type", "long").endObject();
    mapping.endObject();
    return mapping.endObject();
}
// Field names for the soft-classification evaluation documents:
// actualField holds the ground-truth label as a stringified boolean (see docForClassification),
// probabilityField holds the predicted probability as a double.
private static final String actualField = "label";
private static final String probabilityField = "p";
/**
 * Builds the index mapping for classification-evaluation documents: the actual
 * label as a {@code keyword} and the predicted probability as a {@code double}.
 */
private static XContentBuilder mappingForClassification() throws IOException {
    XContentBuilder mapping = XContentFactory.jsonBuilder();
    mapping.startObject();
    mapping.startObject("properties");
    mapping.startObject(actualField).field("type", "keyword").endObject();
    mapping.startObject(probabilityField).field("type", "double").endObject();
    mapping.endObject();
    return mapping.endObject();
}
/**
 * Creates an index request for one evaluation document: the actual label
 * (boolean rendered as a string) and the predicted probability.
 */
private static IndexRequest docForClassification(String indexName, boolean isTrue, double p) {
    IndexRequest doc = new IndexRequest();
    doc.index(indexName);
    return doc.source(XContentType.JSON, actualField, Boolean.toString(isTrue), probabilityField, p);
}
/**
 * Creates {@code indexName} with the given mapping via the high-level REST client.
 */
private void createIndex(String indexName, XContentBuilder mapping) throws IOException {
    CreateIndexRequest createRequest = new CreateIndexRequest(indexName).mapping(mapping);
    highLevelClient().indices().create(createRequest, RequestOptions.DEFAULT);
}
public void testPutFilter() throws Exception {
String filterId = "filter-job-test";
MlFilter mlFilter = MlFilter.builder(filterId)

View File

@ -20,14 +20,18 @@ package org.elasticsearch.client;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.job.config.Job;
import java.io.IOException;
@ -48,6 +52,7 @@ public class MlTestStateCleaner {
// Removes all ML state left behind by a test: datafeeds, then jobs, then
// data frame analytics. Datafeeds are deleted before the jobs they reference —
// presumably a required ordering; confirm against the delete-job API contract.
public void clearMlMetadata() throws IOException {
    deleteAllDatafeeds();
    deleteAllJobs();
    deleteAllDataFrameAnalytics();
}
private void deleteAllDatafeeds() throws IOException {
@ -99,4 +104,12 @@ public class MlTestStateCleaner {
throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", e1);
}
}
/**
 * Fetches every data frame analytics config and deletes each one by id.
 */
private void deleteAllDataFrameAnalytics() throws IOException {
    GetDataFrameAnalyticsResponse allAnalytics =
        mlClient.getDataFrameAnalytics(GetDataFrameAnalyticsRequest.getAllDataFrameAnalyticsRequest(), RequestOptions.DEFAULT);
    for (DataFrameAnalyticsConfig analyticsConfig : allAnalytics.getAnalytics()) {
        DeleteDataFrameAnalyticsRequest deleteRequest = new DeleteDataFrameAnalyticsRequest(analyticsConfig.getId());
        mlClient.deleteDataFrameAnalytics(deleteRequest, RequestOptions.DEFAULT);
    }
}
}

View File

@ -46,6 +46,8 @@ import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.core.MainRequest;
import org.elasticsearch.client.core.MainResponse;
import org.elasticsearch.client.dataframe.transforms.SyncConfig;
import org.elasticsearch.client.dataframe.transforms.TimeSyncConfig;
import org.elasticsearch.client.indexlifecycle.AllocateAction;
import org.elasticsearch.client.indexlifecycle.DeleteAction;
import org.elasticsearch.client.indexlifecycle.ForceMergeAction;
@ -56,6 +58,13 @@ import org.elasticsearch.client.indexlifecycle.RolloverAction;
import org.elasticsearch.client.indexlifecycle.SetPriorityAction;
import org.elasticsearch.client.indexlifecycle.ShrinkAction;
import org.elasticsearch.client.indexlifecycle.UnfollowAction;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
@ -109,6 +118,7 @@ import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.hamcrest.CoreMatchers.endsWith;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.hasItems;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
@ -664,7 +674,7 @@ public class RestHighLevelClientTests extends ESTestCase {
public void testProvidedNamedXContents() {
List<NamedXContentRegistry.Entry> namedXContents = RestHighLevelClient.getProvidedNamedXContents();
assertEquals(20, namedXContents.size());
assertEquals(31, namedXContents.size());
Map<Class<?>, Integer> categories = new HashMap<>();
List<String> names = new ArrayList<>();
for (NamedXContentRegistry.Entry namedXContent : namedXContents) {
@ -674,7 +684,7 @@ public class RestHighLevelClientTests extends ESTestCase {
categories.put(namedXContent.categoryClass, counter + 1);
}
}
assertEquals("Had: " + categories, 4, categories.size());
assertEquals("Had: " + categories, 9, categories.size());
assertEquals(Integer.valueOf(3), categories.get(Aggregation.class));
assertTrue(names.contains(ChildrenAggregationBuilder.NAME));
assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME));
@ -698,6 +708,16 @@ public class RestHighLevelClientTests extends ESTestCase {
assertTrue(names.contains(ShrinkAction.NAME));
assertTrue(names.contains(FreezeAction.NAME));
assertTrue(names.contains(SetPriorityAction.NAME));
assertEquals(Integer.valueOf(1), categories.get(DataFrameAnalysis.class));
assertTrue(names.contains(OutlierDetection.NAME.getPreferredName()));
assertEquals(Integer.valueOf(1), categories.get(SyncConfig.class));
assertTrue(names.contains(TimeSyncConfig.NAME));
assertEquals(Integer.valueOf(1), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.Evaluation.class));
assertThat(names, hasItems(BinarySoftClassification.NAME));
assertEquals(Integer.valueOf(4), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.class));
assertThat(names, hasItems(AucRocMetric.NAME, PrecisionMetric.NAME, RecallMetric.NAME, ConfusionMatrixMetric.NAME));
assertEquals(Integer.valueOf(4), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.Result.class));
assertThat(names, hasItems(AucRocMetric.NAME, PrecisionMetric.NAME, RecallMetric.NAME, ConfusionMatrixMetric.NAME));
}
public void testApiNamingConventions() throws Exception {

View File

@ -78,7 +78,8 @@ public class SnapshotRequestConvertersTests extends ESTestCase {
Path repositoryLocation = PathUtils.get(".");
PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest(repository);
putRepositoryRequest.type(FsRepository.TYPE);
putRepositoryRequest.verify(randomBoolean());
final boolean verify = randomBoolean();
putRepositoryRequest.verify(verify);
putRepositoryRequest.settings(
Settings.builder()
@ -90,6 +91,11 @@ public class SnapshotRequestConvertersTests extends ESTestCase {
Request request = SnapshotRequestConverters.createRepository(putRepositoryRequest);
assertThat(request.getEndpoint(), equalTo(endpoint));
assertThat(request.getMethod(), equalTo(HttpPut.METHOD_NAME));
if (verify) {
assertThat(request.getParameters().get("verify"), nullValue());
} else {
assertThat(request.getParameters().get("verify"), equalTo("false"));
}
RequestConvertersTests.assertToXContentBody(putRepositoryRequest, request.getEntity());
}

View File

@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasSize;
public class ValidationExceptionTests extends ESTestCase {
private static final String ERROR = "some-error";
private static final String OTHER_ERROR = "some-other-error";
public void testWithError() {
ValidationException e = ValidationException.withError(ERROR, OTHER_ERROR);
assertThat(e.validationErrors(), hasSize(2));
assertThat(e.validationErrors(), contains(ERROR, OTHER_ERROR));
}
public void testWithErrors() {
ValidationException e = ValidationException.withErrors(Arrays.asList(ERROR, OTHER_ERROR));
assertThat(e.validationErrors(), hasSize(2));
assertThat(e.validationErrors(), contains(ERROR, OTHER_ERROR));
}
public void testAddValidationError() {
ValidationException e = new ValidationException();
assertThat(e.validationErrors(), hasSize(0));
e.addValidationError(ERROR);
assertThat(e.validationErrors(), hasSize(1));
assertThat(e.validationErrors(), contains(ERROR));
e.addValidationError(OTHER_ERROR);
assertThat(e.validationErrors(), hasSize(2));
assertThat(e.validationErrors(), contains(ERROR, OTHER_ERROR));
}
}

View File

@ -35,7 +35,6 @@ import java.util.List;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class GetDataFrameTransformResponseTests extends ESTestCase {
public void testXContentParser() throws IOException {
@ -79,6 +78,9 @@ public class GetDataFrameTransformResponseTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static org.elasticsearch.client.dataframe.transforms.SourceConfigTests.randomSourceConfig;
@ -55,7 +56,10 @@ public class PreviewDataFrameTransformRequestTests extends AbstractXContentTestC
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
public void testValidate() {

View File

@ -31,6 +31,7 @@ import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
@ -71,6 +72,9 @@ public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase<
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.client.dataframe.DataFrameNamedXContentProvider;
import org.elasticsearch.Version;
import org.elasticsearch.client.dataframe.transforms.pivot.PivotConfigTests;
import org.elasticsearch.common.settings.Settings;
@ -30,6 +31,7 @@ import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import static org.elasticsearch.client.dataframe.transforms.DestConfigTests.randomDestConfig;
@ -41,12 +43,17 @@ public class DataFrameTransformConfigTests extends AbstractXContentTestCase<Data
return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10),
randomSourceConfig(),
randomDestConfig(),
randomBoolean() ? null : randomSyncConfig(),
PivotConfigTests.randomPivotConfig(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 100),
randomBoolean() ? null : Instant.now(),
randomBoolean() ? null : Version.CURRENT.toString());
}
public static SyncConfig randomSyncConfig() {
return TimeSyncConfigTests.randomTimeSyncConfig();
}
@Override
protected DataFrameTransformConfig createTestInstance() {
return randomDataFrameTransformConfig();
@ -71,6 +78,9 @@ public class DataFrameTransformConfigTests extends AbstractXContentTestCase<Data
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

View File

@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * XContent round-trip tests for the client-side {@link TimeSyncConfig}.
 */
public class TimeSyncConfigTests extends AbstractXContentTestCase<TimeSyncConfig> {

    public static TimeSyncConfig randomTimeSyncConfig() {
        String field = randomAlphaOfLengthBetween(1, 10);
        TimeValue delay = new TimeValue(randomNonNegativeLong());
        return new TimeSyncConfig(field, delay);
    }

    @Override
    protected TimeSyncConfig createTestInstance() {
        return randomTimeSyncConfig();
    }

    @Override
    protected TimeSyncConfig doParseInstance(XContentParser parser) throws IOException {
        return TimeSyncConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // Unknown fields in the serialized form should be tolerated by the parser.
        return true;
    }
}

View File

@ -0,0 +1,59 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.dataframe.transforms.TimeSyncConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
 * Verifies that the server-side TimeSyncConfig survives the round trip to the
 * high-level-REST-client {@link TimeSyncConfig}: both the synced field name and
 * the delay must be preserved.
 */
public class TimeSyncConfigTests
    extends AbstractResponseTestCase<org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig, TimeSyncConfig> {

    public static org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig randomTimeSyncConfig() {
        String field = randomAlphaOfLengthBetween(1, 10);
        TimeValue delay = new TimeValue(randomNonNegativeLong());
        return new org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig(field, delay);
    }

    public static void assertHlrcEquals(org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig serverTestInstance,
                                        TimeSyncConfig clientInstance) {
        // Field-by-field comparison of the two representations.
        assertEquals(serverTestInstance.getField(), clientInstance.getField());
        assertEquals(serverTestInstance.getDelay(), clientInstance.getDelay());
    }

    @Override
    protected org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig createServerTestInstance() {
        return randomTimeSyncConfig();
    }

    @Override
    protected TimeSyncConfig doParseToClientInstance(XContentParser parser) throws IOException {
        return TimeSyncConfig.fromXContent(parser);
    }

    @Override
    protected void assertInstances(org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig serverTestInstance,
                                   TimeSyncConfig clientInstance) {
        assertHlrcEquals(serverTestInstance, clientInstance);
    }
}

View File

@ -171,6 +171,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
client.dataFrame().putDataFrameTransform(
request, RequestOptions.DEFAULT);
// end::put-data-frame-transform-execute
transformsToClean.add(request.getConfig().getId());
assertTrue(response.isAcknowledged());
}
@ -208,6 +209,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
// end::put-data-frame-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
transformsToClean.add(request.getConfig().getId());
}
}
@ -261,6 +263,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
// tag::stop-data-frame-transform-request-options
request.setWaitForCompletion(Boolean.TRUE); // <1>
request.setTimeout(TimeValue.timeValueSeconds(30)); // <2>
request.setAllowNoMatch(true); // <3>
// end::stop-data-frame-transform-request-options
// tag::stop-data-frame-transform-execute
@ -431,6 +434,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
.setQueryConfig(queryConfig)
.build(), // <1>
pivotConfig); // <2>
PreviewDataFrameTransformRequest request =
new PreviewDataFrameTransformRequest(transformConfig); // <3>
// end::preview-data-frame-transform-request
@ -496,12 +500,18 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
.setPivotConfig(pivotConfig)
.build();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
transformsToClean.add(id);
// tag::get-data-frame-transform-stats-request
GetDataFrameTransformStatsRequest request =
new GetDataFrameTransformStatsRequest(id); // <1>
// end::get-data-frame-transform-stats-request
// tag::get-data-frame-transform-stats-request-options
request.setPageParams(new PageParams(0, 100)); // <1>
request.setAllowNoMatch(true); // <2>
// end::get-data-frame-transform-stats-request-options
{
// tag::get-data-frame-transform-stats-execute
GetDataFrameTransformStatsResponse response =
@ -593,6 +603,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
// tag::get-data-frame-transform-request-options
request.setPageParams(new PageParams(0, 100)); // <1>
request.setAllowNoMatch(true); // <2>
// end::get-data-frame-transform-request-options
// tag::get-data-frame-transform-execute

View File

@ -194,7 +194,7 @@ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase {
//end::get-license-response
assertThat(currentLicense, containsString("trial"));
assertThat(currentLicense, containsString("client_rest-high-level_integTestCluster"));
assertThat(currentLicense, containsString("integTest"));
}
{
GetLicenseRequest request = new GetLicenseRequest();
@ -233,7 +233,7 @@ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase {
String currentLicense = response.getLicenseDefinition();
assertThat(currentLicense, startsWith("{"));
assertThat(currentLicense, containsString("trial"));
assertThat(currentLicense, containsString("client_rest-high-level_integTestCluster"));
assertThat(currentLicense, containsString("integTest"));
assertThat(currentLicense, endsWith("}"));
}
}

View File

@ -39,6 +39,7 @@ import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataResponse;
@ -47,6 +48,8 @@ import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
import org.elasticsearch.client.ml.FindFileStructureRequest;
import org.elasticsearch.client.ml.FindFileStructureResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
@ -61,8 +64,10 @@ import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetCategoriesResponse;
import org.elasticsearch.client.ml.GetModelSnapshotsRequest;
import org.elasticsearch.client.ml.GetModelSnapshotsResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
@ -75,6 +80,8 @@ import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.GetModelSnapshotsRequest;
import org.elasticsearch.client.ml.GetModelSnapshotsResponse;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
@ -92,6 +99,8 @@ import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.PutFilterRequest;
@ -101,8 +110,11 @@ import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.SetUpgradeModeRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
@ -118,6 +130,21 @@ import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.client.ml.datafeed.DelayedDataCheckConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsState;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.QueryConfig;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric.ConfusionMatrix;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.client.ml.filestructurefinder.FileStructure;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.AnalysisLimits;
@ -139,13 +166,18 @@ import org.elasticsearch.client.ml.job.results.Influencer;
import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.tasks.TaskId;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import java.io.IOException;
@ -870,18 +902,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = job.getId() + "-feed";
String indexName = "preview_data_2";
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName);
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
.setIndices(indexName)
.build();
@ -938,18 +959,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = job.getId() + "-feed";
String indexName = "start_data_2";
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName);
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
.setIndices(indexName)
.build();
@ -1067,18 +1077,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
String datafeedId1 = job.getId() + "-feed";
String indexName = "datafeed_stats_data_2";
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
createIndex(indexName);
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId())
.setIndices(indexName)
.build();
@ -2802,6 +2801,465 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
// Documentation IT for the get-data-frame-analytics API. The // tag:: and
// // end:: markers appear to delimit snippets extracted into the docs — do not
// add code or comments inside a tagged region (confirm against the docs build).
public void testGetDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex());
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// Synchronous variant: request, execute, and read back the single config.
// tag::get-data-frame-analytics-request
GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config"); // <1>
// end::get-data-frame-analytics-request
// tag::get-data-frame-analytics-execute
GetDataFrameAnalyticsResponse response = client.machineLearning().getDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::get-data-frame-analytics-execute
// tag::get-data-frame-analytics-response
List<DataFrameAnalyticsConfig> configs = response.getAnalytics();
// end::get-data-frame-analytics-response
assertThat(configs.size(), equalTo(1));
}
{
// Asynchronous variant: listener-based call, awaited via a latch.
GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config");
// tag::get-data-frame-analytics-execute-listener
ActionListener<GetDataFrameAnalyticsResponse> listener = new ActionListener<GetDataFrameAnalyticsResponse>() {
@Override
public void onResponse(GetDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-data-frame-analytics-execute-async
client.machineLearning().getDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
    /**
     * Documentation-snippet test for the Get Data Frame Analytics Stats API.
     * The {@code tag::}/{@code end::} comment pairs mark regions extracted into the docs.
     * Covers both the synchronous call and the async variant (latched so the test
     * can wait for completion).
     */
    public void testGetDataFrameAnalyticsStats() throws Exception {
        // Fixture: source index must exist and the config must be stored before stats can be fetched.
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex());
        RestHighLevelClient client = highLevelClient();
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        {
            // tag::get-data-frame-analytics-stats-request
            GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config"); // <1>
            // end::get-data-frame-analytics-stats-request
            // tag::get-data-frame-analytics-stats-execute
            GetDataFrameAnalyticsStatsResponse response =
                client.machineLearning().getDataFrameAnalyticsStats(request, RequestOptions.DEFAULT);
            // end::get-data-frame-analytics-stats-execute
            // tag::get-data-frame-analytics-stats-response
            List<DataFrameAnalyticsStats> stats = response.getAnalyticsStats();
            // end::get-data-frame-analytics-stats-response
            // One stats entry for the single stored config.
            assertThat(stats.size(), equalTo(1));
        }
        {
            GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config");
            // tag::get-data-frame-analytics-stats-execute-listener
            ActionListener<GetDataFrameAnalyticsStatsResponse> listener = new ActionListener<GetDataFrameAnalyticsStatsResponse>() {
                @Override
                public void onResponse(GetDataFrameAnalyticsStatsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-data-frame-analytics-stats-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-data-frame-analytics-stats-execute-async
            client.machineLearning().getDataFrameAnalyticsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-data-frame-analytics-stats-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the Put Data Frame Analytics API.
     * Each {@code tag::}/{@code end::} region is extracted verbatim into the docs and
     * demonstrates one piece of building a {@link DataFrameAnalyticsConfig}: query,
     * source, dest, analysis (default and customized outlier detection), analyzed
     * fields, the assembled config, and finally the request/execute/response flow.
     * Second block demonstrates the async variant.
     */
    public void testPutDataFrameAnalytics() throws Exception {
        // Fixture: the source index referenced by the config must exist.
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex());
        RestHighLevelClient client = highLevelClient();
        {
            // tag::put-data-frame-analytics-query-config
            QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
            // end::put-data-frame-analytics-query-config
            // tag::put-data-frame-analytics-source-config
            DataFrameAnalyticsSource sourceConfig = DataFrameAnalyticsSource.builder() // <1>
                .setIndex("put-test-source-index") // <2>
                .setQueryConfig(queryConfig) // <3>
                .build();
            // end::put-data-frame-analytics-source-config
            // tag::put-data-frame-analytics-dest-config
            DataFrameAnalyticsDest destConfig = DataFrameAnalyticsDest.builder() // <1>
                .setIndex("put-test-dest-index") // <2>
                .build();
            // end::put-data-frame-analytics-dest-config
            // tag::put-data-frame-analytics-analysis-default
            DataFrameAnalysis outlierDetection = OutlierDetection.createDefault(); // <1>
            // end::put-data-frame-analytics-analysis-default
            // tag::put-data-frame-analytics-analysis-customized
            DataFrameAnalysis outlierDetectionCustomized = OutlierDetection.builder() // <1>
                .setMethod(OutlierDetection.Method.DISTANCE_KNN) // <2>
                .setNNeighbors(5) // <3>
                .build();
            // end::put-data-frame-analytics-analysis-customized
            // tag::put-data-frame-analytics-analyzed-fields
            FetchSourceContext analyzedFields =
                new FetchSourceContext(
                    true,
                    new String[] { "included_field_1", "included_field_2" },
                    new String[] { "excluded_field" });
            // end::put-data-frame-analytics-analyzed-fields
            // tag::put-data-frame-analytics-config
            DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder("my-analytics-config") // <1>
                .setSource(sourceConfig) // <2>
                .setDest(destConfig) // <3>
                .setAnalysis(outlierDetection) // <4>
                .setAnalyzedFields(analyzedFields) // <5>
                .setModelMemoryLimit(new ByteSizeValue(5, ByteSizeUnit.MB)) // <6>
                .build();
            // end::put-data-frame-analytics-config
            // tag::put-data-frame-analytics-request
            PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(config); // <1>
            // end::put-data-frame-analytics-request
            // tag::put-data-frame-analytics-execute
            PutDataFrameAnalyticsResponse response = client.machineLearning().putDataFrameAnalytics(request, RequestOptions.DEFAULT);
            // end::put-data-frame-analytics-execute
            // tag::put-data-frame-analytics-response
            DataFrameAnalyticsConfig createdConfig = response.getConfig();
            // end::put-data-frame-analytics-response
            // The service echoes back the stored config; verify it kept the id we sent.
            assertThat(createdConfig.getId(), equalTo("my-analytics-config"));
        }
        {
            PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG);
            // tag::put-data-frame-analytics-execute-listener
            ActionListener<PutDataFrameAnalyticsResponse> listener = new ActionListener<PutDataFrameAnalyticsResponse>() {
                @Override
                public void onResponse(PutDataFrameAnalyticsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::put-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-data-frame-analytics-execute-async
            client.machineLearning().putDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the Delete Data Frame Analytics API.
     * The {@code tag::}/{@code end::} regions are extracted into the docs.
     * The config is re-put between the two blocks because the first (synchronous)
     * block deletes it and the async block needs something to delete again.
     */
    public void testDeleteDataFrameAnalytics() throws Exception {
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex());
        RestHighLevelClient client = highLevelClient();
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        {
            // tag::delete-data-frame-analytics-request
            DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config"); // <1>
            // end::delete-data-frame-analytics-request
            // tag::delete-data-frame-analytics-execute
            AcknowledgedResponse response = client.machineLearning().deleteDataFrameAnalytics(request, RequestOptions.DEFAULT);
            // end::delete-data-frame-analytics-execute
            // tag::delete-data-frame-analytics-response
            boolean acknowledged = response.isAcknowledged();
            // end::delete-data-frame-analytics-response
            assertThat(acknowledged, is(true));
        }
        // Re-create the config deleted above so the async example has work to do.
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        {
            DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config");
            // tag::delete-data-frame-analytics-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::delete-data-frame-analytics-execute-async
            client.machineLearning().deleteDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the Start Data Frame Analytics API.
     * Indexes a document into the source index (analytics needs data to run on),
     * puts the config, then demonstrates the synchronous and async start calls.
     * After each start, {@code assertBusy} waits for the short-lived job to reach
     * STOPPED so the next block starts from a clean state.
     */
    public void testStartDataFrameAnalytics() throws Exception {
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex());
        // Seed one document, refreshed immediately so the analytics job can see it.
        highLevelClient().index(
            new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()).source(XContentType.JSON, "total", 10000)
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
        RestHighLevelClient client = highLevelClient();
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        {
            // tag::start-data-frame-analytics-request
            StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config"); // <1>
            // end::start-data-frame-analytics-request
            // tag::start-data-frame-analytics-execute
            AcknowledgedResponse response = client.machineLearning().startDataFrameAnalytics(request, RequestOptions.DEFAULT);
            // end::start-data-frame-analytics-execute
            // tag::start-data-frame-analytics-response
            boolean acknowledged = response.isAcknowledged();
            // end::start-data-frame-analytics-response
            assertThat(acknowledged, is(true));
        }
        // Wait for the job kicked off above to finish before starting it again.
        assertBusy(
            () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
            30, TimeUnit.SECONDS);
        {
            StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config");
            // tag::start-data-frame-analytics-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::start-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::start-data-frame-analytics-execute-async
            client.machineLearning().startDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::start-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
        // Leave the cluster quiescent: wait for the second run to stop as well.
        assertBusy(
            () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
            30, TimeUnit.SECONDS);
    }
public void testStopDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex());
highLevelClient().index(
new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()).source(XContentType.JSON, "total", 10000)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// tag::stop-data-frame-analytics-request
StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config"); // <1>
// end::stop-data-frame-analytics-request
// tag::stop-data-frame-analytics-execute
StopDataFrameAnalyticsResponse response = client.machineLearning().stopDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::stop-data-frame-analytics-execute
// tag::stop-data-frame-analytics-response
boolean acknowledged = response.isStopped();
// end::stop-data-frame-analytics-response
assertThat(acknowledged, is(true));
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
{
StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config");
// tag::stop-data-frame-analytics-execute-listener
ActionListener<StopDataFrameAnalyticsResponse> listener = new ActionListener<StopDataFrameAnalyticsResponse>() {
@Override
public void onResponse(StopDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::stop-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::stop-data-frame-analytics-execute-async
client.machineLearning().stopDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::stop-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
}
    /**
     * Documentation-snippet test for the Evaluate Data Frame API.
     * Builds a ten-document index of (label, p) pairs — a boolean ground-truth
     * label and a predicted probability — then evaluates binary soft classification
     * metrics (precision, recall, confusion matrix, AUC-ROC) over it.
     * The per-document {@code // #N} comments are referenced by the confusion-matrix
     * assertions below (e.g. docs #8 and #9 are the two true positives at threshold 0.5).
     */
    public void testEvaluateDataFrame() throws Exception {
        String indexName = "evaluate-test-index";
        CreateIndexRequest createIndexRequest =
            new CreateIndexRequest(indexName)
                .mapping(XContentFactory.jsonBuilder().startObject()
                    .startObject("properties")
                        .startObject("label")
                            .field("type", "keyword")
                        .endObject()
                        .startObject("p")
                            .field("type", "double")
                        .endObject()
                    .endObject()
                .endObject());
        BulkRequest bulkRequest =
            new BulkRequest(indexName)
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
                .add(new IndexRequest().source(XContentType.JSON, "label", false, "p", 0.1))  // #0
                .add(new IndexRequest().source(XContentType.JSON, "label", false, "p", 0.2))  // #1
                .add(new IndexRequest().source(XContentType.JSON, "label", false, "p", 0.3))  // #2
                .add(new IndexRequest().source(XContentType.JSON, "label", false, "p", 0.4))  // #3
                .add(new IndexRequest().source(XContentType.JSON, "label", false, "p", 0.7))  // #4
                .add(new IndexRequest().source(XContentType.JSON, "label", true, "p", 0.2))  // #5
                .add(new IndexRequest().source(XContentType.JSON, "label", true, "p", 0.3))  // #6
                .add(new IndexRequest().source(XContentType.JSON, "label", true, "p", 0.4))  // #7
                .add(new IndexRequest().source(XContentType.JSON, "label", true, "p", 0.8))  // #8
                .add(new IndexRequest().source(XContentType.JSON, "label", true, "p", 0.9)); // #9
        RestHighLevelClient client = highLevelClient();
        client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
        client.bulk(bulkRequest, RequestOptions.DEFAULT);
        {
            // tag::evaluate-data-frame-request
            EvaluateDataFrameRequest request = new EvaluateDataFrameRequest( // <1>
                indexName, // <2>
                new BinarySoftClassification( // <3>
                    "label", // <4>
                    "p", // <5>
                    // Evaluation metrics // <6>
                    PrecisionMetric.at(0.4, 0.5, 0.6), // <7>
                    RecallMetric.at(0.5, 0.7), // <8>
                    ConfusionMatrixMetric.at(0.5), // <9>
                    AucRocMetric.withCurve())); // <10>
            // end::evaluate-data-frame-request
            // tag::evaluate-data-frame-execute
            EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
            // end::evaluate-data-frame-execute
            // tag::evaluate-data-frame-response
            List<EvaluationMetric.Result> metrics = response.getMetrics(); // <1>
            PrecisionMetric.Result precisionResult = response.getMetricByName(PrecisionMetric.NAME); // <2>
            double precision = precisionResult.getScoreByThreshold("0.4"); // <3>
            ConfusionMatrixMetric.Result confusionMatrixResult = response.getMetricByName(ConfusionMatrixMetric.NAME); // <4>
            ConfusionMatrix confusionMatrix = confusionMatrixResult.getScoreByThreshold("0.5"); // <5>
            // end::evaluate-data-frame-response
            // All four requested metrics must be present in the response.
            assertThat(
                metrics.stream().map(m -> m.getMetricName()).collect(Collectors.toList()),
                containsInAnyOrder(PrecisionMetric.NAME, RecallMetric.NAME, ConfusionMatrixMetric.NAME, AucRocMetric.NAME));
            // At threshold 0.4: predicted-positive docs are #3, #4, #7, #8, #9, of which #7, #8, #9 are true -> precision 3/5 = 0.6.
            assertThat(precision, closeTo(0.6, 1e-9));
            assertThat(confusionMatrix.getTruePositives(), CoreMatchers.equalTo(2L)); // docs #8 and #9
            assertThat(confusionMatrix.getFalsePositives(), CoreMatchers.equalTo(1L)); // doc #4
            assertThat(confusionMatrix.getTrueNegatives(), CoreMatchers.equalTo(4L)); // docs #0, #1, #2 and #3
            assertThat(confusionMatrix.getFalseNegatives(), CoreMatchers.equalTo(3L)); // docs #5, #6 and #7
        }
        {
            EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(
                indexName,
                new BinarySoftClassification(
                    "label",
                    "p",
                    PrecisionMetric.at(0.4, 0.5, 0.6),
                    RecallMetric.at(0.5, 0.7),
                    ConfusionMatrixMetric.at(0.5),
                    AucRocMetric.withCurve()));
            // tag::evaluate-data-frame-execute-listener
            ActionListener<EvaluateDataFrameResponse> listener = new ActionListener<EvaluateDataFrameResponse>() {
                @Override
                public void onResponse(EvaluateDataFrameResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::evaluate-data-frame-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::evaluate-data-frame-execute-async
            client.machineLearning().evaluateDataFrameAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::evaluate-data-frame-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
public void testCreateFilter() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -3140,4 +3598,39 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
assertThat(createdFilter.getId(), equalTo("my_safe_domains"));
return createdFilter.getId();
}
private void createIndex(String indexName) throws IOException {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
}
private DataFrameAnalyticsState getAnalyticsState(String configId) throws IOException {
GetDataFrameAnalyticsStatsResponse statsResponse =
highLevelClient().machineLearning().getDataFrameAnalyticsStats(
new GetDataFrameAnalyticsStatsRequest(configId), RequestOptions.DEFAULT);
assertThat(statsResponse.getAnalyticsStats(), hasSize(1));
DataFrameAnalyticsStats stats = statsResponse.getAnalyticsStats().get(0);
return stats.getState();
}
    // Shared fixture used by the data-frame-analytics tests above: a minimal config
    // with id "my-analytics-config" that reads from "put-test-source-index", writes to
    // "put-test-dest-index", and uses default outlier detection as its analysis.
    private static final DataFrameAnalyticsConfig DF_ANALYTICS_CONFIG =
        DataFrameAnalyticsConfig.builder("my-analytics-config")
            .setSource(DataFrameAnalyticsSource.builder()
                .setIndex("put-test-source-index")
                .build())
            .setDest(DataFrameAnalyticsDest.builder()
                .setIndex("put-test-dest-index")
                .build())
            .setAnalysis(OutlierDetection.createDefault())
            .build();
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * XContent round-trip tests for {@link AucRocMetric.AucRocPoint}.
 */
public class AucRocMetricAucRocPointTests extends AbstractXContentTestCase<AucRocMetric.AucRocPoint> {

    /** Builds a point from three random doubles; also reused by {@code AucRocMetricResultTests}. */
    static AucRocMetric.AucRocPoint randomPoint() {
        double first = randomDouble();
        double second = randomDouble();
        double third = randomDouble();
        return new AucRocMetric.AucRocPoint(first, second, third);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected AucRocMetric.AucRocPoint createTestInstance() {
        return randomPoint();
    }

    @Override
    protected AucRocMetric.AucRocPoint doParseInstance(XContentParser parser) throws IOException {
        return AucRocMetric.AucRocPoint.fromXContent(parser);
    }
}

View File

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.client.ml.AucRocMetricAucRocPointTests.randomPoint;
/**
 * XContent round-trip tests for {@link AucRocMetric.Result}.
 */
public class AucRocMetricResultTests extends AbstractXContentTestCase<AucRocMetric.Result> {

    /** Builds a result with a random score and a curve of 1–10 random points. */
    static AucRocMetric.Result randomResult() {
        return new AucRocMetric.Result(
            randomDouble(),
            Stream.generate(AucRocMetricAucRocPointTests::randomPoint)
                .limit(randomIntBetween(1, 10))
                .collect(Collectors.toList()));
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // allow unknown fields in the root of the object only
        return field -> field.isEmpty() == false;
    }

    @Override
    protected AucRocMetric.Result createTestInstance() {
        return randomResult();
    }

    @Override
    protected AucRocMetric.Result doParseInstance(XContentParser parser) throws IOException {
        return AucRocMetric.Result.fromXContent(parser);
    }
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * XContent round-trip tests for {@link ConfusionMatrixMetric.ConfusionMatrix}.
 */
public class ConfusionMatrixMetricConfusionMatrixTests extends AbstractXContentTestCase<ConfusionMatrixMetric.ConfusionMatrix> {

    /** Builds a matrix from four random counts; also reused by {@code ConfusionMatrixMetricResultTests}. */
    static ConfusionMatrixMetric.ConfusionMatrix randomConfusionMatrix() {
        int first = randomInt();
        int second = randomInt();
        int third = randomInt();
        int fourth = randomInt();
        return new ConfusionMatrixMetric.ConfusionMatrix(first, second, third, fourth);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected ConfusionMatrixMetric.ConfusionMatrix createTestInstance() {
        return randomConfusionMatrix();
    }

    @Override
    protected ConfusionMatrixMetric.ConfusionMatrix doParseInstance(XContentParser parser) throws IOException {
        return ConfusionMatrixMetric.ConfusionMatrix.fromXContent(parser);
    }
}

View File

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.client.ml.ConfusionMatrixMetricConfusionMatrixTests.randomConfusionMatrix;
/**
 * XContent round-trip tests for {@link ConfusionMatrixMetric.Result}.
 */
public class ConfusionMatrixMetricResultTests extends AbstractXContentTestCase<ConfusionMatrixMetric.Result> {

    /** Builds a result mapping 1–5 random threshold strings to random confusion matrices. */
    static ConfusionMatrixMetric.Result randomResult() {
        return new ConfusionMatrixMetric.Result(
            Stream.generate(ConfusionMatrixMetricConfusionMatrixTests::randomConfusionMatrix)
                .limit(randomIntBetween(1, 5))
                .collect(Collectors.toMap(v -> String.valueOf(randomDouble()), v -> v)));
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // disallow unknown fields in the root of the object as field names must be parsable as numbers
        return String::isEmpty;
    }

    @Override
    protected ConfusionMatrixMetric.Result createTestInstance() {
        return randomResult();
    }

    @Override
    protected ConfusionMatrixMetric.Result doParseInstance(XContentParser parser) throws IOException {
        return ConfusionMatrixMetric.Result.fromXContent(parser);
    }
}

View File

@ -0,0 +1,39 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
/**
 * Validation tests for {@link DeleteDataFrameAnalyticsRequest}.
 */
public class DeleteDataFrameAnalyticsRequestTests extends ESTestCase {

    public void testValidate_Ok() {
        // Any non-null id — including the empty string — passes validation.
        DeleteDataFrameAnalyticsRequest validIdRequest = new DeleteDataFrameAnalyticsRequest("valid-id");
        DeleteDataFrameAnalyticsRequest emptyIdRequest = new DeleteDataFrameAnalyticsRequest("");
        assertEquals(Optional.empty(), validIdRequest.validate());
        assertEquals(Optional.empty(), emptyIdRequest.validate());
    }

    public void testValidate_Failure() {
        // A null id must produce a validation error naming the missing field.
        DeleteDataFrameAnalyticsRequest nullIdRequest = new DeleteDataFrameAnalyticsRequest(null);
        assertThat(nullIdRequest.validate().get().getMessage(),
            containsString("data frame analytics id must not be null"));
    }
}

View File

@ -0,0 +1,76 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
/**
 * XContent round-trip tests for {@link EvaluateDataFrameResponse}.
 */
public class EvaluateDataFrameResponseTests extends AbstractXContentTestCase<EvaluateDataFrameResponse> {

    /** Builds a response containing a random subset of the four soft-classification metric results. */
    public static EvaluateDataFrameResponse randomResponse() {
        List<EvaluationMetric.Result> results = new ArrayList<>();
        if (randomBoolean()) {
            results.add(AucRocMetricResultTests.randomResult());
        }
        if (randomBoolean()) {
            results.add(PrecisionMetricResultTests.randomResult());
        }
        if (randomBoolean()) {
            results.add(RecallMetricResultTests.randomResult());
        }
        if (randomBoolean()) {
            results.add(ConfusionMatrixMetricResultTests.randomResult());
        }
        String evaluationName = randomAlphaOfLength(5);
        return new EvaluateDataFrameResponse(evaluationName, results);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // allow unknown fields in the metrics map (i.e. alongside named metrics like "precision" or "recall")
        return field -> field.isEmpty() || field.contains(".");
    }

    @Override
    protected EvaluateDataFrameResponse createTestInstance() {
        return randomResponse();
    }

    @Override
    protected EvaluateDataFrameResponse doParseInstance(XContentParser parser) throws IOException {
        return EvaluateDataFrameResponse.fromXContent(parser);
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // Metric results are polymorphic, so parsing needs the ML evaluation named-xcontent entries.
        return new NamedXContentRegistry(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
    }
}

View File

@ -0,0 +1,39 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
/**
 * Validation tests for {@link GetDataFrameAnalyticsRequest}.
 */
public class GetDataFrameAnalyticsRequestTests extends ESTestCase {

    public void testValidate_Ok() {
        // Any non-empty id list — including one containing the empty string — passes validation.
        GetDataFrameAnalyticsRequest validIdRequest = new GetDataFrameAnalyticsRequest("valid-id");
        GetDataFrameAnalyticsRequest emptyIdRequest = new GetDataFrameAnalyticsRequest("");
        assertEquals(Optional.empty(), validIdRequest.validate());
        assertEquals(Optional.empty(), emptyIdRequest.validate());
    }

    public void testValidate_Failure() {
        // No ids at all must produce a validation error naming the missing field.
        GetDataFrameAnalyticsRequest noIdsRequest = new GetDataFrameAnalyticsRequest(new String[0]);
        assertThat(noIdsRequest.validate().get().getMessage(),
            containsString("data frame analytics id must not be null"));
    }
}

View File

@ -0,0 +1,39 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
/**
 * Validation tests for {@link GetDataFrameAnalyticsStatsRequest}.
 */
public class GetDataFrameAnalyticsStatsRequestTests extends ESTestCase {

    public void testValidate_Ok() {
        // Any non-empty id list — including one containing the empty string — passes validation.
        GetDataFrameAnalyticsStatsRequest validIdRequest = new GetDataFrameAnalyticsStatsRequest("valid-id");
        GetDataFrameAnalyticsStatsRequest emptyIdRequest = new GetDataFrameAnalyticsStatsRequest("");
        assertEquals(Optional.empty(), validIdRequest.validate());
        assertEquals(Optional.empty(), emptyIdRequest.validate());
    }

    public void testValidate_Failure() {
        // No ids at all must produce a validation error naming the missing field.
        GetDataFrameAnalyticsStatsRequest noIdsRequest = new GetDataFrameAnalyticsStatsRequest(new String[0]);
        assertThat(noIdsRequest.validate().get().getMessage(),
            containsString("data frame analytics id must not be null"));
    }
}

View File

@ -0,0 +1,60 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * xContent round-trip tests for {@code PrecisionMetric.Result}.
 */
public class PrecisionMetricResultTests extends AbstractXContentTestCase<PrecisionMetric.Result> {

    static PrecisionMetric.Result randomResult() {
        // Build 1-5 entries, each mapping a random threshold string to a random precision value.
        long entryCount = randomIntBetween(1, 5);
        return new PrecisionMetric.Result(
            Stream.generate(() -> randomDouble())
                .limit(entryCount)
                .collect(Collectors.toMap(unused -> String.valueOf(randomDouble()), value -> value)));
    }

    @Override
    protected PrecisionMetric.Result createTestInstance() {
        return randomResult();
    }

    @Override
    protected PrecisionMetric.Result doParseInstance(XContentParser parser) throws IOException {
        return PrecisionMetric.Result.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // Root-level field names must be parsable as numbers, so no unknown fields there.
        return field -> field.isEmpty();
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigTests;
import org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
/**
 * Validation and xContent parsing tests for {@code PutDataFrameAnalyticsRequest}.
 */
public class PutDataFrameAnalyticsRequestTests extends AbstractXContentTestCase<PutDataFrameAnalyticsRequest> {

    public void testValidate_Ok() {
        // A request wrapping a well-formed config produces no validation error.
        assertFalse(createTestInstance().validate().isPresent());
    }

    public void testValidate_Failure() {
        // A null config must be rejected with an explanatory message.
        Optional<ValidationException> validation = new PutDataFrameAnalyticsRequest(null).validate();
        assertTrue(validation.isPresent());
        assertThat(validation.get().getMessage(), containsString("put requires a non-null data frame analytics config"));
    }

    @Override
    protected PutDataFrameAnalyticsRequest createTestInstance() {
        return new PutDataFrameAnalyticsRequest(DataFrameAnalyticsConfigTests.randomDataFrameAnalyticsConfig());
    }

    @Override
    protected PutDataFrameAnalyticsRequest doParseInstance(XContentParser parser) throws IOException {
        return new PutDataFrameAnalyticsRequest(DataFrameAnalyticsConfig.fromXContent(parser));
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // Register both the search-module and ML analysis parsers needed to round-trip the config.
        List<NamedXContentRegistry.Entry> entries =
            new ArrayList<>(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents());
        entries.addAll(new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers());
        return new NamedXContentRegistry(entries);
    }
}

View File

@ -0,0 +1,60 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * xContent round-trip tests for {@code RecallMetric.Result}.
 */
public class RecallMetricResultTests extends AbstractXContentTestCase<RecallMetric.Result> {

    static RecallMetric.Result randomResult() {
        // Build 1-5 entries, each mapping a random threshold string to a random recall value.
        long entryCount = randomIntBetween(1, 5);
        return new RecallMetric.Result(
            Stream.generate(() -> randomDouble())
                .limit(entryCount)
                .collect(Collectors.toMap(unused -> String.valueOf(randomDouble()), value -> value)));
    }

    @Override
    protected RecallMetric.Result createTestInstance() {
        return randomResult();
    }

    @Override
    protected RecallMetric.Result doParseInstance(XContentParser parser) throws IOException {
        return RecallMetric.Result.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // Root-level field names must be parsable as numbers, so no unknown fields there.
        return field -> field.isEmpty();
    }
}

View File

@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
/**
 * Validation tests for {@code StartDataFrameAnalyticsRequest}.
 */
public class StartDataFrameAnalyticsRequestTests extends ESTestCase {

    public void testValidate_Ok() {
        // A non-null id validates regardless of how the timeout is set.
        assertFalse(new StartDataFrameAnalyticsRequest("foo").validate().isPresent());
        assertFalse(new StartDataFrameAnalyticsRequest("foo").setTimeout(null).validate().isPresent());
        assertFalse(new StartDataFrameAnalyticsRequest("foo").setTimeout(TimeValue.ZERO).validate().isPresent());
    }

    public void testValidate_Failure() {
        // A null id is rejected whether or not a timeout is set.
        String expected = "data frame analytics id must not be null";
        assertThat(new StartDataFrameAnalyticsRequest(null).validate().get().getMessage(), containsString(expected));
        assertThat(new StartDataFrameAnalyticsRequest(null).setTimeout(TimeValue.ZERO).validate().get().getMessage(),
            containsString(expected));
    }
}

View File

@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
/**
 * Validation tests for {@code StopDataFrameAnalyticsRequest}.
 */
public class StopDataFrameAnalyticsRequestTests extends ESTestCase {

    public void testValidate_Ok() {
        // A non-null id validates regardless of how the timeout is set.
        assertFalse(new StopDataFrameAnalyticsRequest("foo").validate().isPresent());
        assertFalse(new StopDataFrameAnalyticsRequest("foo").setTimeout(null).validate().isPresent());
        assertFalse(new StopDataFrameAnalyticsRequest("foo").setTimeout(TimeValue.ZERO).validate().isPresent());
    }

    public void testValidate_Failure() {
        // A null id is rejected whether or not a timeout is set.
        String expected = "data frame analytics id must not be null";
        assertThat(new StopDataFrameAnalyticsRequest(null).validate().get().getMessage(), containsString(expected));
        assertThat(new StopDataFrameAnalyticsRequest(null).setTimeout(TimeValue.ZERO).validate().get().getMessage(),
            containsString(expected));
    }
}

View File

@ -0,0 +1,42 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * xContent round-trip tests for {@code StopDataFrameAnalyticsResponse}.
 */
public class StopDataFrameAnalyticsResponseTests extends AbstractXContentTestCase<StopDataFrameAnalyticsResponse> {

    @Override
    protected StopDataFrameAnalyticsResponse doParseInstance(XContentParser parser) throws IOException {
        return StopDataFrameAnalyticsResponse.fromXContent(parser);
    }

    @Override
    protected StopDataFrameAnalyticsResponse createTestInstance() {
        // The response wraps a single "stopped" flag; exercise both values.
        return new StopDataFrameAnalyticsResponse(randomBoolean());
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }
}

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import static org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSourceTests.randomSourceConfig;
import static org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDestTests.randomDestConfig;
import static org.elasticsearch.client.ml.dataframe.OutlierDetectionTests.randomOutlierDetection;
/**
 * xContent round-trip tests for {@code DataFrameAnalyticsConfig}.
 */
public class DataFrameAnalyticsConfigTests extends AbstractXContentTestCase<DataFrameAnalyticsConfig> {

    public static DataFrameAnalyticsConfig randomDataFrameAnalyticsConfig() {
        DataFrameAnalyticsConfig.Builder config = DataFrameAnalyticsConfig.builder(randomAlphaOfLengthBetween(1, 10))
            .setSource(randomSourceConfig())
            .setDest(randomDestConfig())
            .setAnalysis(randomOutlierDetection());
        // Optionally restrict the set of analyzed fields via include/exclude patterns.
        if (randomBoolean()) {
            FetchSourceContext analyzedFields = new FetchSourceContext(
                true,
                generateRandomStringArray(10, 10, false, false),
                generateRandomStringArray(10, 10, false, false));
            config.setAnalyzedFields(analyzedFields);
        }
        // Optionally set an explicit model memory limit (1-16 MB or GB).
        if (randomBoolean()) {
            config.setModelMemoryLimit(new ByteSizeValue(randomIntBetween(1, 16), randomFrom(ByteSizeUnit.MB, ByteSizeUnit.GB)));
        }
        return config.build();
    }

    @Override
    protected DataFrameAnalyticsConfig createTestInstance() {
        return randomDataFrameAnalyticsConfig();
    }

    @Override
    protected DataFrameAnalyticsConfig doParseInstance(XContentParser parser) throws IOException {
        return DataFrameAnalyticsConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // Unknown fields are tolerated only at the root of the object.
        return field -> field.isEmpty() == false;
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // Register both the search-module and ML analysis parsers needed to round-trip the config.
        List<NamedXContentRegistry.Entry> entries =
            new ArrayList<>(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents());
        entries.addAll(new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers());
        return new NamedXContentRegistry(entries);
    }
}

View File

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
public class DataFrameAnalyticsDestTests extends AbstractXContentTestCase<DataFrameAnalyticsDest> {
public static DataFrameAnalyticsDest randomDestConfig() {
return DataFrameAnalyticsDest.builder()
.setIndex(randomAlphaOfLengthBetween(1, 10))
.setResultsField(randomBoolean() ? null : randomAlphaOfLengthBetween(1, 10))
.build();
}
@Override
protected DataFrameAnalyticsDest doParseInstance(XContentParser parser) throws IOException {
return DataFrameAnalyticsDest.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected DataFrameAnalyticsDest createTestInstance() {
return randomDestConfig();
}
}

View File

@ -0,0 +1,70 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
import static java.util.Collections.emptyList;
import static org.elasticsearch.client.ml.dataframe.QueryConfigTests.randomQueryConfig;
public class DataFrameAnalyticsSourceTests extends AbstractXContentTestCase<DataFrameAnalyticsSource> {
public static DataFrameAnalyticsSource randomSourceConfig() {
return DataFrameAnalyticsSource.builder()
.setIndex(randomAlphaOfLengthBetween(1, 10))
.setQueryConfig(randomBoolean() ? null : randomQueryConfig())
.build();
}
@Override
protected DataFrameAnalyticsSource doParseInstance(XContentParser parser) throws IOException {
return DataFrameAnalyticsSource.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only as QueryConfig stores a Map<String, Object>
return field -> !field.isEmpty();
}
@Override
protected DataFrameAnalyticsSource createTestInstance() {
return randomSourceConfig();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
}

View File

@ -0,0 +1,66 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.client.ml.NodeAttributesTests;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
/**
 * xContent round-trip tests for {@code DataFrameAnalyticsStats}.
 */
public class DataFrameAnalyticsStatsTests extends ESTestCase {
// Round-trips a random stats object through xContent; node attributes are a free-form
// map, so unknown fields under "node.attributes" are excluded from the mutation test.
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
DataFrameAnalyticsStatsTests::randomDataFrameAnalyticsStats,
DataFrameAnalyticsStatsTests::toXContent,
DataFrameAnalyticsStats::fromXContent)
.supportsUnknownFields(true)
.randomFieldsExcludeFilter(field -> field.startsWith("node.attributes"))
.test();
}
// Creates a stats instance with a random id and state; progress, node and
// assignment explanation are each independently present or null.
public static DataFrameAnalyticsStats randomDataFrameAnalyticsStats() {
return new DataFrameAnalyticsStats(
randomAlphaOfLengthBetween(1, 10),
randomFrom(DataFrameAnalyticsState.values()),
randomBoolean() ? null : randomIntBetween(0, 100),
randomBoolean() ? null : NodeAttributesTests.createRandom(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20));
}
// Serializes the stats object; optional fields are emitted only when non-null,
// mirroring what the parser in DataFrameAnalyticsStats expects.
public static void toXContent(DataFrameAnalyticsStats stats, XContentBuilder builder) throws IOException {
builder.startObject();
builder.field(DataFrameAnalyticsStats.ID.getPreferredName(), stats.getId());
builder.field(DataFrameAnalyticsStats.STATE.getPreferredName(), stats.getState().value());
if (stats.getProgressPercent() != null) {
builder.field(DataFrameAnalyticsStats.PROGRESS_PERCENT.getPreferredName(), stats.getProgressPercent());
}
if (stats.getNode() != null) {
builder.field(DataFrameAnalyticsStats.NODE.getPreferredName(), stats.getNode());
}
if (stats.getAssignmentExplanation() != null) {
builder.field(DataFrameAnalyticsStats.ASSIGNMENT_EXPLANATION.getPreferredName(), stats.getAssignmentExplanation());
}
builder.endObject();
}
}

View File

@ -0,0 +1,73 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
/**
 * xContent round-trip and parameter tests for {@code OutlierDetection}.
 */
public class OutlierDetectionTests extends AbstractXContentTestCase<OutlierDetection> {

    public static OutlierDetection randomOutlierDetection() {
        // Every parameter is independently either unset (null) or a random valid value.
        return OutlierDetection.builder()
            .setNNeighbors(randomBoolean() ? null : randomIntBetween(1, 20))
            .setMethod(randomBoolean() ? null : randomFrom(OutlierDetection.Method.values()))
            .setMinScoreToWriteFeatureInfluence(randomBoolean() ? null : randomDoubleBetween(0.0, 1.0, true))
            .build();
    }

    @Override
    protected OutlierDetection doParseInstance(XContentParser parser) throws IOException {
        return OutlierDetection.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected OutlierDetection createTestInstance() {
        return randomOutlierDetection();
    }

    public void testGetParams_GivenDefaults() {
        // The default configuration leaves every parameter unset.
        OutlierDetection defaults = OutlierDetection.createDefault();
        assertNull(defaults.getNNeighbors());
        assertNull(defaults.getMethod());
        assertNull(defaults.getMinScoreToWriteFeatureInfluence());
    }

    public void testGetParams_GivenExplicitValues() {
        // Explicitly set values are returned unchanged by the getters.
        OutlierDetection configured = OutlierDetection.builder()
            .setNNeighbors(42)
            .setMethod(OutlierDetection.Method.LDOF)
            .setMinScoreToWriteFeatureInfluence(0.5)
            .build();
        assertThat(configured.getNNeighbors(), equalTo(42));
        assertThat(configured.getMethod(), equalTo(OutlierDetection.Method.LDOF));
        assertThat(configured.getMinScoreToWriteFeatureInfluence(), closeTo(0.5, 1E-9));
    }
}

View File

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import static java.util.Collections.emptyList;
/**
 * xContent round-trip tests for {@code QueryConfig}.
 */
public class QueryConfigTests extends AbstractXContentTestCase<QueryConfig> {

    public static QueryConfig randomQueryConfig() {
        // Randomly wrap one of the two simplest query builders.
        if (randomBoolean()) {
            return new QueryConfig(new MatchAllQueryBuilder());
        }
        return new QueryConfig(new MatchNoneQueryBuilder());
    }

    @Override
    protected QueryConfig createTestInstance() {
        return randomQueryConfig();
    }

    @Override
    protected QueryConfig doParseInstance(XContentParser parser) throws IOException {
        return QueryConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // Query builders (match_all, match_none, ...) are registered by the search module.
        SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
        return new NamedXContentRegistry(searchModule.getNamedXContents());
    }
}

View File

@ -20,7 +20,7 @@ RUN groupadd -g 1000 elasticsearch && \
WORKDIR /usr/share/elasticsearch
COPY ${elasticsearch} /opt/
${source_elasticsearch}
RUN tar zxf /opt/${elasticsearch} --strip-components=1
RUN grep ES_DISTRIBUTION_TYPE=tar /usr/share/elasticsearch/bin/elasticsearch-env \

View File

@ -32,6 +32,7 @@ include-tagged::{doc-tests-file}[{api}-request-options]
<1> The page parameters `from` and `size`. `from` specifies the number of
{dataframe-transforms} to skip. `size` specifies the maximum number of
{dataframe-transforms} to get. Defaults to `0` and `100` respectively.
<2> Whether to ignore if a wildcard expression matches no transforms.
include::../execution.asciidoc[]

View File

@ -22,6 +22,19 @@ include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new GET Stats request referencing an existing {dataframe-transform}
==== Optional Arguments
The following arguments are optional.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-options]
--------------------------------------------------
<1> The page parameters `from` and `size`. `from` specifies the number of data frame transform stats to skip.
`size` specifies the maximum number of data frame transform stats to get.
Defaults to `0` and `100` respectively.
<2> Whether to ignore if a wildcard expression matches no transforms.
include::../execution.asciidoc[]

View File

@ -32,6 +32,7 @@ include-tagged::{doc-tests-file}[{api}-request-options]
--------------------------------------------------
<1> If true, wait for the data frame task to stop before responding
<2> Controls the amount of time to wait until the {dataframe-job} stops.
<3> Whether to ignore if a wildcard expression matches no transforms.
include::../execution.asciidoc[]

View File

@ -0,0 +1,28 @@
--
:api: delete-data-frame-analytics
:request: DeleteDataFrameAnalyticsRequest
:response: AcknowledgedResponse
--
[id="{upid}-{api}"]
=== Delete Data Frame Analytics API
The Delete Data Frame Analytics API is used to delete an existing {dataframe-analytics-config}.
The API accepts a +{request}+ object as a request and returns a +{response}+.
[id="{upid}-{api}-request"]
==== Delete Data Frame Analytics Request
A +{request}+ object requires a {dataframe-analytics-config} id.
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
---------------------------------------------------
<1> Constructing a new request referencing an existing {dataframe-analytics-config}
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ object acknowledges the {dataframe-analytics-config} deletion.

View File

@ -0,0 +1,45 @@
--
:api: evaluate-data-frame
:request: EvaluateDataFrameRequest
:response: EvaluateDataFrameResponse
--
[id="{upid}-{api}"]
=== Evaluate Data Frame API
The Evaluate Data Frame API is used to evaluate an ML algorithm that ran on a {dataframe}.
The API accepts an +{request}+ object and returns an +{response}+.
[id="{upid}-{api}-request"]
==== Evaluate Data Frame Request
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new evaluation request
<2> Reference to an existing index
<3> Kind of evaluation to perform
<4> Name of the field in the index. Its value denotes the actual (i.e. ground truth) label for an example. Must be either true or false
<5> Name of the field in the index. Its value denotes the probability (as per some ML algorithm) of the example being classified as positive
<6> The remaining parameters are the metrics to be calculated based on the two fields described above.
<7> https://en.wikipedia.org/wiki/Precision_and_recall[Precision] calculated at thresholds: 0.4, 0.5 and 0.6
<8> https://en.wikipedia.org/wiki/Precision_and_recall[Recall] calculated at thresholds: 0.5 and 0.7
<9> https://en.wikipedia.org/wiki/Confusion_matrix[Confusion matrix] calculated at threshold 0.5
<10> https://en.wikipedia.org/wiki/Receiver_operating_characteristic#Area_under_the_curve[AuC ROC] calculated and the curve points returned
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ contains the requested evaluation metrics.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Fetching all the calculated metrics results
<2> Fetching precision metric by name
<3> Fetching precision at a given (0.4) threshold
<4> Fetching confusion matrix metric by name
<5> Fetching confusion matrix at a given (0.5) threshold

View File

@ -0,0 +1,34 @@
--
:api: get-data-frame-analytics-stats
:request: GetDataFrameAnalyticsStatsRequest
:response: GetDataFrameAnalyticsStatsResponse
--
[id="{upid}-{api}"]
=== Get Data Frame Analytics Stats API
The Get Data Frame Analytics Stats API is used to read the operational statistics of one or more {dataframe-analytics-config}s.
The API accepts a +{request}+ object and returns a +{response}+.
[id="{upid}-{api}-request"]
==== Get Data Frame Analytics Stats Request
A +{request}+ requires either a {dataframe-analytics-config} id, a comma-separated list of ids, or
the special wildcard `_all` to get the statistics for all {dataframe-analytics-config}s.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new GET Stats request referencing an existing {dataframe-analytics-config}
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ contains the requested {dataframe-analytics-config} statistics.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------

View File

@ -0,0 +1,34 @@
--
:api: get-data-frame-analytics
:request: GetDataFrameAnalyticsRequest
:response: GetDataFrameAnalyticsResponse
--
[id="{upid}-{api}"]
=== Get Data Frame Analytics API
The Get Data Frame Analytics API is used to get one or more {dataframe-analytics-config}s.
The API accepts a +{request}+ object and returns a +{response}+.
[id="{upid}-{api}-request"]
==== Get Data Frame Analytics Request
A +{request}+ requires either a {dataframe-analytics-config} id, a comma-separated list of ids, or
the special wildcard `_all` to get all {dataframe-analytics-config}s.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new GET request referencing an existing {dataframe-analytics-config}
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ contains the requested {dataframe-analytics-config}s.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------

View File

@ -0,0 +1,115 @@
--
:api: put-data-frame-analytics
:request: PutDataFrameAnalyticsRequest
:response: PutDataFrameAnalyticsResponse
--
[id="{upid}-{api}"]
=== Put Data Frame Analytics API
The Put Data Frame Analytics API is used to create a new {dataframe-analytics-config}.
The API accepts a +{request}+ object as a request and returns a +{response}+.
[id="{upid}-{api}-request"]
==== Put Data Frame Analytics Request
A +{request}+ requires the following argument:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> The configuration of the {dataframe-job} to create
[id="{upid}-{api}-config"]
==== Data Frame Analytics Configuration
The `DataFrameAnalyticsConfig` object contains all the details about the {dataframe-job}
configuration and contains the following arguments:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-config]
--------------------------------------------------
<1> The {dataframe-analytics-config} id
<2> The source index and query from which to gather data
<3> The destination index
<4> The analysis to be performed
<5> The fields to be included in / excluded from the analysis
<6> The memory limit for the model created as part of the analysis process
[id="{upid}-{api}-source-config"]
==== SourceConfig
The index and the query from which to collect data.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-source-config]
--------------------------------------------------
<1> Constructing a new DataFrameAnalyticsSource
<2> The source index
<3> The query from which to gather the data. If query is not set, a `match_all` query is used by default.
===== QueryConfig
The query with which to select data from the source.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-query-config]
--------------------------------------------------
==== DestinationConfig
The index to which data should be written by the {dataframe-job}.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-dest-config]
--------------------------------------------------
<1> Constructing a new DataFrameAnalyticsDest
<2> The destination index
==== Analysis
The analysis to be performed.
Currently, only one analysis is supported: +OutlierDetection+.
+OutlierDetection+ analysis can be created in one of two ways:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-analysis-default]
--------------------------------------------------
<1> Constructing a new OutlierDetection object with default strategy to determine outliers
or
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-analysis-customized]
--------------------------------------------------
<1> Constructing a new OutlierDetection object
<2> The method used to perform the analysis
<3> Number of neighbors taken into account during analysis
==== Analyzed fields
A FetchContext object containing the fields to be included in or excluded from the analysis.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-analyzed-fields]
--------------------------------------------------
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ contains the newly created {dataframe-analytics-config}.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------

View File

@ -0,0 +1,28 @@
--
:api: start-data-frame-analytics
:request: StartDataFrameAnalyticsRequest
:response: AcknowledgedResponse
--
[id="{upid}-{api}"]
=== Start Data Frame Analytics API
The Start Data Frame Analytics API is used to start an existing {dataframe-analytics-config}.
It accepts a +{request}+ object and responds with a +{response}+ object.
[id="{upid}-{api}-request"]
==== Start Data Frame Analytics Request
A +{request}+ object requires a {dataframe-analytics-config} id.
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
---------------------------------------------------
<1> Constructing a new start request referencing an existing {dataframe-analytics-config}
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ object acknowledges the {dataframe-job} has started.

View File

@ -0,0 +1,28 @@
--
:api: stop-data-frame-analytics
:request: StopDataFrameAnalyticsRequest
:response: StopDataFrameAnalyticsResponse
--
[id="{upid}-{api}"]
=== Stop Data Frame Analytics API
The Stop Data Frame Analytics API is used to stop a running {dataframe-analytics-config}.
It accepts a +{request}+ object and responds with a +{response}+ object.
[id="{upid}-{api}-request"]
==== Stop Data Frame Analytics Request
A +{request}+ object requires a {dataframe-analytics-config} id.
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
---------------------------------------------------
<1> Constructing a new stop request referencing an existing {dataframe-analytics-config}
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ object acknowledges the {dataframe-job} has stopped.

View File

@ -285,6 +285,13 @@ The Java High Level REST Client supports the following Machine Learning APIs:
* <<{upid}-put-calendar-job>>
* <<{upid}-delete-calendar-job>>
* <<{upid}-delete-calendar>>
* <<{upid}-get-data-frame-analytics>>
* <<{upid}-get-data-frame-analytics-stats>>
* <<{upid}-put-data-frame-analytics>>
* <<{upid}-delete-data-frame-analytics>>
* <<{upid}-start-data-frame-analytics>>
* <<{upid}-stop-data-frame-analytics>>
* <<{upid}-evaluate-data-frame>>
* <<{upid}-put-filter>>
* <<{upid}-get-filters>>
* <<{upid}-update-filter>>
@ -329,6 +336,13 @@ include::ml/delete-calendar-event.asciidoc[]
include::ml/put-calendar-job.asciidoc[]
include::ml/delete-calendar-job.asciidoc[]
include::ml/delete-calendar.asciidoc[]
include::ml/get-data-frame-analytics.asciidoc[]
include::ml/get-data-frame-analytics-stats.asciidoc[]
include::ml/put-data-frame-analytics.asciidoc[]
include::ml/delete-data-frame-analytics.asciidoc[]
include::ml/start-data-frame-analytics.asciidoc[]
include::ml/stop-data-frame-analytics.asciidoc[]
include::ml/evaluate-data-frame.asciidoc[]
include::ml/put-filter.asciidoc[]
include::ml/get-filters.asciidoc[]
include::ml/update-filter.asciidoc[]

View File

@ -30,6 +30,8 @@ See the <<painless-api-reference-score, Score API>> for a high-level overview of
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -84,6 +86,8 @@ See the <<painless-api-reference-score, Score API>> for a high-level overview of
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -138,6 +142,8 @@ See the <<painless-api-reference-score, Score API>> for a high-level overview of
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()

View File

@ -4335,6 +4335,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -4386,6 +4388,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(BiFunction)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -4500,6 +4504,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -4666,6 +4672,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -5367,6 +5375,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(BiFunction)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -5457,6 +5467,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(BiFunction)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -5502,6 +5514,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(BiFunction)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -5668,6 +5682,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(BiFunction)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -5764,6 +5780,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Deque.html#getFirst()[getFirst]()
* def {java11-javadoc}/java.base/java/util/Deque.html#getLast()[getLast]()
* int getLength()
@ -5836,6 +5854,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -6056,6 +6076,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(BiFunction)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -6157,6 +6179,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* def {java11-javadoc}/java.base/java/util/NavigableMap.html#floorKey(java.lang.Object)[floorKey](def)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -6642,6 +6666,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* def {java11-javadoc}/java.base/java/util/SortedMap.html#firstKey()[firstKey]()
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -6844,6 +6870,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* def {java11-javadoc}/java.base/java/util/Vector.html#firstElement()[firstElement]()
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -6988,6 +7016,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* def {java11-javadoc}/java.base/java/util/NavigableMap.html#floorKey(java.lang.Object)[floorKey](def)
* void {java11-javadoc}/java.base/java/util/Map.html#forEach(java.util.function.BiConsumer)[forEach](BiConsumer)
* def {java11-javadoc}/java.base/java/util/Map.html#get(java.lang.Object)[get](def)
* Object getByPath(String)
* Object getByPath(String, Object)
* def {java11-javadoc}/java.base/java/util/Map.html#getOrDefault(java.lang.Object,java.lang.Object)[getOrDefault](def, def)
* Map groupBy(BiFunction)
* int {java11-javadoc}/java.base/java/lang/Object.html#hashCode()[hashCode]()
@ -7158,6 +7188,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* def {java11-javadoc}/java.base/java/util/Vector.html#firstElement()[firstElement]()
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* def {java11-javadoc}/java.base/java/util/List.html#get(int)[get](int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* Map groupBy(Function)
* int {java11-javadoc}/java.base/java/util/List.html#hashCode()[hashCode]()
@ -8016,6 +8048,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* Boolean get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* boolean getValue()
* Map groupBy(Function)
@ -8071,6 +8105,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* BytesRef get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* BytesRef getValue()
* Map groupBy(Function)
@ -8126,6 +8162,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* JodaCompatibleZonedDateTime get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* JodaCompatibleZonedDateTime getValue()
* Map groupBy(Function)
@ -8181,6 +8219,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* Double get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* double getValue()
* Map groupBy(Function)
@ -8240,6 +8280,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* double geohashDistance(String)
* double geohashDistanceWithDefault(String, double)
* GeoPoint get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* double getLat()
* double[] getLats()
* int getLength()
@ -8301,6 +8343,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* Long get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* long getValue()
* Map groupBy(Function)
@ -8356,6 +8400,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* String get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* String getValue()
* Map groupBy(Function)
@ -8415,6 +8461,8 @@ See the <<painless-api-reference-shared, Shared API>> for a high-level overview
* List findResults(Function)
* void {java11-javadoc}/java.base/java/lang/Iterable.html#forEach(java.util.function.Consumer)[forEach](Consumer)
* String get(int)
* Object getByPath(String)
* Object getByPath(String, Object)
* int getLength()
* String getValue()
* Map groupBy(Function)

View File

@ -7,3 +7,5 @@ include::painless-method-dispatch.asciidoc[]
include::painless-debugging.asciidoc[]
include::painless-execute-script.asciidoc[]
include::../redirects.asciidoc[]

Some files were not shown because too many files have changed in this diff Show More