[ML] Data Frame HLRC Get API (#40509)

This commit is contained in:
David Kyle 2019-03-27 12:40:39 +00:00 committed by GitHub
parent b4b17e16e0
commit c990b30019
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 672 additions and 49 deletions

View File

@ -22,6 +22,8 @@ package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformResponse;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsResponse;
import org.elasticsearch.client.dataframe.PreviewDataFrameTransformRequest;
@ -275,7 +277,7 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
*/
public void stopDataFrameTransformAsync(StopDataFrameTransformRequest request, RequestOptions options,
ActionListener<StopDataFrameTransformResponse> listener) {
ActionListener<StopDataFrameTransformResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::stopDataFrameTransform,
options,
@ -283,4 +285,44 @@ public final class DataFrameClient {
listener,
Collections.emptySet());
}
/**
 * Get one or more data frame transform configurations.
 * <p>
 * For additional info see the Get Data Frame transform documentation.
 * TODO(review): replace with a link to the published reference docs once available.
 *
 * @param request The get data frame transform request
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @return A GetDataFrameTransformResponse containing the requested transforms
 * @throws IOException when there is a serialization issue sending the request or receiving the response
 */
public GetDataFrameTransformResponse getDataFrameTransform(GetDataFrameTransformRequest request, RequestOptions options)
        throws IOException {
    return restHighLevelClient.performRequestAndParseEntity(request,
            DataFrameRequestConverters::getDataFrameTransform,
            options,
            GetDataFrameTransformResponse::fromXContent,
            Collections.emptySet());
}
/**
 * Get one or more data frame transform configurations asynchronously and notifies listener on completion.
 * <p>
 * For additional info see the Get Data Frame transform documentation.
 * TODO(review): replace with a link to the published reference docs once available.
 *
 * @param request The get data frame transform request
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener Listener to be notified upon request completion
 */
public void getDataFrameTransformAsync(GetDataFrameTransformRequest request, RequestOptions options,
        ActionListener<GetDataFrameTransformResponse> listener) {
    restHighLevelClient.performRequestAsyncAndParseEntity(request,
            DataFrameRequestConverters::getDataFrameTransform,
            options,
            GetDataFrameTransformResponse::fromXContent,
            listener,
            Collections.emptySet());
}
}

View File

@ -24,11 +24,13 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.dataframe.PreviewDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.PutDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.StartDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.StopDataFrameTransformRequest;
import org.elasticsearch.common.Strings;
import java.io.IOException;
@ -49,6 +51,21 @@ final class DataFrameRequestConverters {
return request;
}
/**
 * Builds the REST request for the get data frame transform API.
 * Transform ids are joined into a single comma-delimited path part; the
 * optional {@code from}/{@code size} paging values become query parameters.
 */
static Request getDataFrameTransform(GetDataFrameTransformRequest getRequest) {
    String idPathPart = Strings.collectionToCommaDelimitedString(getRequest.getId());
    String endpoint = new RequestConverters.EndpointBuilder()
        .addPathPartAsIs("_data_frame", "transforms")
        .addPathPart(idPathPart)
        .build();

    Request getTransformRequest = new Request(HttpGet.METHOD_NAME, endpoint);
    Integer from = getRequest.getFrom();
    if (from != null) {
        getTransformRequest.addParameter("from", from.toString());
    }
    Integer size = getRequest.getSize();
    if (size != null) {
        getTransformRequest.addParameter("size", size.toString());
    }
    return getTransformRequest;
}
static Request deleteDataFrameTransform(DeleteDataFrameTransformRequest request) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")

View File

@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
/**
 * Request to read one or more data frame transform configurations,
 * optionally paged with {@code from}/{@code size}.
 */
public class GetDataFrameTransformRequest implements Validatable {

    private final List<String> ids;
    // Optional paging parameters; when null the server-side defaults apply.
    private Integer from;
    private Integer size;

    /**
     * Helper method to create a request that will get ALL Data Frame Transforms
     * @return new {@link GetDataFrameTransformRequest} object for the id "_all"
     */
    public static GetDataFrameTransformRequest getAllDataFrameTransformsRequest() {
        return new GetDataFrameTransformRequest("_all");
    }

    public GetDataFrameTransformRequest(String... ids) {
        this.ids = Arrays.asList(ids);
    }

    public List<String> getId() {
        return ids;
    }

    public Integer getFrom() {
        return from;
    }

    public void setFrom(Integer from) {
        this.from = from;
    }

    public Integer getSize() {
        return size;
    }

    public void setSize(Integer size) {
        this.size = size;
    }

    @Override
    public Optional<ValidationException> validate() {
        // At least one id (possibly "_all") is required to build the endpoint path.
        if (ids == null || ids.isEmpty()) {
            ValidationException validationException = new ValidationException();
            validationException.addValidationError("data frame transform id must not be null");
            return Optional.of(validationException);
        } else {
            return Optional.empty();
        }
    }

    @Override
    public int hashCode() {
        // Include the paging fields so hashCode stays consistent with equals
        return Objects.hash(ids, from, size);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj;
        // Compare all state, including the paging fields; the original omitted
        // from/size which made requests with different paging compare equal.
        return Objects.equals(ids, other.ids)
            && Objects.equals(from, other.from)
            && Objects.equals(size, other.size);
    }
}

View File

@ -0,0 +1,142 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe;
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
 * Response for the get data frame transform API. Holds the requested transform
 * configurations plus, when present, the ids of transforms whose stored
 * configuration could not be read ({@code invalid_transforms}).
 * Instances are immutable.
 */
public class GetDataFrameTransformResponse {

    public static final ParseField TRANSFORMS = new ParseField("transforms");
    public static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms");
    public static final ParseField COUNT = new ParseField("count");

    @SuppressWarnings("unchecked")
    static final ConstructingObjectParser<InvalidTransforms, Void> INVALID_TRANSFORMS_PARSER =
            new ConstructingObjectParser<>("invalid_transforms", true, args -> new InvalidTransforms((List<String>) args[0]));

    @SuppressWarnings("unchecked")
    static final ConstructingObjectParser<GetDataFrameTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
            "get_data_frame_transform", true, args -> new GetDataFrameTransformResponse(
            (List<DataFrameTransformConfig>) args[0], (int) args[1], (InvalidTransforms) args[2]));

    static {
        // Discard the count field which is the size of the transforms array
        INVALID_TRANSFORMS_PARSER.declareInt((a, b) -> {}, COUNT);
        INVALID_TRANSFORMS_PARSER.declareStringArray(constructorArg(), TRANSFORMS);

        PARSER.declareObjectArray(constructorArg(), DataFrameTransformConfig.PARSER::apply, TRANSFORMS);
        PARSER.declareInt(constructorArg(), COUNT);
        PARSER.declareObject(optionalConstructorArg(), INVALID_TRANSFORMS_PARSER::apply, INVALID_TRANSFORMS);
    }

    public static GetDataFrameTransformResponse fromXContent(final XContentParser parser) {
        return GetDataFrameTransformResponse.PARSER.apply(parser, null);
    }

    // Final fields: the response is only ever populated by the constructor.
    // The original left these mutable for no reason.
    private final List<DataFrameTransformConfig> transformConfigurations;
    private final int count;
    private final InvalidTransforms invalidTransforms;

    public GetDataFrameTransformResponse(List<DataFrameTransformConfig> transformConfigurations,
                                         int count,
                                         @Nullable InvalidTransforms invalidTransforms) {
        this.transformConfigurations = transformConfigurations;
        this.count = count;
        this.invalidTransforms = invalidTransforms;
    }

    /** @return ids of transforms whose config could not be read, or null when all were valid */
    @Nullable
    public InvalidTransforms getInvalidTransforms() {
        return invalidTransforms;
    }

    /** @return the total count reported by the server (may exceed the page size) */
    public int getCount() {
        return count;
    }

    public List<DataFrameTransformConfig> getTransformConfigurations() {
        return transformConfigurations;
    }

    @Override
    public int hashCode() {
        return Objects.hash(transformConfigurations, count, invalidTransforms);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final GetDataFrameTransformResponse that = (GetDataFrameTransformResponse) other;
        return Objects.equals(this.transformConfigurations, that.transformConfigurations)
                && Objects.equals(this.count, that.count)
                && Objects.equals(this.invalidTransforms, that.invalidTransforms);
    }

    /** Holder for the ids of transforms whose stored configuration is unreadable. */
    static final class InvalidTransforms {
        private final List<String> transformIds;

        InvalidTransforms(List<String> transformIds) {
            this.transformIds = transformIds;
        }

        public int getCount() {
            return transformIds.size();
        }

        public List<String> getTransformIds() {
            return transformIds;
        }

        @Override
        public int hashCode() {
            return Objects.hash(transformIds);
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other == null || getClass() != other.getClass()) {
                return false;
            }
            final InvalidTransforms that = (InvalidTransforms) other;
            return Objects.equals(this.transformIds, that.transformIds);
        }
    }
}

View File

@ -46,7 +46,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
private final DestConfig dest;
private final PivotConfig pivotConfig;
public static final ConstructingObjectParser<DataFrameTransformConfig, String> PARSER =
public static final ConstructingObjectParser<DataFrameTransformConfig, Void> PARSER =
new ConstructingObjectParser<>("data_frame_transform", true,
(args) -> {
String id = (String) args[0];

View File

@ -24,6 +24,7 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.dataframe.PreviewDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.PutDataFrameTransformRequest;
@ -147,4 +148,29 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo/_stats"));
}
public void testGetDataFrameTransform() {
    // Without paging values no query parameters may be added
    GetDataFrameTransformRequest transformRequest = new GetDataFrameTransformRequest("bar");
    Request httpRequest = DataFrameRequestConverters.getDataFrameTransform(transformRequest);
    assertEquals(HttpGet.METHOD_NAME, httpRequest.getMethod());
    assertThat(httpRequest.getEndpoint(), equalTo("/_data_frame/transforms/bar"));
    assertFalse(httpRequest.getParameters().containsKey("from"));
    assertFalse(httpRequest.getParameters().containsKey("size"));

    // Paging values should be propagated as "from" and "size" query parameters
    transformRequest.setFrom(0);
    transformRequest.setSize(10);
    httpRequest = DataFrameRequestConverters.getDataFrameTransform(transformRequest);
    assertEquals("0", httpRequest.getParameters().get("from"));
    assertEquals("10", httpRequest.getParameters().get("size"));
}
/**
 * Multiple transform ids must be rendered as a single comma-delimited path part.
 * (Renamed from the typo'd testGetDataFrameTransform_givenMulitpleIds; test
 * methods have no callers, discovery is by the "test" prefix.)
 */
public void testGetDataFrameTransform_givenMultipleIds() {
    GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("foo", "bar", "baz");
    Request request = DataFrameRequestConverters.getDataFrameTransform(getRequest);

    assertEquals(HttpGet.METHOD_NAME, request.getMethod());
    assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo,bar,baz"));
}
}

View File

@ -27,6 +27,8 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.core.IndexerState;
import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformResponse;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsResponse;
import org.elasticsearch.client.dataframe.PreviewDataFrameTransformRequest;
@ -52,6 +54,7 @@ import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.junit.After;
@ -67,6 +70,7 @@ import java.util.Optional;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
@ -153,16 +157,8 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
String id = "test-crud";
DataFrameTransformConfig transform = new DataFrameTransformConfig(id,
new SourceConfig(new String[]{sourceIndex}, queryConfig), new DestConfig("pivot-dest"), pivotConfig);
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
DataFrameClient client = highLevelClient().dataFrame();
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
@ -180,20 +176,78 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
assertThat(deleteError.getMessage(), containsString("Transform with id [test-crud] could not be found"));
}
public void testGetTransform() throws IOException {
    // Store a single transform so there is something to fetch
    String sourceIndex = "transform-source";
    createIndex(sourceIndex);

    String transformId = "test-get";
    DataFrameTransformConfig config = validDataFrameTransformConfig(transformId, sourceIndex, "pivot-dest");

    DataFrameClient dataFrameClient = highLevelClient().dataFrame();
    AcknowledgedResponse putResponse = execute(new PutDataFrameTransformRequest(config),
            dataFrameClient::putDataFrameTransform, dataFrameClient::putDataFrameTransformAsync);
    assertTrue(putResponse.isAcknowledged());

    // Fetching by id should return exactly the stored configuration
    GetDataFrameTransformResponse getResponse = execute(new GetDataFrameTransformRequest(transformId),
            dataFrameClient::getDataFrameTransform, dataFrameClient::getDataFrameTransformAsync);
    assertNull(getResponse.getInvalidTransforms());
    assertThat(getResponse.getTransformConfigurations(), hasSize(1));
    assertEquals(config, getResponse.getTransformConfigurations().get(0));
}
public void testGetAllAndPageTransforms() throws IOException {
    String sourceIndex = "transform-source";
    createIndex(sourceIndex);

    DataFrameClient client = highLevelClient().dataFrame();

    // Store two transforms to page over
    DataFrameTransformConfig config = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1");
    AcknowledgedResponse putResponse = execute(new PutDataFrameTransformRequest(config),
            client::putDataFrameTransform, client::putDataFrameTransformAsync);
    assertTrue(putResponse.isAcknowledged());

    config = validDataFrameTransformConfig("test-get-all-2", sourceIndex, "pivot-dest-2");
    putResponse = execute(new PutDataFrameTransformRequest(config),
            client::putDataFrameTransform, client::putDataFrameTransformAsync);
    assertTrue(putResponse.isAcknowledged());

    // "_all" returns both transforms
    GetDataFrameTransformRequest getAllRequest = new GetDataFrameTransformRequest("_all");
    GetDataFrameTransformResponse getResponse = execute(getAllRequest,
            client::getDataFrameTransform, client::getDataFrameTransformAsync);
    assertNull(getResponse.getInvalidTransforms());
    assertThat(getResponse.getTransformConfigurations(), hasSize(2));
    assertEquals(config, getResponse.getTransformConfigurations().get(1));

    // Paging with size 1 trims the result to a single transform
    getAllRequest.setFrom(0);
    getAllRequest.setSize(1);
    getResponse = execute(getAllRequest,
            client::getDataFrameTransform, client::getDataFrameTransformAsync);
    assertNull(getResponse.getInvalidTransforms());
    assertThat(getResponse.getTransformConfigurations(), hasSize(1));

    // Naming several ids explicitly returns all of them
    GetDataFrameTransformRequest getMultipleRequest = new GetDataFrameTransformRequest("test-get-all-1", "test-get-all-2");
    getResponse = execute(getMultipleRequest,
            client::getDataFrameTransform, client::getDataFrameTransformAsync);
    assertNull(getResponse.getInvalidTransforms());
    assertThat(getResponse.getTransformConfigurations(), hasSize(2));
}
public void testGetMissingTransform() {
    // Requesting an id that was never stored must surface a 404
    DataFrameClient client = highLevelClient().dataFrame();
    GetDataFrameTransformRequest request = new GetDataFrameTransformRequest("unknown");

    ElasticsearchStatusException missingError = expectThrows(ElasticsearchStatusException.class,
            () -> execute(request, client::getDataFrameTransform, client::getDataFrameTransformAsync));
    assertThat(missingError.status(), equalTo(RestStatus.NOT_FOUND));
}
public void testStartStop() throws IOException {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
String id = "test-stop-start";
DataFrameTransformConfig transform = new DataFrameTransformConfig(id,
new SourceConfig(new String[]{sourceIndex}, queryConfig), new DestConfig("pivot-dest"), pivotConfig);
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
DataFrameClient client = highLevelClient().dataFrame();
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
@ -226,15 +280,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
createIndex(sourceIndex);
indexData(sourceIndex);
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
DataFrameTransformConfig transform = new DataFrameTransformConfig("test-preview",
new SourceConfig(new String[]{sourceIndex}, queryConfig), null, pivotConfig);
DataFrameTransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null);
DataFrameClient client = highLevelClient().dataFrame();
PreviewDataFrameTransformResponse preview = execute(new PreviewDataFrameTransformRequest(transform),
@ -245,11 +291,27 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
assertThat(docs, hasSize(2));
Optional<Map<String, Object>> theresa = docs.stream().filter(doc -> "theresa".equals(doc.get("reviewer"))).findFirst();
assertTrue(theresa.isPresent());
assertEquals(2.5d, (double)theresa.get().get("avg_rating"), 0.01d);
assertEquals(2.5d, (double) theresa.get().get("avg_rating"), 0.01d);
Optional<Map<String, Object>> michel = docs.stream().filter(doc -> "michel".equals(doc.get("reviewer"))).findFirst();
assertTrue(michel.isPresent());
assertEquals(3.6d, (double)michel.get().get("avg_rating"), 0.1d);
assertEquals(3.6d, (double) michel.get().get("avg_rating"), 0.1d);
}
/**
 * Builds a minimal valid transform config that pivots on {@code reviewer}
 * with an average-rating aggregation. A null destination leaves the dest
 * config unset (as the preview API requires).
 */
private DataFrameTransformConfig validDataFrameTransformConfig(String id, String source, String destination) {
    QueryConfig matchAll = new QueryConfig(new MatchAllQueryBuilder());
    GroupConfig groups = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));

    AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
    aggs.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
    PivotConfig pivot = new PivotConfig(groups, new AggregationConfig(aggs));

    DestConfig dest = destination == null ? null : new DestConfig(destination);
    return new DataFrameTransformConfig(id,
            new SourceConfig(new String[]{source}, matchAll),
            dest,
            pivot);
}
public void testGetStats() throws Exception {

View File

@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.ingest.Pipeline;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.AfterClass;
import org.junit.Before;
@ -130,7 +131,7 @@ public abstract class ESRestHighLevelClientTestCase extends ESRestTestCase {
private static class HighLevelClient extends RestHighLevelClient {
private HighLevelClient(RestClient restClient) {
super(restClient, (client) -> {}, Collections.emptyList());
super(restClient, (client) -> {}, new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents());
}
}

View File

@ -0,0 +1,32 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.containsString;
public class GetDataFrameTransformRequestTests extends ESTestCase {

    public void testValidate() {
        // A request with at least one id is valid
        assertFalse(new GetDataFrameTransformRequest("valid-id").validate().isPresent());

        // A request with no ids must produce a validation error
        GetDataFrameTransformRequest emptyRequest = new GetDataFrameTransformRequest(new String[0]);
        assertThat(emptyRequest.validate().get().getMessage(),
                containsString("data frame transform id must not be null"));
    }
}

View File

@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe;
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig;
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfigTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class GetDataFrameTransformResponseTests extends ESTestCase {

    public void testXContentParser() throws IOException {
        // Round-trip a random response through XContent and back.
        // Unknown fields are rejected: the hand-rolled toXContent below
        // must exactly match what the parser understands.
        xContentTester(this::createParser,
                GetDataFrameTransformResponseTests::createTestInstance,
                GetDataFrameTransformResponseTests::toXContent,
                GetDataFrameTransformResponse::fromXContent)
                .supportsUnknownFields(false)
                .test();
    }

    private static GetDataFrameTransformResponse createTestInstance() {
        int transformCount = randomIntBetween(0, 3);
        List<DataFrameTransformConfig> configs = new ArrayList<>();
        for (int i = 0; i < transformCount; i++) {
            configs.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
        }

        GetDataFrameTransformResponse.InvalidTransforms invalid = null;
        if (randomBoolean()) {
            List<String> badIds = Arrays.asList(generateRandomStringArray(5, 6, false, false));
            invalid = new GetDataFrameTransformResponse.InvalidTransforms(badIds);
        }

        // count is deliberately larger than the page to mimic server paging
        return new GetDataFrameTransformResponse(configs, configs.size() + 10, invalid);
    }

    private static void toXContent(GetDataFrameTransformResponse response, XContentBuilder builder) throws IOException {
        builder.startObject();
        builder.field("count", response.getCount());
        builder.field("transforms", response.getTransformConfigurations());
        if (response.getInvalidTransforms() != null) {
            builder.startObject("invalid_transforms");
            builder.field("count", response.getInvalidTransforms().getCount());
            builder.field("transforms", response.getInvalidTransforms().getTransformIds());
            builder.endObject();
        }
        builder.endObject();
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // Query parsing inside the transform config needs the search xcontent entries
        SearchModule module = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
        return new NamedXContentRegistry(module.getNamedXContents());
    }
}

View File

@ -27,6 +27,8 @@ import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.core.IndexerState;
import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformResponse;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsResponse;
import org.elasticsearch.client.dataframe.PreviewDataFrameTransformRequest;
@ -178,7 +180,6 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
ActionListener<AcknowledgedResponse> ackListener = listener;
listener = new LatchedActionListener<>(listener, latch);
// tag::put-data-frame-transform-execute-async
@ -264,7 +265,6 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
ActionListener<StartDataFrameTransformResponse> ackListener = listener;
listener = new LatchedActionListener<>(listener, latch);
StartDataFrameTransformRequest request = new StartDataFrameTransformRequest("mega-transform");
@ -294,7 +294,6 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
ActionListener<StopDataFrameTransformResponse> ackListener = listener;
listener = new LatchedActionListener<>(listener, latch);
StopDataFrameTransformRequest request = new StopDataFrameTransformRequest("mega-transform");
@ -392,14 +391,14 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
pivotConfig);
PreviewDataFrameTransformRequest request =
new PreviewDataFrameTransformRequest(transformConfig); // <3>
new PreviewDataFrameTransformRequest(transformConfig); // <3>
// end::preview-data-frame-transform-request
{
// tag::preview-data-frame-transform-execute
PreviewDataFrameTransformResponse response =
client.dataFrame()
.previewDataFrameTransform(request, RequestOptions.DEFAULT);
client.dataFrame()
.previewDataFrameTransform(request, RequestOptions.DEFAULT);
// end::preview-data-frame-transform-execute
assertNotNull(response.getDocs());
@ -482,18 +481,18 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
{
// tag::get-data-frame-transform-stats-execute-listener
ActionListener<GetDataFrameTransformStatsResponse> listener =
new ActionListener<GetDataFrameTransformStatsResponse>() {
@Override
public void onResponse(
GetDataFrameTransformStatsResponse response) {
// <1>
}
new ActionListener<GetDataFrameTransformStatsResponse>() {
@Override
public void onResponse(
GetDataFrameTransformStatsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-data-frame-transform-stats-execute-listener
// Replace the empty listener by a blocking listener in test
@ -508,4 +507,79 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
/**
 * Documentation integration test for the Get Data Frame Transform API.
 * <p>
 * Creates a source index and PUTs a transform named {@code mega-transform}, then
 * exercises both the synchronous {@code getDataFrameTransform} call and the
 * asynchronous {@code getDataFrameTransformAsync} call. The {@code // tag::} /
 * {@code // end::} markers delimit snippets that are included verbatim in the
 * asciidoc documentation — do not add or change code inside those regions
 * without checking the rendered docs.
 */
public void testGetDataFrameTransform() throws IOException, InterruptedException {
// Setup: the transform's source index must exist before the transform is PUT.
createIndex("source-data");
// Build a pivot config: group by "user_id" (named "reviewer") and average "stars".
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
DataFrameTransformConfig putTransformConfig = new DataFrameTransformConfig("mega-transform",
new SourceConfig(new String[]{"source-data"}, queryConfig),
new DestConfig("pivot-dest"), pivotConfig);
RestHighLevelClient client = highLevelClient();
// Register the transform so the GET calls below have something to fetch.
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(putTransformConfig), RequestOptions.DEFAULT);
// Ensure the transform is deleted when the test finishes.
transformsToClean.add(putTransformConfig.getId());
{
// Synchronous GET: these snippets appear in the documentation as-is.
// tag::get-data-frame-transform-request
GetDataFrameTransformRequest request =
new GetDataFrameTransformRequest("mega-transform"); // <1>
// end::get-data-frame-transform-request
// tag::get-data-frame-transform-request-options
request.setFrom(0); // <1>
request.setSize(100); // <2>
// end::get-data-frame-transform-request-options
// tag::get-data-frame-transform-execute
GetDataFrameTransformResponse response =
client.dataFrame()
.getDataFrameTransform(request, RequestOptions.DEFAULT);
// end::get-data-frame-transform-execute
// tag::get-data-frame-transform-response
List<DataFrameTransformConfig> transformConfigs =
response.getTransformConfigurations();
// end::get-data-frame-transform-response
// Only the single transform created above should be returned.
assertEquals(1, transformConfigs.size());
}
{
// Asynchronous GET: the documented listener is swapped for a latched one below.
// tag::get-data-frame-transform-execute-listener
ActionListener<GetDataFrameTransformResponse> listener =
new ActionListener<GetDataFrameTransformResponse>() {
@Override
public void onResponse(GetDataFrameTransformResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-data-frame-transform-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
GetDataFrameTransformRequest request = new GetDataFrameTransformRequest("mega-transform");
// tag::get-data-frame-transform-execute-async
client.dataFrame().getDataFrameTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::get-data-frame-transform-execute-async
// Fail rather than hang if the async call never completes.
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
}

View File

@ -0,0 +1,45 @@
--
:api: get-data-frame-transform
:request: GetDataFrameTransformRequest
:response: GetDataFrameTransformResponse
--
[id="{upid}-{api}"]
=== Get Data Frame Transform API
The Get Data Frame Transform API is used to get one or more {dataframe-transform}s.
The API accepts a +{request}+ object and returns a +{response}+.
[id="{upid}-{api}-request"]
==== Get Data Frame Transform Request
A +{request}+ requires either a data frame transform id, a comma-separated list of ids, or
the special wildcard `_all` to get all {dataframe-transform}s.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Constructing a new GET request referencing an existing {dataframe-transform}
==== Optional Arguments
The following arguments are optional.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-options]
--------------------------------------------------
<1> Page {dataframe-transform}s starting from this value
<2> Return at most `size` {dataframe-transform}s
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ contains the requested {dataframe-transform}s.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------

View File

@ -554,6 +554,7 @@ include::ilm/remove_lifecycle_policy_from_index.asciidoc[]
The Java High Level REST Client supports the following Data Frame APIs:
* <<{upid}-get-data-frame-transform>>
* <<{upid}-get-data-frame-transform-stats>>
* <<{upid}-put-data-frame-transform>>
* <<{upid}-delete-data-frame-transform>>
@ -561,6 +562,7 @@ The Java High Level REST Client supports the following Data Frame APIs:
* <<{upid}-start-data-frame-transform>>
* <<{upid}-stop-data-frame-transform>>
include::dataframe/get_data_frame.asciidoc[]
include::dataframe/get_data_frame_stats.asciidoc[]
include::dataframe/put_data_frame.asciidoc[]
include::dataframe/delete_data_frame.asciidoc[]