[PURIFY] remove all trace of x-pack transforms (#31)

This commit removes all trace of the Elastic-licensed transforms from the high-level REST client: the TransformClient and its transform() accessor, the transform request converters, and the associated request/response classes are deleted.

Signed-off-by: Peter Nied <petern@amazon.com>
Nick Knize 2021-02-02 17:03:58 -06:00 committed by Peter Nied
parent 3f168ac85c
commit d91ce2bd00
96 changed files with 1 addition and 11251 deletions

RestHighLevelClient.java

@@ -260,7 +260,6 @@ public class RestHighLevelClient implements Closeable {
private final IngestClient ingestClient = new IngestClient(this);
private final SnapshotClient snapshotClient = new SnapshotClient(this);
private final TasksClient tasksClient = new TasksClient(this);
private final TransformClient transformClient = new TransformClient(this);
/**
* Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the
@@ -351,20 +350,6 @@ public class RestHighLevelClient implements Closeable {
return tasksClient;
}
/**
* Provides methods for accessing the Elastic Licensed Data Frame APIs that
* are shipped with the Elastic Stack distribution of Elasticsearch. All of
* these APIs will 404 if run against the OSS distribution of Elasticsearch.
* <p>
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/transform-apis.html">
* Transform APIs on elastic.co</a> for more information.
*
* @return the client wrapper for making Data Frame API calls
*/
public TransformClient transform() {
return transformClient;
}
/**
* Executes a bulk request using the Bulk API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html">Bulk API on elastic.co</a>
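The hunks above remove the TransformClient field and the transform() accessor from RestHighLevelClient. A minimal, non-authoritative sketch of the caller code that stops compiling after this change (it assumes the pre-fork high-level REST client API shown in the deleted sources below; the TransformConfig value is only a placeholder):

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.transforms.TransformConfig;

public class TransformUsageSketch {
    public static void main(String[] args) throws Exception {
        // Build the high-level client on top of the low-level REST client.
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // Placeholder only: a real caller would build this with the TransformConfig builder.
            TransformConfig config = null;
            // The transform() accessor deleted by this commit; putTransform() issues PUT _transform/{id}.
            AcknowledgedResponse response = client.transform()
                    .putTransform(new PutTransformRequest(config), RequestOptions.DEFAULT);
            System.out.println("acknowledged: " + response.isAcknowledged());
        }
    }
}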

TransformClient.java

@@ -1,388 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformResponse;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PreviewTransformResponse;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StartTransformResponse;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.StopTransformResponse;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformResponse;
import java.io.IOException;
import java.util.Collections;
public final class TransformClient {
private final RestHighLevelClient restHighLevelClient;
TransformClient(RestHighLevelClient restHighLevelClient) {
this.restHighLevelClient = restHighLevelClient;
}
/**
* Creates a new transform
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html">
* Create transform documentation</a>
*
* @param request The PutTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfig}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An AcknowledgedResponse object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse putTransform(PutTransformRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::putTransform,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
}
/**
* Creates a new transform asynchronously and notifies listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html">
* Create transform documentation</a>
* @param request The PutTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfig}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable putTransformAsync(PutTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::putTransform,
options,
AcknowledgedResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Updates an existing transform
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html">
* Update transform documentation</a>
*
* @param request The UpdateTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An UpdateTransformResponse object containing the updated configuration
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public UpdateTransformResponse updateTransform(UpdateTransformRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::updateTransform,
options,
UpdateTransformResponse::fromXContent,
Collections.emptySet());
}
/**
* Updates an existing transform asynchronously and notifies listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html">
* Update transform documentation</a>
* @param request The UpdateTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable updateTransformAsync(UpdateTransformRequest request,
RequestOptions options,
ActionListener<UpdateTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::updateTransform,
options,
UpdateTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Get the running statistics of a transform
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html">
* Get transform stats documentation</a>
*
* @param request Specifies which transforms to get the stats for
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return The transform stats
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetTransformStatsResponse getTransformStats(GetTransformStatsRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::getTransformStats,
options,
GetTransformStatsResponse::fromXContent,
Collections.emptySet());
}
/**
* Get the running statistics of a transform asynchronously and notify listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html">
* Get transform stats documentation</a>
* @param request Specifies which transforms to get the stats for
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getTransformStatsAsync(GetTransformStatsRequest request, RequestOptions options,
ActionListener<GetTransformStatsResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::getTransformStats,
options,
GetTransformStatsResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Delete a transform
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html">
* Delete transform documentation</a>
*
* @param request The delete transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An AcknowledgedResponse object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse deleteTransform(DeleteTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::deleteTransform,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
}
/**
* Delete a transform asynchronously and notify listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html">
* Delete transform documentation</a>
* @param request The delete transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable deleteTransformAsync(DeleteTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::deleteTransform,
options,
AcknowledgedResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Preview the result of a transform
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html">
* Preview transform documentation</a>
*
* @param request The preview transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return A response containing the results of the applied transform
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PreviewTransformResponse previewTransform(PreviewTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::previewTransform,
options,
PreviewTransformResponse::fromXContent,
Collections.emptySet());
}
/**
* Preview the result of a transform asynchronously and notify listener on completion
* <p>
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html">
* Preview transform documentation</a>
* @param request The preview transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable previewTransformAsync(PreviewTransformRequest request, RequestOptions options,
ActionListener<PreviewTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::previewTransform,
options,
PreviewTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Start a transform
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html">
* Start transform documentation</a>
*
* @param request The start transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return A response object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public StartTransformResponse startTransform(StartTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::startTransform,
options,
StartTransformResponse::fromXContent,
Collections.emptySet());
}
/**
* Start a transform asynchronously and notify listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html">
* Start transform documentation</a>
* @param request The start transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable startTransformAsync(StartTransformRequest request, RequestOptions options,
ActionListener<StartTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::startTransform,
options,
StartTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Stop a transform
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-transform.html">
* Stop transform documentation</a>
*
* @param request The stop transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return A response object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public StopTransformResponse stopTransform(StopTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::stopTransform,
options,
StopTransformResponse::fromXContent,
Collections.emptySet());
}
/**
* Stop a transform asynchronously and notify listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-transform.html">
* Stop transform documentation</a>
* @param request The stop transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable stopTransformAsync(StopTransformRequest request, RequestOptions options,
ActionListener<StopTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::stopTransform,
options,
StopTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Get one or more transform configurations
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform.html">
* Get transform documentation</a>
*
* @param request The get transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return A GetTransformResponse containing the requested transforms
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetTransformResponse getTransform(GetTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
TransformRequestConverters::getTransform,
options,
GetTransformResponse::fromXContent,
Collections.emptySet());
}
/**
* Get one or more transform configurations asynchronously and notify listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform.html">
* Get transform documentation</a>
* @param request The get transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getTransformAsync(GetTransformRequest request, RequestOptions options,
ActionListener<GetTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
TransformRequestConverters::getTransform,
options,
GetTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
}

TransformRequestConverters.java

@@ -1,173 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.common.Strings;
import java.io.IOException;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
import static org.elasticsearch.client.transform.DeleteTransformRequest.FORCE;
import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH;
import static org.elasticsearch.client.transform.PutTransformRequest.DEFER_VALIDATION;
import static org.elasticsearch.client.transform.StopTransformRequest.WAIT_FOR_CHECKPOINT;
final class TransformRequestConverters {
private TransformRequestConverters() {}
static Request putTransform(PutTransformRequest putRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform")
.addPathPart(putRequest.getConfig().getId())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putRequest, REQUEST_BODY_CONTENT_TYPE));
if (putRequest.getDeferValidation() != null) {
request.addParameter(DEFER_VALIDATION, Boolean.toString(putRequest.getDeferValidation()));
}
return request;
}
static Request updateTransform(UpdateTransformRequest updateDataFrameTransformRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform")
.addPathPart(updateDataFrameTransformRequest.getId())
.addPathPart("_update")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(updateDataFrameTransformRequest, REQUEST_BODY_CONTENT_TYPE));
if (updateDataFrameTransformRequest.getDeferValidation() != null) {
request.addParameter(DEFER_VALIDATION, Boolean.toString(updateDataFrameTransformRequest.getDeferValidation()));
}
return request;
}
static Request getTransform(GetTransformRequest getRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform")
.addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getId()))
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
if (getRequest.getPageParams() != null && getRequest.getPageParams().getFrom() != null) {
request.addParameter(PageParams.FROM.getPreferredName(), getRequest.getPageParams().getFrom().toString());
}
if (getRequest.getPageParams() != null && getRequest.getPageParams().getSize() != null) {
request.addParameter(PageParams.SIZE.getPreferredName(), getRequest.getPageParams().getSize().toString());
}
if (getRequest.getAllowNoMatch() != null) {
request.addParameter(ALLOW_NO_MATCH, getRequest.getAllowNoMatch().toString());
}
return request;
}
static Request deleteTransform(DeleteTransformRequest deleteRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform")
.addPathPart(deleteRequest.getId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
if (deleteRequest.getForce() != null) {
request.addParameter(FORCE, Boolean.toString(deleteRequest.getForce()));
}
return request;
}
static Request startTransform(StartTransformRequest startRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform")
.addPathPart(startRequest.getId())
.addPathPartAsIs("_start")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
if (startRequest.getTimeout() != null) {
params.withTimeout(startRequest.getTimeout());
}
request.addParameters(params.asMap());
return request;
}
static Request stopTransform(StopTransformRequest stopRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform")
.addPathPart(stopRequest.getId())
.addPathPartAsIs("_stop")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
if (stopRequest.getWaitForCompletion() != null) {
params.withWaitForCompletion(stopRequest.getWaitForCompletion());
}
if (stopRequest.getTimeout() != null) {
params.withTimeout(stopRequest.getTimeout());
}
if (stopRequest.getAllowNoMatch() != null) {
request.addParameter(ALLOW_NO_MATCH, stopRequest.getAllowNoMatch().toString());
}
if (stopRequest.getWaitForCheckpoint() != null) {
request.addParameter(WAIT_FOR_CHECKPOINT, stopRequest.getWaitForCheckpoint().toString());
}
request.addParameters(params.asMap());
return request;
}
static Request previewTransform(PreviewTransformRequest previewRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform", "_preview")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(previewRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request getTransformStats(GetTransformStatsRequest statsRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_transform")
.addPathPart(statsRequest.getId())
.addPathPartAsIs("_stats")
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getFrom() != null) {
request.addParameter(PageParams.FROM.getPreferredName(), statsRequest.getPageParams().getFrom().toString());
}
if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getSize() != null) {
request.addParameter(PageParams.SIZE.getPreferredName(), statsRequest.getPageParams().getSize().toString());
}
if (statsRequest.getAllowNoMatch() != null) {
request.addParameter(ALLOW_NO_MATCH, statsRequest.getAllowNoMatch().toString());
}
return request;
}
}
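The deleted TransformRequestConverters above only assemble plain REST calls against the _transform endpoints (PUT _transform/{id}, POST _transform/{id}/_update, _start, _stop, _preview, and GET _transform/{id}/_stats). A hedged sketch, assuming a target cluster that still serves those endpoints: the same calls can be issued through the low-level RestClient, which this commit leaves in place; the endpoint paths mirror the converter code above, and the request body is only a placeholder:

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class LowLevelTransformCalls {
    public static void main(String[] args) throws Exception {
        try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Mirrors putTransform(): PUT _transform/{id} with the optional defer_validation parameter.
            Request put = new Request("PUT", "/_transform/my-transform");
            put.addParameter("defer_validation", "true");
            String transformConfigJson = "{}"; // placeholder for the real transform config (source, dest, ...)
            put.setJsonEntity(transformConfigJson);
            Response putResponse = restClient.performRequest(put);
            System.out.println("put: " + putResponse.getStatusLine());

            // Mirrors startTransform(): POST _transform/{id}/_start
            Request start = new Request("POST", "/_transform/my-transform/_start");
            Response startResponse = restClient.performRequest(start);
            System.out.println("start: " + startResponse.getStatusLine());
        }
    }
}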

AcknowledgedTasksResponse.java

@@ -1,98 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class AcknowledgedTasksResponse {
public static final ParseField TASK_FAILURES = new ParseField("task_failures");
public static final ParseField NODE_FAILURES = new ParseField("node_failures");
@SuppressWarnings("unchecked")
protected static <T extends AcknowledgedTasksResponse> ConstructingObjectParser<T, Void> generateParser(
String name,
TriFunction<Boolean, List<TaskOperationFailure>, List<? extends ElasticsearchException>, T> ctor,
String ackFieldName) {
ConstructingObjectParser<T, Void> parser = new ConstructingObjectParser<>(name, true,
args -> ctor.apply((boolean) args[0], (List<TaskOperationFailure>) args[1], (List<ElasticsearchException>) args[2]));
parser.declareBoolean(constructorArg(), new ParseField(ackFieldName));
parser.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), TASK_FAILURES);
parser.declareObjectArray(optionalConstructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), NODE_FAILURES);
return parser;
}
private boolean acknowledged;
private List<TaskOperationFailure> taskFailures;
private List<ElasticsearchException> nodeFailures;
public AcknowledgedTasksResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
this.acknowledged = acknowledged;
this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(taskFailures);
this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures);
}
public boolean isAcknowledged() {
return acknowledged;
}
public List<TaskOperationFailure> getTaskFailures() {
return taskFailures;
}
public List<ElasticsearchException> getNodeFailures() {
return nodeFailures;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
AcknowledgedTasksResponse other = (AcknowledgedTasksResponse) obj;
return acknowledged == other.acknowledged
&& taskFailures.equals(other.taskFailures)
&& nodeFailures.equals(other.nodeFailures);
}
@Override
public int hashCode() {
return Objects.hash(acknowledged, taskFailures, nodeFailures);
}
}

DeleteTransformRequest.java

@@ -1,83 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import java.util.Objects;
import java.util.Optional;
/**
* Request to delete a transform
*/
public class DeleteTransformRequest implements Validatable {
public static final String FORCE = "force";
private final String id;
private Boolean force;
public DeleteTransformRequest(String id) {
this.id = id;
}
public String getId() {
return id;
}
public Boolean getForce() {
return force;
}
public void setForce(boolean force) {
this.force = force;
}
@Override
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
}
}
@Override
public int hashCode() {
return Objects.hash(id, force);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
DeleteTransformRequest other = (DeleteTransformRequest) obj;
return Objects.equals(id, other.id) && Objects.equals(force, other.force);
}
}

GetTransformRequest.java

@@ -1,100 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.core.PageParams;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
public class GetTransformRequest implements Validatable {
public static final String ALLOW_NO_MATCH = "allow_no_match";
/**
* Helper method to create a request that will get ALL Transforms
* @return new {@link GetTransformRequest} object for the id "_all"
*/
public static GetTransformRequest getAllTransformRequest() {
return new GetTransformRequest("_all");
}
private final List<String> ids;
private PageParams pageParams;
private Boolean allowNoMatch;
public GetTransformRequest(String... ids) {
this.ids = Arrays.asList(ids);
}
public List<String> getId() {
return ids;
}
public PageParams getPageParams() {
return pageParams;
}
public void setPageParams(PageParams pageParams) {
this.pageParams = pageParams;
}
public Boolean getAllowNoMatch() {
return allowNoMatch;
}
public void setAllowNoMatch(Boolean allowNoMatch) {
this.allowNoMatch = allowNoMatch;
}
@Override
public Optional<ValidationException> validate() {
if (ids == null || ids.isEmpty()) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
}
}
@Override
public int hashCode() {
return Objects.hash(ids, pageParams, allowNoMatch);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
GetTransformRequest other = (GetTransformRequest) obj;
return Objects.equals(ids, other.ids)
&& Objects.equals(pageParams, other.pageParams)
&& Objects.equals(allowNoMatch, other.allowNoMatch);
}
}

GetTransformResponse.java

@@ -1,145 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class GetTransformResponse {
public static final ParseField TRANSFORMS = new ParseField("transforms");
public static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms");
public static final ParseField COUNT = new ParseField("count");
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<InvalidTransforms, Void> INVALID_TRANSFORMS_PARSER = new ConstructingObjectParser<>(
"invalid_transforms",
true,
args -> new InvalidTransforms((List<String>) args[0])
);
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<GetTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_transform",
true,
args -> new GetTransformResponse((List<TransformConfig>) args[0], (long) args[1], (InvalidTransforms) args[2])
);
static {
// Discard the count field which is the size of the transforms array
INVALID_TRANSFORMS_PARSER.declareLong((a, b) -> {}, COUNT);
INVALID_TRANSFORMS_PARSER.declareStringArray(constructorArg(), TRANSFORMS);
PARSER.declareObjectArray(constructorArg(), TransformConfig.PARSER::apply, TRANSFORMS);
PARSER.declareLong(constructorArg(), COUNT);
PARSER.declareObject(optionalConstructorArg(), INVALID_TRANSFORMS_PARSER::apply, INVALID_TRANSFORMS);
}
public static GetTransformResponse fromXContent(final XContentParser parser) {
return GetTransformResponse.PARSER.apply(parser, null);
}
private List<TransformConfig> transformConfigurations;
private long count;
private InvalidTransforms invalidTransforms;
public GetTransformResponse(List<TransformConfig> transformConfigurations, long count, @Nullable InvalidTransforms invalidTransforms) {
this.transformConfigurations = transformConfigurations;
this.count = count;
this.invalidTransforms = invalidTransforms;
}
@Nullable
public InvalidTransforms getInvalidTransforms() {
return invalidTransforms;
}
public long getCount() {
return count;
}
public List<TransformConfig> getTransformConfigurations() {
return transformConfigurations;
}
@Override
public int hashCode() {
return Objects.hash(transformConfigurations, count, invalidTransforms);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final GetTransformResponse that = (GetTransformResponse) other;
return Objects.equals(this.transformConfigurations, that.transformConfigurations)
&& Objects.equals(this.count, that.count)
&& Objects.equals(this.invalidTransforms, that.invalidTransforms);
}
static class InvalidTransforms {
private final List<String> transformIds;
InvalidTransforms(List<String> transformIds) {
this.transformIds = transformIds;
}
public long getCount() {
return transformIds.size();
}
public List<String> getTransformIds() {
return transformIds;
}
@Override
public int hashCode() {
return Objects.hash(transformIds);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final InvalidTransforms that = (InvalidTransforms) other;
return Objects.equals(this.transformIds, that.transformIds);
}
}
}

GetTransformStatsRequest.java

@@ -1,88 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.core.PageParams;
import java.util.Objects;
import java.util.Optional;
public class GetTransformStatsRequest implements Validatable {
private final String id;
private PageParams pageParams;
private Boolean allowNoMatch;
public GetTransformStatsRequest(String id) {
this.id = id;
}
public String getId() {
return id;
}
public PageParams getPageParams() {
return pageParams;
}
public void setPageParams(PageParams pageParams) {
this.pageParams = pageParams;
}
public Boolean getAllowNoMatch() {
return allowNoMatch;
}
public void setAllowNoMatch(Boolean allowNoMatch) {
this.allowNoMatch = allowNoMatch;
}
@Override
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
}
}
@Override
public int hashCode() {
return Objects.hash(id, pageParams, allowNoMatch);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
GetTransformStatsRequest other = (GetTransformStatsRequest) obj;
return Objects.equals(id, other.id)
&& Objects.equals(pageParams, other.pageParams)
&& Objects.equals(allowNoMatch, other.allowNoMatch);
}
}

GetTransformStatsResponse.java

@@ -1,127 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class GetTransformStatsResponse {
public static final ParseField TRANSFORMS = new ParseField("transforms");
public static final ParseField COUNT = new ParseField("count");
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<GetTransformStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_transform_stats_response",
true,
args -> new GetTransformStatsResponse(
(List<TransformStats>) args[0],
(long) args[1],
(List<TaskOperationFailure>) args[2],
(List<ElasticsearchException>) args[3]
)
);
static {
PARSER.declareObjectArray(constructorArg(), TransformStats.PARSER::apply, TRANSFORMS);
PARSER.declareLong(constructorArg(), COUNT);
PARSER.declareObjectArray(
optionalConstructorArg(),
(p, c) -> TaskOperationFailure.fromXContent(p),
AcknowledgedTasksResponse.TASK_FAILURES
);
PARSER.declareObjectArray(
optionalConstructorArg(),
(p, c) -> ElasticsearchException.fromXContent(p),
AcknowledgedTasksResponse.NODE_FAILURES
);
}
public static GetTransformStatsResponse fromXContent(final XContentParser parser) {
return GetTransformStatsResponse.PARSER.apply(parser, null);
}
private final List<TransformStats> transformsStats;
private final long count;
private final List<TaskOperationFailure> taskFailures;
private final List<ElasticsearchException> nodeFailures;
public GetTransformStatsResponse(
List<TransformStats> transformsStats,
long count,
@Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures
) {
this.transformsStats = transformsStats;
this.count = count;
this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(taskFailures);
this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures);
}
public List<TransformStats> getTransformsStats() {
return transformsStats;
}
public long getCount() {
return count;
}
public List<ElasticsearchException> getNodeFailures() {
return nodeFailures;
}
public List<TaskOperationFailure> getTaskFailures() {
return taskFailures;
}
@Override
public int hashCode() {
return Objects.hash(transformsStats, count, nodeFailures, taskFailures);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final GetTransformStatsResponse that = (GetTransformStatsResponse) other;
return Objects.equals(this.transformsStats, that.transformsStats)
&& Objects.equals(this.count, that.count)
&& Objects.equals(this.nodeFailures, that.nodeFailures)
&& Objects.equals(this.taskFailures, that.taskFailures);
}
}

PreviewTransformRequest.java

@@ -1,85 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
public class PreviewTransformRequest implements ToXContentObject, Validatable {
private final TransformConfig config;
public PreviewTransformRequest(TransformConfig config) {
this.config = config;
}
public TransformConfig getConfig() {
return config;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
return config.toXContent(builder, params);
}
@Override
public Optional<ValidationException> validate() {
ValidationException validationException = new ValidationException();
if (config == null) {
validationException.addValidationError("preview requires a non-null transform config");
return Optional.of(validationException);
} else {
if (config.getSource() == null) {
validationException.addValidationError("transform source cannot be null");
}
}
if (validationException.validationErrors().isEmpty()) {
return Optional.empty();
} else {
return Optional.of(validationException);
}
}
@Override
public int hashCode() {
return Objects.hash(config);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PreviewTransformRequest other = (PreviewTransformRequest) obj;
return Objects.equals(config, other.config);
}
}

PreviewTransformResponse.java

@@ -1,207 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class PreviewTransformResponse {
public static class GeneratedDestIndexSettings {
static final ParseField MAPPINGS = new ParseField("mappings");
private static final ParseField SETTINGS = new ParseField("settings");
private static final ParseField ALIASES = new ParseField("aliases");
private final Map<String, Object> mappings;
private final Settings settings;
private final Set<Alias> aliases;
private static final ConstructingObjectParser<GeneratedDestIndexSettings, Void> PARSER = new ConstructingObjectParser<>(
"transform_preview_generated_dest_index",
true,
args -> {
@SuppressWarnings("unchecked")
Map<String, Object> mappings = (Map<String, Object>) args[0];
Settings settings = (Settings) args[1];
@SuppressWarnings("unchecked")
Set<Alias> aliases = (Set<Alias>) args[2];
return new GeneratedDestIndexSettings(mappings, settings, aliases);
}
);
static {
PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), MAPPINGS);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> Settings.fromXContent(p), SETTINGS);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> {
Set<Alias> aliases = new HashSet<>();
while ((p.nextToken()) != XContentParser.Token.END_OBJECT) {
aliases.add(Alias.fromXContent(p));
}
return aliases;
}, ALIASES);
}
public GeneratedDestIndexSettings(Map<String, Object> mappings, Settings settings, Set<Alias> aliases) {
this.mappings = mappings == null ? Collections.emptyMap() : Collections.unmodifiableMap(mappings);
this.settings = settings == null ? Settings.EMPTY : settings;
this.aliases = aliases == null ? Collections.emptySet() : Collections.unmodifiableSet(aliases);
}
public Map<String, Object> getMappings() {
return mappings;
}
public Settings getSettings() {
return settings;
}
public Set<Alias> getAliases() {
return aliases;
}
public static GeneratedDestIndexSettings fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj == null || obj.getClass() != getClass()) {
return false;
}
GeneratedDestIndexSettings other = (GeneratedDestIndexSettings) obj;
return Objects.equals(other.mappings, mappings)
&& Objects.equals(other.settings, settings)
&& Objects.equals(other.aliases, aliases);
}
@Override
public int hashCode() {
return Objects.hash(mappings, settings, aliases);
}
}
public static final ParseField PREVIEW = new ParseField("preview");
public static final ParseField GENERATED_DEST_INDEX_SETTINGS = new ParseField("generated_dest_index");
private final List<Map<String, Object>> docs;
private final GeneratedDestIndexSettings generatedDestIndexSettings;
private static final ConstructingObjectParser<PreviewTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_transform_preview",
true,
args -> {
@SuppressWarnings("unchecked")
List<Map<String, Object>> docs = (List<Map<String, Object>>) args[0];
GeneratedDestIndexSettings generatedDestIndex = (GeneratedDestIndexSettings) args[1];
// ensure generatedDestIndex is not null
if (generatedDestIndex == null) {
// BWC parsing the output from nodes < 7.7
@SuppressWarnings("unchecked")
Map<String, Object> mappings = (Map<String, Object>) args[2];
generatedDestIndex = new GeneratedDestIndexSettings(mappings, null, null);
}
return new PreviewTransformResponse(docs, generatedDestIndex);
}
);
static {
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> p.mapOrdered(), PREVIEW);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> GeneratedDestIndexSettings.fromXContent(p), GENERATED_DEST_INDEX_SETTINGS);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), GeneratedDestIndexSettings.MAPPINGS);
}
public PreviewTransformResponse(List<Map<String, Object>> docs, GeneratedDestIndexSettings generatedDestIndexSettings) {
this.docs = docs;
this.generatedDestIndexSettings = generatedDestIndexSettings;
}
public List<Map<String, Object>> getDocs() {
return docs;
}
public GeneratedDestIndexSettings getGeneratedDestIndexSettings() {
return generatedDestIndexSettings;
}
public Map<String, Object> getMappings() {
return generatedDestIndexSettings.getMappings();
}
public Settings getSettings() {
return generatedDestIndexSettings.getSettings();
}
public Set<Alias> getAliases() {
return generatedDestIndexSettings.getAliases();
}
public CreateIndexRequest getCreateIndexRequest(String index) {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(index);
createIndexRequest.aliases(generatedDestIndexSettings.getAliases());
createIndexRequest.settings(generatedDestIndexSettings.getSettings());
createIndexRequest.mapping(generatedDestIndexSettings.getMappings());
return createIndexRequest;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj == null || obj.getClass() != getClass()) {
return false;
}
PreviewTransformResponse other = (PreviewTransformResponse) obj;
return Objects.equals(other.docs, docs) && Objects.equals(other.generatedDestIndexSettings, generatedDestIndexSettings);
}
@Override
public int hashCode() {
return Objects.hash(docs, generatedDestIndexSettings);
}
public static PreviewTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}

PutTransformRequest.java

@@ -1,105 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
public class PutTransformRequest implements ToXContentObject, Validatable {
public static final String DEFER_VALIDATION = "defer_validation";
private final TransformConfig config;
private Boolean deferValidation;
public PutTransformRequest(TransformConfig config) {
this.config = config;
}
public TransformConfig getConfig() {
return config;
}
public Boolean getDeferValidation() {
return deferValidation;
}
/**
* Indicates if deferrable validations should be skipped until the transform starts
*
* @param deferValidation {@code true} will cause validations to be deferred
*/
public void setDeferValidation(boolean deferValidation) {
this.deferValidation = deferValidation;
}
@Override
public Optional<ValidationException> validate() {
ValidationException validationException = new ValidationException();
if (config == null) {
validationException.addValidationError("put requires a non-null transform config");
return Optional.of(validationException);
} else {
if (config.getId() == null) {
validationException.addValidationError("transform id cannot be null");
}
if (config.getSource() == null) {
validationException.addValidationError("transform source cannot be null");
}
if (config.getDestination() == null) {
validationException.addValidationError("transform destination cannot be null");
}
}
if (validationException.validationErrors().isEmpty()) {
return Optional.empty();
} else {
return Optional.of(validationException);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return config.toXContent(builder, params);
}
@Override
public int hashCode() {
return Objects.hash(config);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PutTransformRequest other = (PutTransformRequest) obj;
return Objects.equals(config, other.config);
}
}
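
Editor's note: a minimal sketch of how the removed PutTransformRequest was typically assembled, using only the builders shown elsewhere in this diff; the transform id, index names, and the externally built pivot are illustrative assumptions, not values from this commit.

import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.SourceConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;

public class PutTransformRequestExample {
    // Builds a put request for a hypothetical "reviews-by-user" transform; the pivot
    // configuration is assumed to be constructed elsewhere.
    static PutTransformRequest buildRequest(PivotConfig pivot) {
        TransformConfig config = TransformConfig.builder()
            .setId("reviews-by-user")                                        // illustrative id
            .setSource(SourceConfig.builder().setIndex("reviews").build())   // illustrative source index
            .setDest(DestConfig.builder().setIndex("reviews-by-user-dest").build())
            .setPivotConfig(pivot)
            .build();
        PutTransformRequest request = new PutTransformRequest(config);
        request.setDeferValidation(true); // skip deferrable validations until the transform starts
        return request;
    }
}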

View File

@@ -1,84 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.unit.TimeValue;
import java.util.Objects;
import java.util.Optional;
public class StartTransformRequest implements Validatable {
private final String id;
private TimeValue timeout;
public StartTransformRequest(String id) {
this.id = id;
}
public StartTransformRequest(String id, TimeValue timeout) {
this.id = id;
this.timeout = timeout;
}
public String getId() {
return id;
}
public TimeValue getTimeout() {
return timeout;
}
public void setTimeout(TimeValue timeout) {
this.timeout = timeout;
}
@Override
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
}
}
@Override
public int hashCode() {
return Objects.hash(id, timeout);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
StartTransformRequest other = (StartTransformRequest) obj;
return Objects.equals(this.id, other.id)
&& Objects.equals(this.timeout, other.timeout);
}
}
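
Editor's note: a short usage sketch for the removed StartTransformRequest, based only on the constructors above; the id and timeout are illustrative.

import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.common.unit.TimeValue;

public class StartTransformRequestExample {
    // Starts the hypothetical "reviews-by-user" transform, waiting up to 30s for the task to start.
    static StartTransformRequest buildRequest() {
        return new StartTransformRequest("reviews-by-user", TimeValue.timeValueSeconds(30));
    }
}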

View File

@@ -1,48 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
public class StartTransformResponse extends AcknowledgedTasksResponse {
private static final String ACKNOWLEDGED = "acknowledged";
private static final ConstructingObjectParser<StartTransformResponse, Void> PARSER =
AcknowledgedTasksResponse.generateParser("start_transform_response", StartTransformResponse::new,
ACKNOWLEDGED);
public static StartTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public StartTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
super(acknowledged, taskFailures, nodeFailures);
}
}

View File

@@ -1,119 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.unit.TimeValue;
import java.util.Objects;
import java.util.Optional;
public class StopTransformRequest implements Validatable {
public static final String WAIT_FOR_CHECKPOINT = "wait_for_checkpoint";
private final String id;
private Boolean waitForCompletion;
private Boolean waitForCheckpoint;
private TimeValue timeout;
private Boolean allowNoMatch;
public StopTransformRequest(String id) {
this(id, null, null, null);
}
public StopTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout, Boolean waitForCheckpoint) {
this.id = id;
this.waitForCompletion = waitForCompletion;
this.timeout = timeout;
this.waitForCheckpoint = waitForCheckpoint;
}
public String getId() {
return id;
}
public void setWaitForCompletion(Boolean waitForCompletion) {
this.waitForCompletion = waitForCompletion;
}
public Boolean getWaitForCompletion() {
return waitForCompletion;
}
public void setTimeout(TimeValue timeout) {
this.timeout = timeout;
}
public TimeValue getTimeout() {
return timeout;
}
public Boolean getAllowNoMatch() {
return allowNoMatch;
}
public void setAllowNoMatch(Boolean allowNoMatch) {
this.allowNoMatch = allowNoMatch;
}
public Boolean getWaitForCheckpoint() {
return waitForCheckpoint;
}
public void setWaitForCheckpoint(Boolean waitForCheckpoint) {
this.waitForCheckpoint = waitForCheckpoint;
}
@Override
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
}
}
@Override
public int hashCode() {
return Objects.hash(id, waitForCompletion, timeout, allowNoMatch, waitForCheckpoint);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
StopTransformRequest other = (StopTransformRequest) obj;
return Objects.equals(this.id, other.id)
&& Objects.equals(this.waitForCompletion, other.waitForCompletion)
&& Objects.equals(this.timeout, other.timeout)
&& Objects.equals(this.waitForCheckpoint, other.waitForCheckpoint)
&& Objects.equals(this.allowNoMatch, other.allowNoMatch);
}
}
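
Editor's note: a sketch of the removed StopTransformRequest using the four-argument constructor above; the id, timeout, and flag values are illustrative.

import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.common.unit.TimeValue;

public class StopTransformRequestExample {
    // Requests a stop that blocks until the transform has halted and the current checkpoint completes.
    static StopTransformRequest buildRequest() {
        StopTransformRequest request =
            new StopTransformRequest("reviews-by-user", true, TimeValue.timeValueMinutes(1), true);
        request.setAllowNoMatch(true); // do not fail if the id expression matches no transforms
        return request;
    }
}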

View File

@@ -1,47 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
public class StopTransformResponse extends AcknowledgedTasksResponse {
private static final String ACKNOWLEDGED = "acknowledged";
private static final ConstructingObjectParser<StopTransformResponse, Void> PARSER = AcknowledgedTasksResponse
.generateParser("stop_transform_response", StopTransformResponse::new, ACKNOWLEDGED);
public static StopTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public StopTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
super(acknowledged, taskFailures, nodeFailures);
}
}

View File

@@ -1,41 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.transform.transforms.SyncConfig;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import java.util.Arrays;
import java.util.List;
public class TransformNamedXContentProvider implements NamedXContentProvider {
@Override
public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
return Arrays.asList(
new NamedXContentRegistry.Entry(SyncConfig.class,
new ParseField(TimeSyncConfig.NAME),
TimeSyncConfig::fromXContent));
}
}
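
Editor's note: a sketch of how the provider above could feed a NamedXContentRegistry so that the "sync" section of a transform config parses into a TimeSyncConfig; the high-level client normally wires this registry internally, so this standalone wiring is an assumption for illustration.

import org.elasticsearch.client.transform.TransformNamedXContentProvider;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;

public class TransformXContentRegistryExample {
    // Builds a registry containing the transform sync-config parsers registered by the provider.
    static NamedXContentRegistry transformRegistry() {
        return new NamedXContentRegistry(new TransformNamedXContentProvider().getNamedXContentParsers());
    }
}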

View File

@@ -1,103 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
public class UpdateTransformRequest implements ToXContentObject, Validatable {
private final TransformConfigUpdate update;
private final String id;
private Boolean deferValidation;
public UpdateTransformRequest(TransformConfigUpdate update, String id) {
this.update = update;
this.id = id;
}
public TransformConfigUpdate getUpdate() {
return update;
}
public Boolean getDeferValidation() {
return deferValidation;
}
public String getId() {
return id;
}
/**
* Indicates if deferrable validations should be skipped until the transform starts
*
* @param deferValidation {@code true} will cause validations to be deferred
*/
public void setDeferValidation(boolean deferValidation) {
this.deferValidation = deferValidation;
}
@Override
public Optional<ValidationException> validate() {
ValidationException validationException = new ValidationException();
if (update == null) {
validationException.addValidationError("put requires a non-null transform config update object");
}
if (id == null) {
validationException.addValidationError("transform id cannot be null");
}
if (validationException.validationErrors().isEmpty()) {
return Optional.empty();
} else {
return Optional.of(validationException);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return update.toXContent(builder, params);
}
@Override
public int hashCode() {
return Objects.hash(update, deferValidation, id);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
UpdateTransformRequest other = (UpdateTransformRequest) obj;
return Objects.equals(update, other.update)
&& Objects.equals(id, other.id)
&& Objects.equals(deferValidation, other.deferValidation);
}
}
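
Editor's note: a sketch of the removed UpdateTransformRequest. TransformConfigUpdate construction is not shown in this diff, so the update is taken as a parameter; the transform id is illustrative.

import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;

public class UpdateTransformRequestExample {
    // Wraps an already-built partial update for the hypothetical "reviews-by-user" transform.
    static UpdateTransformRequest buildRequest(TransformConfigUpdate update) {
        UpdateTransformRequest request = new UpdateTransformRequest(update, "reviews-by-user");
        request.setDeferValidation(false); // run all validations immediately
        return request;
    }
}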

View File

@@ -1,61 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.Objects;
public class UpdateTransformResponse {
public static UpdateTransformResponse fromXContent(final XContentParser parser) {
return new UpdateTransformResponse(TransformConfig.PARSER.apply(parser, null));
}
private TransformConfig transformConfiguration;
public UpdateTransformResponse(TransformConfig transformConfiguration) {
this.transformConfiguration = transformConfiguration;
}
public TransformConfig getTransformConfiguration() {
return transformConfiguration;
}
@Override
public int hashCode() {
return Objects.hash(transformConfiguration);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final UpdateTransformResponse that = (UpdateTransformResponse) other;
return Objects.equals(this.transformConfiguration, that.transformConfiguration);
}
}

View File

@@ -1,129 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Configuration containing the destination index for the {@link TransformConfig}
*/
public class DestConfig implements ToXContentObject {
public static final ParseField INDEX = new ParseField("index");
public static final ParseField PIPELINE = new ParseField("pipeline");
public static final ConstructingObjectParser<DestConfig, Void> PARSER = new ConstructingObjectParser<>(
"transform_config_dest",
true,
args -> new DestConfig((String) args[0], (String) args[1])
);
static {
PARSER.declareString(constructorArg(), INDEX);
PARSER.declareString(optionalConstructorArg(), PIPELINE);
}
private final String index;
private final String pipeline;
public DestConfig(String index, String pipeline) {
this.index = Objects.requireNonNull(index, INDEX.getPreferredName());
this.pipeline = pipeline;
}
public String getIndex() {
return index;
}
public String getPipeline() {
return pipeline;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(INDEX.getPreferredName(), index);
if (pipeline != null) {
builder.field(PIPELINE.getPreferredName(), pipeline);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (other == this) {
return true;
}
if (other == null || other.getClass() != getClass()) {
return false;
}
DestConfig that = (DestConfig) other;
return Objects.equals(index, that.index) && Objects.equals(pipeline, that.pipeline);
}
@Override
public int hashCode() {
return Objects.hash(index, pipeline);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String index;
private String pipeline;
/**
* Sets the index to which to write the data
* @param index where to write the data
* @return The {@link Builder} with index set
*/
public Builder setIndex(String index) {
this.index = Objects.requireNonNull(index, INDEX.getPreferredName());
return this;
}
/**
* Sets the pipeline through which the indexed documents should be processed
* @param pipeline The pipeline ID
* @return The {@link Builder} with pipeline set
*/
public Builder setPipeline(String pipeline) {
this.pipeline = pipeline;
return this;
}
public DestConfig build() {
return new DestConfig(index, pipeline);
}
}
}
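
Editor's note: a minimal sketch of the DestConfig builder shown above; the destination index and ingest pipeline names are illustrative.

import org.elasticsearch.client.transform.transforms.DestConfig;

public class DestConfigExample {
    // Destination index plus an optional ingest pipeline applied to every indexed document.
    static DestConfig buildDest() {
        return DestConfig.builder()
            .setIndex("reviews-by-user-dest")
            .setPipeline("add-ingest-timestamp")
            .build();
    }
}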

View File

@@ -1,156 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
/**
* A POJO class containing an Elasticsearch node's attributes
*/
public class NodeAttributes implements ToXContentObject {
public static final ParseField ID = new ParseField("id");
public static final ParseField NAME = new ParseField("name");
public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id");
public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address");
public static final ParseField ATTRIBUTES = new ParseField("attributes");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<NodeAttributes, Void> PARSER =
new ConstructingObjectParser<>("node", true,
(a) -> {
int i = 0;
String id = (String) a[i++];
String name = (String) a[i++];
String ephemeralId = (String) a[i++];
String transportAddress = (String) a[i++];
Map<String, String> attributes = (Map<String, String>) a[i];
return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes);
});
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS);
PARSER.declareField(ConstructingObjectParser.constructorArg(),
(p, c) -> p.mapStrings(),
ATTRIBUTES,
ObjectParser.ValueType.OBJECT);
}
private final String id;
private final String name;
private final String ephemeralId;
private final String transportAddress;
private final Map<String, String> attributes;
public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map<String, String> attributes) {
this.id = id;
this.name = name;
this.ephemeralId = ephemeralId;
this.transportAddress = transportAddress;
this.attributes = Collections.unmodifiableMap(attributes);
}
/**
* The unique identifier of the node.
*/
public String getId() {
return id;
}
/**
* The node name.
*/
public String getName() {
return name;
}
/**
* The ephemeral id of the node.
*/
public String getEphemeralId() {
return ephemeralId;
}
/**
* The host and port where transport connections are accepted.
*/
public String getTransportAddress() {
return transportAddress;
}
/**
* Additional attributes related to this node
*/
public Map<String, String> getAttributes() {
return attributes;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(ID.getPreferredName(), id);
builder.field(NAME.getPreferredName(), name);
builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId);
builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress);
builder.field(ATTRIBUTES.getPreferredName(), attributes);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(id, name, ephemeralId, transportAddress, attributes);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
NodeAttributes that = (NodeAttributes) other;
return Objects.equals(id, that.id) &&
Objects.equals(name, that.name) &&
Objects.equals(ephemeralId, that.ephemeralId) &&
Objects.equals(transportAddress, that.transportAddress) &&
Objects.equals(attributes, that.attributes);
}
@Override
public String toString() {
return Strings.toString(this);
}
}

View File

@@ -1,77 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.util.Objects;
/**
* Object for encapsulating the desired Query for a Transform
*/
public class QueryConfig implements ToXContentObject {
private final QueryBuilder query;
public static QueryConfig fromXContent(XContentParser parser) throws IOException {
QueryBuilder query = AbstractQueryBuilder.parseInnerQueryBuilder(parser);
return new QueryConfig(query);
}
public QueryConfig(QueryBuilder query) {
this.query = query;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
query.toXContent(builder, params);
return builder;
}
public QueryBuilder getQuery() {
return query;
}
@Override
public int hashCode() {
return Objects.hash(query);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final QueryConfig that = (QueryConfig) other;
return Objects.equals(this.query, that.query);
}
}
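
Editor's note: a sketch of wrapping a standard QueryBuilder in the QueryConfig shown above; the field and value are illustrative.

import org.elasticsearch.client.transform.transforms.QueryConfig;
import org.elasticsearch.index.query.QueryBuilders;

public class QueryConfigExample {
    // Restricts the transform's source data to a single category.
    static QueryConfig buildQuery() {
        return new QueryConfig(QueryBuilders.termQuery("category", "books"));
    }
}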

View File

@@ -1,150 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class SettingsConfig implements ToXContentObject {
private static final ParseField MAX_PAGE_SEARCH_SIZE = new ParseField("max_page_search_size");
private static final ParseField DOCS_PER_SECOND = new ParseField("docs_per_second");
private static final int DEFAULT_MAX_PAGE_SEARCH_SIZE = -1;
private static final float DEFAULT_DOCS_PER_SECOND = -1F;
private final Integer maxPageSearchSize;
private final Float docsPerSecond;
private static final ConstructingObjectParser<SettingsConfig, Void> PARSER = new ConstructingObjectParser<>(
"settings_config",
true,
args -> new SettingsConfig((Integer) args[0], (Float) args[1])
);
static {
PARSER.declareIntOrNull(optionalConstructorArg(), DEFAULT_MAX_PAGE_SEARCH_SIZE, MAX_PAGE_SEARCH_SIZE);
PARSER.declareFloatOrNull(optionalConstructorArg(), DEFAULT_DOCS_PER_SECOND, DOCS_PER_SECOND);
}
public static SettingsConfig fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
SettingsConfig(Integer maxPageSearchSize, Float docsPerSecond) {
this.maxPageSearchSize = maxPageSearchSize;
this.docsPerSecond = docsPerSecond;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (maxPageSearchSize != null) {
if (maxPageSearchSize.equals(DEFAULT_MAX_PAGE_SEARCH_SIZE)) {
builder.field(MAX_PAGE_SEARCH_SIZE.getPreferredName(), (Integer) null);
} else {
builder.field(MAX_PAGE_SEARCH_SIZE.getPreferredName(), maxPageSearchSize);
}
}
if (docsPerSecond != null) {
if (docsPerSecond.equals(DEFAULT_DOCS_PER_SECOND)) {
builder.field(DOCS_PER_SECOND.getPreferredName(), (Float) null);
} else {
builder.field(DOCS_PER_SECOND.getPreferredName(), docsPerSecond);
}
}
builder.endObject();
return builder;
}
public Integer getMaxPageSearchSize() {
return maxPageSearchSize;
}
public Float getDocsPerSecond() {
return docsPerSecond;
}
@Override
public boolean equals(Object other) {
if (other == this) {
return true;
}
if (other == null || other.getClass() != getClass()) {
return false;
}
SettingsConfig that = (SettingsConfig) other;
return Objects.equals(maxPageSearchSize, that.maxPageSearchSize) && Objects.equals(docsPerSecond, that.docsPerSecond);
}
@Override
public int hashCode() {
return Objects.hash(maxPageSearchSize, docsPerSecond);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private Integer maxPageSearchSize;
private Float docsPerSecond;
/**
* Sets the maximum paging size (maxPageSearchSize) that the transform can use when
* pulling the data from the source index.
*
* If OOM is triggered, the paging maxPageSearchSize is dynamically reduced so that the transform can continue to gather data.
*
* @param maxPageSearchSize Integer value between 10 and 10_000
* @return the {@link Builder} with the paging maxPageSearchSize set.
*/
public Builder setMaxPageSearchSize(Integer maxPageSearchSize) {
this.maxPageSearchSize = maxPageSearchSize == null ? DEFAULT_MAX_PAGE_SEARCH_SIZE : maxPageSearchSize;
return this;
}
/**
* Sets the docs per second that the transform can use when pulling the data from the source index.
*
* This setting throttles the transform by issuing queries less often; however, processing still happens in
* batches. A value of 0 disables throttling (default).
*
* @param docsPerSecond Float value
* @return the {@link Builder} with requestsPerSecond set.
*/
public Builder setRequestsPerSecond(Float docsPerSecond) {
this.docsPerSecond = docsPerSecond == null ? DEFAULT_DOCS_PER_SECOND : docsPerSecond;
return this;
}
public SettingsConfig build() {
return new SettingsConfig(maxPageSearchSize, docsPerSecond);
}
}
}
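
Editor's note: a sketch of the SettingsConfig builder shown above; both values are illustrative.

import org.elasticsearch.client.transform.transforms.SettingsConfig;

public class SettingsConfigExample {
    // Caps each search page at 500 and throttles the transform to roughly 100 docs per second.
    static SettingsConfig buildSettings() {
        return SettingsConfig.builder()
            .setMaxPageSearchSize(500)
            .setRequestsPerSecond(100.0F)
            .build();
    }
}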

View File

@@ -1,167 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Class encapsulating all options for how a {@link TransformConfig} gathers data
*/
public class SourceConfig implements ToXContentObject {
public static final ParseField QUERY = new ParseField("query");
public static final ParseField INDEX = new ParseField("index");
public static final ConstructingObjectParser<SourceConfig, Void> PARSER = new ConstructingObjectParser<>("transform_config_source",
true,
args -> {
@SuppressWarnings("unchecked")
String[] index = ((List<String>)args[0]).toArray(new String[0]);
// default handling: if the user does not specify a query, we default to match_all
QueryConfig queryConfig = (QueryConfig) args[1];
return new SourceConfig(index, queryConfig);
});
static {
PARSER.declareStringArray(constructorArg(), INDEX);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> QueryConfig.fromXContent(p), QUERY);
}
private final String[] index;
private final QueryConfig queryConfig;
/**
* Create a new SourceConfig for the provided indices.
*
* {@link QueryConfig} defaults to a MatchAll query.
*
* @param index Any number of indices. At least one non-null, non-empty index should be provided
*/
public SourceConfig(String... index) {
this.index = index;
this.queryConfig = null;
}
/**
* Create a new SourceConfig for the provided indices, from which data is gathered with the provided {@link QueryConfig}
*
* @param index Any number of indices. At least one non-null, non-empty index should be provided
* @param queryConfig A QueryConfig object that contains the desired query. Defaults to MatchAll query.
*/
SourceConfig(String[] index, QueryConfig queryConfig) {
this.index = index;
this.queryConfig = queryConfig;
}
public String[] getIndex() {
return index;
}
public QueryConfig getQueryConfig() {
return queryConfig;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (index != null) {
builder.array(INDEX.getPreferredName(), index);
}
if (queryConfig != null) {
builder.field(QUERY.getPreferredName(), queryConfig);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (other == this) {
return true;
}
if (other == null || other.getClass() != getClass()) {
return false;
}
SourceConfig that = (SourceConfig) other;
return Arrays.equals(index, that.index) && Objects.equals(queryConfig, that.queryConfig);
}
@Override
public int hashCode(){
// Using Arrays.hashCode as Objects.hash does not deeply hash nested arrays. Since we are doing Array.equals, this is necessary
int hash = Arrays.hashCode(index);
return 31 * hash + (queryConfig == null ? 0 : queryConfig.hashCode());
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String[] index;
private QueryConfig queryConfig;
/**
* Sets the indices from which to fetch data
* @param index The indices from which to fetch data
* @return The {@link Builder} with indices set
*/
public Builder setIndex(String... index) {
this.index = index;
return this;
}
/**
* Sets the {@link QueryConfig} object that references the desired query to use when fetching the data
* @param queryConfig The {@link QueryConfig} to use when fetching data
* @return The {@link Builder} with queryConfig set
*/
public Builder setQueryConfig(QueryConfig queryConfig) {
this.queryConfig = queryConfig;
return this;
}
/**
* Sets the query to use when fetching the data. Convenience method for {@link #setQueryConfig(QueryConfig)}
* @param query The {@link QueryBuilder} to use when fetching data (overwrites the {@link QueryConfig})
* @return The {@link Builder} with queryConfig set
*/
public Builder setQuery(QueryBuilder query) {
return this.setQueryConfig(new QueryConfig(query));
}
public SourceConfig build() {
return new SourceConfig(index, queryConfig);
}
}
}
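
Editor's note: a sketch of the SourceConfig builder shown above; the index names and the filter query are illustrative.

import org.elasticsearch.client.transform.transforms.SourceConfig;
import org.elasticsearch.index.query.QueryBuilders;

public class SourceConfigExample {
    // Reads from two source indices and filters out documents with a "draft" status.
    static SourceConfig buildSource() {
        return SourceConfig.builder()
            .setIndex("reviews-2020", "reviews-2021")
            .setQuery(QueryBuilders.boolQuery()
                .mustNot(QueryBuilders.termQuery("status", "draft")))
            .build();
    }
}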

View File

@@ -1,30 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.ToXContentObject;
public interface SyncConfig extends ToXContentObject {
/**
* Returns the name of the writeable object
*/
String getName();
}

View File

@@ -1,108 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TimeSyncConfig implements SyncConfig {
public static final String NAME = "time";
private static final ParseField FIELD = new ParseField("field");
private static final ParseField DELAY = new ParseField("delay");
private final String field;
private final TimeValue delay;
private static final ConstructingObjectParser<TimeSyncConfig, Void> PARSER = new ConstructingObjectParser<>("time_sync_config", true,
args -> new TimeSyncConfig((String) args[0], args[1] != null ? (TimeValue) args[1] : TimeValue.ZERO));
static {
PARSER.declareString(constructorArg(), FIELD);
PARSER.declareField(optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), DELAY.getPreferredName()), DELAY,
ObjectParser.ValueType.STRING_OR_NULL);
}
public static TimeSyncConfig fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
public TimeSyncConfig(String field, TimeValue delay) {
this.field = field;
this.delay = delay;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(FIELD.getPreferredName(), field);
if (delay.duration() > 0) {
builder.field(DELAY.getPreferredName(), delay.getStringRep());
}
builder.endObject();
return builder;
}
public String getField() {
return field;
}
public TimeValue getDelay() {
return delay;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final TimeSyncConfig that = (TimeSyncConfig) other;
return Objects.equals(this.field, that.field)
&& Objects.equals(this.delay, that.delay);
}
@Override
public int hashCode() {
return Objects.hash(field, delay);
}
@Override
public String getName() {
return NAME;
}
}
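
Editor's note: a sketch of the TimeSyncConfig constructor shown above; the field name and delay are illustrative.

import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.common.unit.TimeValue;

public class TimeSyncConfigExample {
    // Uses "timestamp" to detect new data, with a 60s delay to allow for ingest latency.
    static TimeSyncConfig buildSync() {
        return new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(60));
    }
}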

View File

@@ -1,123 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TransformCheckpointStats {
public static final ParseField CHECKPOINT = new ParseField("checkpoint");
public static final ParseField POSITION = new ParseField("position");
public static final ParseField CHECKPOINT_PROGRESS = new ParseField("checkpoint_progress");
public static final ParseField TIMESTAMP_MILLIS = new ParseField("timestamp_millis");
public static final ParseField TIME_UPPER_BOUND_MILLIS = new ParseField("time_upper_bound_millis");
public static final TransformCheckpointStats EMPTY = new TransformCheckpointStats(0L, null, null, 0L, 0L);
private final long checkpoint;
private final TransformIndexerPosition position;
private final TransformProgress checkpointProgress;
private final long timestampMillis;
private final long timeUpperBoundMillis;
public static final ConstructingObjectParser<TransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
"transform_checkpoint_stats", true, args -> {
long checkpoint = args[0] == null ? 0L : (Long) args[0];
TransformIndexerPosition position = (TransformIndexerPosition) args[1];
TransformProgress checkpointProgress = (TransformProgress) args[2];
long timestamp = args[3] == null ? 0L : (Long) args[3];
long timeUpperBound = args[4] == null ? 0L : (Long) args[4];
return new TransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
});
static {
LENIENT_PARSER.declareLong(optionalConstructorArg(), CHECKPOINT);
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformIndexerPosition.PARSER, POSITION);
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformProgress.PARSER, CHECKPOINT_PROGRESS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIMESTAMP_MILLIS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIME_UPPER_BOUND_MILLIS);
}
public static TransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
return LENIENT_PARSER.parse(parser, null);
}
public TransformCheckpointStats(final long checkpoint, final TransformIndexerPosition position,
final TransformProgress checkpointProgress, final long timestampMillis,
final long timeUpperBoundMillis) {
this.checkpoint = checkpoint;
this.position = position;
this.checkpointProgress = checkpointProgress;
this.timestampMillis = timestampMillis;
this.timeUpperBoundMillis = timeUpperBoundMillis;
}
public long getCheckpoint() {
return checkpoint;
}
public TransformIndexerPosition getPosition() {
return position;
}
public TransformProgress getCheckpointProgress() {
return checkpointProgress;
}
public long getTimestampMillis() {
return timestampMillis;
}
public long getTimeUpperBoundMillis() {
return timeUpperBoundMillis;
}
@Override
public int hashCode() {
return Objects.hash(checkpoint, position, checkpointProgress, timestampMillis, timeUpperBoundMillis);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
TransformCheckpointStats that = (TransformCheckpointStats) other;
return this.checkpoint == that.checkpoint
&& Objects.equals(this.position, that.position)
&& Objects.equals(this.checkpointProgress, that.checkpointProgress)
&& this.timestampMillis == that.timestampMillis
&& this.timeUpperBoundMillis == that.timeUpperBoundMillis;
}
}

View File

@@ -1,124 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.time.Instant;
import java.util.Objects;
public class TransformCheckpointingInfo {
public static final ParseField LAST_CHECKPOINT = new ParseField("last", "current");
public static final ParseField NEXT_CHECKPOINT = new ParseField("next", "in_progress");
public static final ParseField OPERATIONS_BEHIND = new ParseField("operations_behind");
public static final ParseField CHANGES_LAST_DETECTED_AT = new ParseField("changes_last_detected_at");
private final TransformCheckpointStats last;
private final TransformCheckpointStats next;
private final long operationsBehind;
private final Instant changesLastDetectedAt;
private static final ConstructingObjectParser<TransformCheckpointingInfo, Void> LENIENT_PARSER =
new ConstructingObjectParser<>(
"transform_checkpointing_info",
true,
a -> {
long behind = a[2] == null ? 0L : (Long) a[2];
Instant changesLastDetectedAt = (Instant)a[3];
return new TransformCheckpointingInfo(
a[0] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[0],
a[1] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[1],
behind,
changesLastDetectedAt);
});
static {
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> TransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> TransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), OPERATIONS_BEHIND);
LENIENT_PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
p -> TimeUtil.parseTimeFieldToInstant(p, CHANGES_LAST_DETECTED_AT.getPreferredName()),
CHANGES_LAST_DETECTED_AT,
ObjectParser.ValueType.VALUE);
}
public TransformCheckpointingInfo(TransformCheckpointStats last,
TransformCheckpointStats next,
long operationsBehind,
Instant changesLastDetectedAt) {
this.last = Objects.requireNonNull(last);
this.next = Objects.requireNonNull(next);
this.operationsBehind = operationsBehind;
this.changesLastDetectedAt = changesLastDetectedAt;
}
public TransformCheckpointStats getLast() {
return last;
}
public TransformCheckpointStats getNext() {
return next;
}
public long getOperationsBehind() {
return operationsBehind;
}
@Nullable
public Instant getChangesLastDetectedAt() {
return changesLastDetectedAt;
}
public static TransformCheckpointingInfo fromXContent(XContentParser p) {
return LENIENT_PARSER.apply(p, null);
}
@Override
public int hashCode() {
return Objects.hash(last, next, operationsBehind, changesLastDetectedAt);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
TransformCheckpointingInfo that = (TransformCheckpointingInfo) other;
return Objects.equals(this.last, that.last) &&
Objects.equals(this.next, that.next) &&
this.operationsBehind == that.operationsBehind &&
Objects.equals(this.changesLastDetectedAt, that.changesLastDetectedAt);
}
}

View File

@@ -1,345 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.Version;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.time.Instant;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TransformConfig implements ToXContentObject {
public static final ParseField ID = new ParseField("id");
public static final ParseField SOURCE = new ParseField("source");
public static final ParseField DEST = new ParseField("dest");
public static final ParseField FREQUENCY = new ParseField("frequency");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField SYNC = new ParseField("sync");
public static final ParseField SETTINGS = new ParseField("settings");
public static final ParseField VERSION = new ParseField("version");
public static final ParseField CREATE_TIME = new ParseField("create_time");
// types of transforms
public static final ParseField PIVOT_TRANSFORM = new ParseField("pivot");
private final String id;
private final SourceConfig source;
private final DestConfig dest;
private final TimeValue frequency;
private final SyncConfig syncConfig;
private final SettingsConfig settings;
private final PivotConfig pivotConfig;
private final String description;
private final Version transformVersion;
private final Instant createTime;
public static final ConstructingObjectParser<TransformConfig, Void> PARSER = new ConstructingObjectParser<>(
"transform",
true,
(args) -> {
String id = (String) args[0];
SourceConfig source = (SourceConfig) args[1];
DestConfig dest = (DestConfig) args[2];
TimeValue frequency = (TimeValue) args[3];
SyncConfig syncConfig = (SyncConfig) args[4];
PivotConfig pivotConfig = (PivotConfig) args[5];
String description = (String) args[6];
SettingsConfig settings = (SettingsConfig) args[7];
Instant createTime = (Instant) args[8];
String transformVersion = (String) args[9];
return new TransformConfig(
id,
source,
dest,
frequency,
syncConfig,
pivotConfig,
description,
settings,
createTime,
transformVersion
);
}
);
static {
PARSER.declareString(constructorArg(), ID);
PARSER.declareObject(constructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), SOURCE);
PARSER.declareObject(constructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DEST);
PARSER.declareField(
optionalConstructorArg(),
p -> TimeValue.parseTimeValue(p.text(), FREQUENCY.getPreferredName()),
FREQUENCY,
ObjectParser.ValueType.STRING
);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), SYNC);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p), PIVOT_TRANSFORM);
PARSER.declareString(optionalConstructorArg(), DESCRIPTION);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SettingsConfig.fromXContent(p), SETTINGS);
PARSER.declareField(
optionalConstructorArg(),
p -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()),
CREATE_TIME,
ObjectParser.ValueType.VALUE
);
PARSER.declareString(optionalConstructorArg(), VERSION);
}
private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser);
SyncConfig syncConfig = parser.namedObject(SyncConfig.class, parser.currentName(), true);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
return syncConfig;
}
public static TransformConfig fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
/**
* Helper method for previewing a transform configuration
*
* The TransformConfig returned from this method should only be used for previewing the resulting data.
*
* A new, valid, TransformConfig with an appropriate destination and ID will have to be constructed to create
* the transform.
* @param source Source configuration for gathering the data
* @param pivotConfig Pivot config to preview
* @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
*/
public static TransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null, null);
}
TransformConfig(
final String id,
final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
final PivotConfig pivotConfig,
final String description,
final SettingsConfig settings,
final Instant createTime,
final String version
) {
this.id = id;
this.source = source;
this.dest = dest;
this.frequency = frequency;
this.syncConfig = syncConfig;
this.pivotConfig = pivotConfig;
this.description = description;
this.settings = settings;
this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli());
this.transformVersion = version == null ? null : Version.fromString(version);
}
public String getId() {
return id;
}
public SourceConfig getSource() {
return source;
}
public DestConfig getDestination() {
return dest;
}
public TimeValue getFrequency() {
return frequency;
}
public SyncConfig getSyncConfig() {
return syncConfig;
}
public PivotConfig getPivotConfig() {
return pivotConfig;
}
public Version getVersion() {
return transformVersion;
}
public Instant getCreateTime() {
return createTime;
}
@Nullable
public String getDescription() {
return description;
}
@Nullable
public SettingsConfig getSettings() {
return settings;
}
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
if (id != null) {
builder.field(ID.getPreferredName(), id);
}
if (source != null) {
builder.field(SOURCE.getPreferredName(), source);
}
if (dest != null) {
builder.field(DEST.getPreferredName(), dest);
}
if (frequency != null) {
builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep());
}
if (syncConfig != null) {
builder.startObject(SYNC.getPreferredName());
builder.field(syncConfig.getName(), syncConfig);
builder.endObject();
}
if (pivotConfig != null) {
builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig);
}
if (description != null) {
builder.field(DESCRIPTION.getPreferredName(), description);
}
if (settings != null) {
builder.field(SETTINGS.getPreferredName(), settings);
}
if (createTime != null) {
builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli());
}
if (transformVersion != null) {
builder.field(VERSION.getPreferredName(), transformVersion);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final TransformConfig that = (TransformConfig) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.source, that.source)
&& Objects.equals(this.dest, that.dest)
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.syncConfig, that.syncConfig)
&& Objects.equals(this.transformVersion, that.transformVersion)
&& Objects.equals(this.settings, that.settings)
&& Objects.equals(this.createTime, that.createTime)
&& Objects.equals(this.pivotConfig, that.pivotConfig);
}
@Override
public int hashCode() {
return Objects.hash(id, source, dest, frequency, syncConfig, settings, createTime, transformVersion, pivotConfig, description);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String id;
private SourceConfig source;
private DestConfig dest;
private TimeValue frequency;
private SyncConfig syncConfig;
private PivotConfig pivotConfig;
private SettingsConfig settings;
private String description;
public Builder setId(String id) {
this.id = id;
return this;
}
public Builder setSource(SourceConfig source) {
this.source = source;
return this;
}
public Builder setDest(DestConfig dest) {
this.dest = dest;
return this;
}
public Builder setFrequency(TimeValue frequency) {
this.frequency = frequency;
return this;
}
public Builder setSyncConfig(SyncConfig syncConfig) {
this.syncConfig = syncConfig;
return this;
}
public Builder setPivotConfig(PivotConfig pivotConfig) {
this.pivotConfig = pivotConfig;
return this;
}
public Builder setDescription(String description) {
this.description = description;
return this;
}
public Builder setSettings(SettingsConfig settings) {
this.settings = settings;
return this;
}
public TransformConfig build() {
return new TransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, settings, null, null);
}
}
}
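A minimal usage sketch for the builder and the forPreview(...) helper above. The sourceConfig, destConfig and pivotConfig values and the "orders-pivot" id are hypothetical placeholders assumed to be built elsewhere with their own builders (not shown in this hunk); only calls defined in this class plus TimeValue.timeValueMinutes are used.

TransformConfig config = TransformConfig.builder()
    .setId("orders-pivot")                        // hypothetical transform id
    .setSource(sourceConfig)                      // assumed pre-built SourceConfig
    .setDest(destConfig)                          // assumed pre-built DestConfig
    .setFrequency(TimeValue.timeValueMinutes(1))
    .setDescription("example pivot transform")
    .setPivotConfig(pivotConfig)                  // assumed pre-built PivotConfig
    .build();

// Preview-only variant: id and destination remain null, as documented on forPreview().
TransformConfig previewOnly = TransformConfig.forPreview(sourceConfig, pivotConfig);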

View File

@@ -1,233 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* This class holds the mutable configuration items for a transform
*/
public class TransformConfigUpdate implements ToXContentObject {
public static final String NAME = "transform_config_update";
private static final ConstructingObjectParser<TransformConfigUpdate, String> PARSER = new ConstructingObjectParser<>(
NAME,
false,
(args) -> {
SourceConfig source = (SourceConfig) args[0];
DestConfig dest = (DestConfig) args[1];
TimeValue frequency = args[2] == null
? null
: TimeValue.parseTimeValue((String) args[2], TransformConfig.FREQUENCY.getPreferredName());
SyncConfig syncConfig = (SyncConfig) args[3];
String description = (String) args[4];
SettingsConfig settings = (SettingsConfig) args[5];
return new TransformConfigUpdate(source, dest, frequency, syncConfig, description, settings);
}
);
static {
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), TransformConfig.SOURCE);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), TransformConfig.DEST);
PARSER.declareString(optionalConstructorArg(), TransformConfig.FREQUENCY);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), TransformConfig.SYNC);
PARSER.declareString(optionalConstructorArg(), TransformConfig.DESCRIPTION);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SettingsConfig.fromXContent(p), TransformConfig.SETTINGS);
}
private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser);
SyncConfig syncConfig = parser.namedObject(SyncConfig.class, parser.currentName(), false);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
return syncConfig;
}
private final SourceConfig source;
private final DestConfig dest;
private final TimeValue frequency;
private final SyncConfig syncConfig;
private final String description;
private final SettingsConfig settings;
public TransformConfigUpdate(
final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
final String description,
final SettingsConfig settings
) {
this.source = source;
this.dest = dest;
this.frequency = frequency;
this.syncConfig = syncConfig;
this.description = description;
this.settings = settings;
}
public SourceConfig getSource() {
return source;
}
public DestConfig getDestination() {
return dest;
}
public TimeValue getFrequency() {
return frequency;
}
public SyncConfig getSyncConfig() {
return syncConfig;
}
@Nullable
public String getDescription() {
return description;
}
@Nullable
public SettingsConfig getSettings() {
return settings;
}
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
if (source != null) {
builder.field(TransformConfig.SOURCE.getPreferredName(), source);
}
if (dest != null) {
builder.field(TransformConfig.DEST.getPreferredName(), dest);
}
if (frequency != null) {
builder.field(TransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
}
if (syncConfig != null) {
builder.startObject(TransformConfig.SYNC.getPreferredName());
builder.field(syncConfig.getName(), syncConfig);
builder.endObject();
}
if (description != null) {
builder.field(TransformConfig.DESCRIPTION.getPreferredName(), description);
}
if (settings != null) {
builder.field(TransformConfig.SETTINGS.getPreferredName(), settings);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final TransformConfigUpdate that = (TransformConfigUpdate) other;
return Objects.equals(this.source, that.source)
&& Objects.equals(this.dest, that.dest)
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.syncConfig, that.syncConfig)
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.settings, that.settings);
}
@Override
public int hashCode() {
return Objects.hash(source, dest, frequency, syncConfig, description, settings);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
public static Builder builder() {
return new Builder();
}
public static TransformConfigUpdate fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
public static class Builder {
private SourceConfig source;
private DestConfig dest;
private TimeValue frequency;
private SyncConfig syncConfig;
private String description;
private SettingsConfig settings;
public Builder setSource(SourceConfig source) {
this.source = source;
return this;
}
public Builder setDest(DestConfig dest) {
this.dest = dest;
return this;
}
public Builder setFrequency(TimeValue frequency) {
this.frequency = frequency;
return this;
}
public Builder setSyncConfig(SyncConfig syncConfig) {
this.syncConfig = syncConfig;
return this;
}
public Builder setDescription(String description) {
this.description = description;
return this;
}
public Builder setSettings(SettingsConfig settings) {
this.settings = settings;
return this;
}
public TransformConfigUpdate build() {
return new TransformConfigUpdate(source, dest, frequency, syncConfig, description, settings);
}
}
}
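A brief sketch of assembling a partial update with the Builder above; fields that should not change are simply left unset. The new description and frequency values are illustrative.

TransformConfigUpdate update = TransformConfigUpdate.builder()
    .setDescription("updated description")         // only the fields to change are set
    .setFrequency(TimeValue.timeValueSeconds(30))
    .build();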

View File

@@ -1,99 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Holds state of the cursors:
*
* indexer_position: the position of the indexer querying the source
* bucket_position: the position used for identifying changes
*/
public class TransformIndexerPosition {
public static final ParseField INDEXER_POSITION = new ParseField("indexer_position");
public static final ParseField BUCKET_POSITION = new ParseField("bucket_position");
private final Map<String, Object> indexerPosition;
private final Map<String, Object> bucketPosition;
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<TransformIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
"transform_indexer_position",
true,
args -> new TransformIndexerPosition((Map<String, Object>) args[0], (Map<String, Object>) args[1]));
static {
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, INDEXER_POSITION, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, BUCKET_POSITION, ValueType.OBJECT);
}
public TransformIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
this.indexerPosition = indexerPosition == null ? null : Collections.unmodifiableMap(indexerPosition);
this.bucketPosition = bucketPosition == null ? null : Collections.unmodifiableMap(bucketPosition);
}
public Map<String, Object> getIndexerPosition() {
return indexerPosition;
}
public Map<String, Object> getBucketsPosition() {
return bucketPosition;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
TransformIndexerPosition that = (TransformIndexerPosition) other;
return Objects.equals(this.indexerPosition, that.indexerPosition) &&
Objects.equals(this.bucketPosition, that.bucketPosition);
}
@Override
public int hashCode() {
return Objects.hash(indexerPosition, bucketPosition);
}
public static TransformIndexerPosition fromXContent(XContentParser parser) {
try {
return PARSER.parse(parser, null);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
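A hedged sketch of parsing a position blob of the shape described above, assuming it runs inside a method that declares throws IOException. The cursor keys time.max and user are made-up examples, and XContentType, NamedXContentRegistry and DeprecationHandler are assumed imports from org.elasticsearch.common.xcontent.

String json = "{\"indexer_position\":{\"time.max\":1609459200000},"
    + "\"bucket_position\":{\"user\":\"kimchy\"}}";
try (XContentParser parser = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    TransformIndexerPosition position = TransformIndexerPosition.fromXContent(parser);
    Map<String, Object> indexerCursor = position.getIndexerPosition();   // {time.max=1609459200000}
}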

View File

@@ -1,335 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TransformIndexerStats {
public static final String NAME = "transform_indexer_stats";
static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms");
static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed");
static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed");
static ParseField PAGES_PROCESSED = new ParseField("pages_processed");
static ParseField DOCUMENTS_PROCESSED = new ParseField("documents_processed");
static ParseField DOCUMENTS_INDEXED = new ParseField("documents_indexed");
static ParseField TRIGGER_COUNT = new ParseField("trigger_count");
static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms");
static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms");
static ParseField PROCESSING_TIME_IN_MS = new ParseField("processing_time_in_ms");
static ParseField INDEX_TOTAL = new ParseField("index_total");
static ParseField SEARCH_TOTAL = new ParseField("search_total");
static ParseField PROCESSING_TOTAL = new ParseField("processing_total");
static ParseField SEARCH_FAILURES = new ParseField("search_failures");
static ParseField INDEX_FAILURES = new ParseField("index_failures");
public static final ConstructingObjectParser<TransformIndexerStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
NAME,
true,
args -> new TransformIndexerStats(
unboxSafe(args[0], 0L),
unboxSafe(args[1], 0L),
unboxSafe(args[2], 0L),
unboxSafe(args[3], 0L),
unboxSafe(args[4], 0L),
unboxSafe(args[5], 0L),
unboxSafe(args[6], 0L),
unboxSafe(args[7], 0L),
unboxSafe(args[8], 0L),
unboxSafe(args[9], 0L),
unboxSafe(args[10], 0L),
unboxSafe(args[11], 0L),
unboxSafe(args[12], 0.0),
unboxSafe(args[13], 0.0),
unboxSafe(args[14], 0.0)
)
);
static {
LENIENT_PARSER.declareLong(optionalConstructorArg(), PAGES_PROCESSED);
LENIENT_PARSER.declareLong(optionalConstructorArg(), DOCUMENTS_PROCESSED);
LENIENT_PARSER.declareLong(optionalConstructorArg(), DOCUMENTS_INDEXED);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TRIGGER_COUNT);
LENIENT_PARSER.declareLong(optionalConstructorArg(), INDEX_TIME_IN_MS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), SEARCH_TIME_IN_MS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), PROCESSING_TIME_IN_MS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), INDEX_TOTAL);
LENIENT_PARSER.declareLong(optionalConstructorArg(), SEARCH_TOTAL);
LENIENT_PARSER.declareLong(optionalConstructorArg(), PROCESSING_TOTAL);
LENIENT_PARSER.declareLong(optionalConstructorArg(), INDEX_FAILURES);
LENIENT_PARSER.declareLong(optionalConstructorArg(), SEARCH_FAILURES);
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS);
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_INDEXED);
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_PROCESSED);
}
public static TransformIndexerStats fromXContent(XContentParser parser) throws IOException {
return LENIENT_PARSER.parse(parser, null);
}
private final double expAvgCheckpointDurationMs;
private final double expAvgDocumentsIndexed;
private final double expAvgDocumentsProcessed;
private final long pagesProcessed;
private final long documentsProcessed;
private final long documentsIndexed;
private final long triggerCount;
private final long indexTime;
private final long indexTotal;
private final long searchTime;
private final long searchTotal;
private final long processingTime;
private final long processingTotal;
private final long indexFailures;
private final long searchFailures;
public TransformIndexerStats(
long pagesProcessed,
long documentsProcessed,
long documentsIndexed,
long triggerCount,
long indexTime,
long searchTime,
long processingTime,
long indexTotal,
long searchTotal,
long processingTotal,
long indexFailures,
long searchFailures,
double expAvgCheckpointDurationMs,
double expAvgDocumentsIndexed,
double expAvgDocumentsProcessed
) {
this.pagesProcessed = pagesProcessed;
this.documentsProcessed = documentsProcessed;
this.documentsIndexed = documentsIndexed;
this.triggerCount = triggerCount;
this.indexTime = indexTime;
this.indexTotal = indexTotal;
this.searchTime = searchTime;
this.searchTotal = searchTotal;
this.processingTime = processingTime;
this.processingTotal = processingTotal;
this.indexFailures = indexFailures;
this.searchFailures = searchFailures;
this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs;
this.expAvgDocumentsIndexed = expAvgDocumentsIndexed;
this.expAvgDocumentsProcessed = expAvgDocumentsProcessed;
}
public double getExpAvgCheckpointDurationMs() {
return expAvgCheckpointDurationMs;
}
public double getExpAvgDocumentsIndexed() {
return expAvgDocumentsIndexed;
}
public double getExpAvgDocumentsProcessed() {
return expAvgDocumentsProcessed;
}
/**
* The number of pages read from the input indices
*/
public long getPagesProcessed() {
return pagesProcessed;
}
/**
* The number of documents read from the input indices
*/
public long getDocumentsProcessed() {
return documentsProcessed;
}
/**
* Number of times that the job woke up to write documents
*/
public long getTriggerCount() {
return triggerCount;
}
/**
* Number of documents written
*/
public long getDocumentsIndexed() {
return documentsIndexed;
}
/**
* The number of pages read from the input indices
* Deprecated, use {@link TransformIndexerStats#getPagesProcessed()} instead
*/
@Deprecated
public long getNumPages() {
return getPagesProcessed();
}
/**
* The number of documents read from the input indices
* Deprecated, use {@link TransformIndexerStats#getDocumentsProcessed()} instead
*/
@Deprecated
public long getNumDocuments() {
return getDocumentsProcessed();
}
/**
* Number of times that the job woke up to write documents
* Deprecated, use {@link TransformIndexerStats#getTriggerCount()} instead
*/
@Deprecated
public long getNumInvocations() {
return getTriggerCount();
}
/**
* Number of documents written
* Deprecated, use {@link TransformIndexerStats#getDocumentsIndexed()} instead
*/
@Deprecated
public long getOutputDocuments() {
return getDocumentsIndexed();
}
/**
* Number of index failures that have occurred
*/
public long getIndexFailures() {
return indexFailures;
}
/**
* Number of search failures that have occurred
*/
public long getSearchFailures() {
return searchFailures;
}
/**
* Returns the time spent indexing (cumulative) in milliseconds
*/
public long getIndexTime() {
return indexTime;
}
/**
* Returns the time spent searching (cumulative) in milliseconds
*/
public long getSearchTime() {
return searchTime;
}
/**
* Returns the time spent processing (cumulative) in milliseconds
*/
public long getProcessingTime() {
return processingTime;
}
/**
* Returns the total number of indexing requests that have been processed
* (Note: this is not the number of _documents_ that have been indexed)
*/
public long getIndexTotal() {
return indexTotal;
}
/**
* Returns the total number of search requests that have been made
*/
public long getSearchTotal() {
return searchTotal;
}
/**
* Returns the total number of processing runs that have been made
*/
public long getProcessingTotal() {
return processingTotal;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
TransformIndexerStats that = (TransformIndexerStats) other;
return Objects.equals(this.pagesProcessed, that.pagesProcessed)
&& Objects.equals(this.documentsProcessed, that.documentsProcessed)
&& Objects.equals(this.documentsIndexed, that.documentsIndexed)
&& Objects.equals(this.triggerCount, that.triggerCount)
&& Objects.equals(this.indexTime, that.indexTime)
&& Objects.equals(this.searchTime, that.searchTime)
&& Objects.equals(this.processingTime, that.processingTime)
&& Objects.equals(this.indexFailures, that.indexFailures)
&& Objects.equals(this.searchFailures, that.searchFailures)
&& Objects.equals(this.indexTotal, that.indexTotal)
&& Objects.equals(this.searchTotal, that.searchTotal)
&& Objects.equals(this.processingTotal, that.processingTotal)
&& Objects.equals(this.expAvgCheckpointDurationMs, that.expAvgCheckpointDurationMs)
&& Objects.equals(this.expAvgDocumentsIndexed, that.expAvgDocumentsIndexed)
&& Objects.equals(this.expAvgDocumentsProcessed, that.expAvgDocumentsProcessed);
}
@Override
public int hashCode() {
return Objects.hash(
pagesProcessed,
documentsProcessed,
documentsIndexed,
triggerCount,
indexTime,
searchTime,
processingTime,
indexFailures,
searchFailures,
indexTotal,
searchTotal,
processingTotal,
expAvgCheckpointDurationMs,
expAvgDocumentsIndexed,
expAvgDocumentsProcessed
);
}
@SuppressWarnings("unchecked")
private static <T> T unboxSafe(Object l, T default_value) {
if (l == null) {
return default_value;
} else {
return (T) l;
}
}
}
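The getters above expose cumulative counters, so per-request figures have to be derived by the caller. A small sketch, assuming stats was obtained from a get-transform-stats response or via fromXContent(...):

// Guard against division by zero before deriving averages from the cumulative totals.
double avgSearchMs = stats.getSearchTotal() == 0 ? 0.0 : (double) stats.getSearchTime() / stats.getSearchTotal();
double avgIndexMs = stats.getIndexTotal() == 0 ? 0.0 : (double) stats.getIndexTime() / stats.getIndexTotal();
long totalFailures = stats.getSearchFailures() + stats.getIndexFailures();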

View File

@@ -1,119 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TransformProgress {
public static final ParseField TOTAL_DOCS = new ParseField("total_docs");
public static final ParseField DOCS_REMAINING = new ParseField("docs_remaining");
public static final ParseField PERCENT_COMPLETE = new ParseField("percent_complete");
public static final ParseField DOCS_PROCESSED = new ParseField("docs_processed");
public static final ParseField DOCS_INDEXED = new ParseField("docs_indexed");
public static final ConstructingObjectParser<TransformProgress, Void> PARSER = new ConstructingObjectParser<>(
"transform_progress",
true,
a -> new TransformProgress((Long) a[0], (Long) a[1], (Double) a[2], (Long) a[3], (Long) a[4]));
static {
PARSER.declareLong(optionalConstructorArg(), TOTAL_DOCS);
PARSER.declareLong(optionalConstructorArg(), DOCS_REMAINING);
PARSER.declareDouble(optionalConstructorArg(), PERCENT_COMPLETE);
PARSER.declareLong(optionalConstructorArg(), DOCS_PROCESSED);
PARSER.declareLong(optionalConstructorArg(), DOCS_INDEXED);
}
public static TransformProgress fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
private final Long totalDocs;
private final Long remainingDocs;
private final Double percentComplete;
private final long documentsProcessed;
private final long documentsIndexed;
public TransformProgress(Long totalDocs,
Long remainingDocs,
Double percentComplete,
Long documentsProcessed,
Long documentsIndexed) {
this.totalDocs = totalDocs;
this.remainingDocs = remainingDocs == null ? totalDocs : remainingDocs;
this.percentComplete = percentComplete;
this.documentsProcessed = documentsProcessed == null ? 0 : documentsProcessed;
this.documentsIndexed = documentsIndexed == null ? 0 : documentsIndexed;
}
@Nullable
public Double getPercentComplete() {
return percentComplete;
}
@Nullable
public Long getTotalDocs() {
return totalDocs;
}
@Nullable
public Long getRemainingDocs() {
return remainingDocs;
}
public long getDocumentsProcessed() {
return documentsProcessed;
}
public long getDocumentsIndexed() {
return documentsIndexed;
}
@Override
public boolean equals(Object other) {
if (other == this) {
return true;
}
if (other == null || other.getClass() != getClass()) {
return false;
}
TransformProgress that = (TransformProgress) other;
return Objects.equals(this.remainingDocs, that.remainingDocs)
&& Objects.equals(this.totalDocs, that.totalDocs)
&& Objects.equals(this.percentComplete, that.percentComplete)
&& Objects.equals(this.documentsIndexed, that.documentsIndexed)
&& Objects.equals(this.documentsProcessed, that.documentsProcessed);
}
@Override
public int hashCode() {
return Objects.hash(remainingDocs, totalDocs, percentComplete, documentsIndexed, documentsProcessed);
}
}

View File

@@ -1,141 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TransformStats {
public static final ParseField ID = new ParseField("id");
public static final ParseField STATE_FIELD = new ParseField("state");
public static final ParseField REASON_FIELD = new ParseField("reason");
public static final ParseField NODE_FIELD = new ParseField("node");
public static final ParseField STATS_FIELD = new ParseField("stats");
public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing");
public static final ConstructingObjectParser<TransformStats, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_transform_state_and_stats_info", true,
a -> new TransformStats((String) a[0], (State) a[1], (String) a[2],
(NodeAttributes) a[3], (TransformIndexerStats) a[4], (TransformCheckpointingInfo) a[5]));
static {
PARSER.declareString(constructorArg(), ID);
PARSER.declareField(optionalConstructorArg(), p -> State.fromString(p.text()), STATE_FIELD,
ObjectParser.ValueType.STRING);
PARSER.declareString(optionalConstructorArg(), REASON_FIELD);
PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT);
PARSER.declareObject(constructorArg(), (p, c) -> TransformIndexerStats.fromXContent(p), STATS_FIELD);
PARSER.declareObject(optionalConstructorArg(),
(p, c) -> TransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
}
public static TransformStats fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final String id;
private final String reason;
private final State state;
private final NodeAttributes node;
private final TransformIndexerStats indexerStats;
private final TransformCheckpointingInfo checkpointingInfo;
public TransformStats(String id, State state, String reason, NodeAttributes node, TransformIndexerStats stats,
TransformCheckpointingInfo checkpointingInfo) {
this.id = id;
this.state = state;
this.reason = reason;
this.node = node;
this.indexerStats = stats;
this.checkpointingInfo = checkpointingInfo;
}
public String getId() {
return id;
}
public State getState() {
return state;
}
public String getReason() {
return reason;
}
public NodeAttributes getNode() {
return node;
}
public TransformIndexerStats getIndexerStats() {
return indexerStats;
}
public TransformCheckpointingInfo getCheckpointingInfo() {
return checkpointingInfo;
}
@Override
public int hashCode() {
return Objects.hash(id, state, reason, node, indexerStats, checkpointingInfo);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
TransformStats that = (TransformStats) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.state, that.state)
&& Objects.equals(this.reason, that.reason)
&& Objects.equals(this.node, that.node)
&& Objects.equals(this.indexerStats, that.indexerStats)
&& Objects.equals(this.checkpointingInfo, that.checkpointingInfo);
}
public enum State {
STARTED, INDEXING, ABORTING, STOPPING, STOPPED, FAILED;
public static State fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
public String value() {
return name().toLowerCase(Locale.ROOT);
}
}
}

View File

@@ -1,77 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import java.io.IOException;
import java.util.Collection;
import java.util.Objects;
public class AggregationConfig implements ToXContentObject {
private final AggregatorFactories.Builder aggregations;
public static AggregationConfig fromXContent(XContentParser parser) throws IOException {
if (parser.currentToken() == null) {
parser.nextToken();
}
AggregatorFactories.Builder aggregations = AggregatorFactories.parseAggregators(parser);
return new AggregationConfig(aggregations);
}
public AggregationConfig(AggregatorFactories.Builder aggregations) {
this.aggregations = aggregations;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
aggregations.toXContent(builder, params);
return builder;
}
public Collection<AggregationBuilder> getAggregatorFactories() {
return aggregations.getAggregatorFactories();
}
@Override
public int hashCode() {
return Objects.hash(aggregations);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final AggregationConfig that = (AggregationConfig) other;
return Objects.equals(this.aggregations, that.aggregations);
}
}

View File

@@ -1,365 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* A grouping via a date histogram aggregation referencing a time field
*/
public class DateHistogramGroupSource extends SingleGroupSource implements ToXContentObject {
private static final ParseField TIME_ZONE = new ParseField("time_zone");
// From DateHistogramAggregationBuilder in core, transplanted and modified to a set
// so we don't need to import a dependency on the class
private static final Set<String> DATE_FIELD_UNITS = Collections.unmodifiableSet(
new HashSet<>(
Arrays.asList(
"year",
"1y",
"quarter",
"1q",
"month",
"1M",
"week",
"1w",
"day",
"1d",
"hour",
"1h",
"minute",
"1m",
"second",
"1s"
)
)
);
/**
* Interval can be specified in 2 ways:
*
* fixed_interval: fixed intervals like 1h, 1m, 1d
* calendar_interval: calendar-aware intervals like 1M, 1y, ...
*
* Note: transform does not support the deprecated interval option
*/
public interface Interval extends ToXContentFragment {
String getName();
DateHistogramInterval getInterval();
}
public static class FixedInterval implements Interval {
private static final String NAME = "fixed_interval";
private final DateHistogramInterval interval;
public FixedInterval(DateHistogramInterval interval) {
this.interval = interval;
}
@Override
public String getName() {
return NAME;
}
@Override
public DateHistogramInterval getInterval() {
return interval;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME);
interval.toXContent(builder, params);
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final FixedInterval that = (FixedInterval) other;
return Objects.equals(this.interval, that.interval);
}
@Override
public int hashCode() {
return Objects.hash(interval);
}
}
public static class CalendarInterval implements Interval {
private static final String NAME = "calendar_interval";
private final DateHistogramInterval interval;
public CalendarInterval(DateHistogramInterval interval) {
this.interval = interval;
if (DATE_FIELD_UNITS.contains(interval.toString()) == false) {
throw new IllegalArgumentException(
"The supplied interval [" + interval + "] could not be parsed " + "as a calendar interval."
);
}
}
@Override
public String getName() {
return NAME;
}
@Override
public DateHistogramInterval getInterval() {
return interval;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME);
interval.toXContent(builder, params);
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final CalendarInterval that = (CalendarInterval) other;
return Objects.equals(this.interval, that.interval);
}
@Override
public int hashCode() {
return Objects.hash(interval);
}
}
private static final ConstructingObjectParser<DateHistogramGroupSource, Void> PARSER = new ConstructingObjectParser<>(
"date_histogram_group_source",
true,
(args) -> {
String field = (String) args[0];
Script script = (Script) args[1];
boolean missingBucket = args[2] == null ? false : (boolean) args[2];
String fixedInterval = (String) args[3];
String calendarInterval = (String) args[4];
ZoneId zoneId = (ZoneId) args[5];
Interval interval = null;
if (fixedInterval != null && calendarInterval != null) {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found both");
} else if (fixedInterval != null) {
interval = new FixedInterval(new DateHistogramInterval(fixedInterval));
} else if (calendarInterval != null) {
interval = new CalendarInterval(new DateHistogramInterval(calendarInterval));
} else {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found none");
}
return new DateHistogramGroupSource(field, script, missingBucket, interval, zoneId);
}
);
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
Script.declareScript(PARSER, optionalConstructorArg(), SCRIPT);
PARSER.declareBoolean(optionalConstructorArg(), MISSING_BUCKET);
PARSER.declareString(optionalConstructorArg(), new ParseField(FixedInterval.NAME));
PARSER.declareString(optionalConstructorArg(), new ParseField(CalendarInterval.NAME));
PARSER.declareField(optionalConstructorArg(), p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
return ZoneId.of(p.text());
} else {
return ZoneOffset.ofHours(p.intValue());
}
}, TIME_ZONE, ObjectParser.ValueType.LONG);
}
public static DateHistogramGroupSource fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
private final Interval interval;
private final ZoneId timeZone;
DateHistogramGroupSource(String field, Script script, Interval interval, ZoneId timeZone) {
this(field, script, false, interval, timeZone);
}
DateHistogramGroupSource(String field, Script script, boolean missingBucket, Interval interval, ZoneId timeZone) {
super(field, script, missingBucket);
this.interval = interval;
this.timeZone = timeZone;
}
@Override
public Type getType() {
return Type.DATE_HISTOGRAM;
}
public Interval getInterval() {
return interval;
}
public ZoneId getTimeZone() {
return timeZone;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
super.innerXContent(builder, params);
interval.toXContent(builder, params);
if (timeZone != null) {
builder.field(TIME_ZONE.getPreferredName(), timeZone.toString());
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final DateHistogramGroupSource that = (DateHistogramGroupSource) other;
return this.missingBucket == that.missingBucket
&& Objects.equals(this.field, that.field)
&& Objects.equals(this.script, that.script)
&& Objects.equals(this.interval, that.interval)
&& Objects.equals(this.timeZone, that.timeZone);
}
@Override
public int hashCode() {
return Objects.hash(field, script, missingBucket, interval, timeZone);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String field;
private Script script;
private Interval interval;
private ZoneId timeZone;
private boolean missingBucket;
/**
* The field with which to construct the date histogram grouping
* @param field The field name
* @return The {@link Builder} with the field set.
*/
public Builder setField(String field) {
this.field = field;
return this;
}
/**
* The script with which to construct the date histogram grouping
* @param script The script
* @return The {@link Builder} with the script set.
*/
public Builder setScript(Script script) {
this.script = script;
return this;
}
/**
* Set the interval for the DateHistogram grouping
* @param interval a fixed or calendar interval
* @return the {@link Builder} with the interval set.
*/
public Builder setInterval(Interval interval) {
this.interval = interval;
return this;
}
/**
* Sets the time zone to use for this aggregation
* @param timeZone The zoneId for the timeZone
* @return The {@link Builder} with the timeZone set.
*/
public Builder setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
return this;
}
/**
* Sets the value of "missing_bucket"
* @param missingBucket value of "missing_bucket" to be set
* @return The {@link Builder} with "missing_bucket" set.
*/
public Builder setMissingBucket(boolean missingBucket) {
this.missingBucket = missingBucket;
return this;
}
public DateHistogramGroupSource build() {
return new DateHistogramGroupSource(field, script, missingBucket, interval, timeZone);
}
}
}
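A short sketch of the two interval styles accepted above, using the Builder from this class; the "timestamp" field name is a placeholder.

DateHistogramGroupSource byHour = DateHistogramGroupSource.builder()
    .setField("timestamp")
    .setInterval(new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval("1h")))
    .build();

DateHistogramGroupSource byMonth = DateHistogramGroupSource.builder()
    .setField("timestamp")
    .setInterval(new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval("1M")))
    .setTimeZone(ZoneOffset.UTC)   // ZoneOffset implements ZoneId
    .build();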

View File

@@ -1,184 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoBoundingBox;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/*
* A geotile_grid aggregation source for group_by
*/
public class GeoTileGroupSource extends SingleGroupSource implements ToXContentObject {
private static final String NAME = "transform_geo_tile_group";
private static final ParseField PRECISION = new ParseField("precision");
private static final ConstructingObjectParser<GeoTileGroupSource, Void> PARSER = new ConstructingObjectParser<>(NAME, true, (args) -> {
String field = (String) args[0];
boolean missingBucket = args[1] == null ? false : (boolean) args[1];
Integer precision = (Integer) args[2];
GeoBoundingBox boundingBox = (GeoBoundingBox) args[3];
return new GeoTileGroupSource(field, missingBucket, precision, boundingBox);
});
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
PARSER.declareBoolean(optionalConstructorArg(), MISSING_BUCKET);
PARSER.declareInt(optionalConstructorArg(), PRECISION);
PARSER.declareField(
optionalConstructorArg(),
(p, context) -> GeoBoundingBox.parseBoundingBox(p),
GeoBoundingBox.BOUNDS_FIELD,
ObjectParser.ValueType.OBJECT
);
}
private final Integer precision;
private final GeoBoundingBox geoBoundingBox;
public GeoTileGroupSource(final String field, final Integer precision, final GeoBoundingBox boundingBox) {
this(field, false, precision, boundingBox);
}
public GeoTileGroupSource(final String field, final boolean missingBucket, final Integer precision, final GeoBoundingBox boundingBox) {
super(field, null, missingBucket);
if (precision != null) {
GeoTileUtils.checkPrecisionRange(precision);
}
this.precision = precision;
this.geoBoundingBox = boundingBox;
}
@Override
public Type getType() {
return Type.GEOTILE_GRID;
}
public Integer getPrecision() {
return precision;
}
public GeoBoundingBox getGeoBoundingBox() {
return geoBoundingBox;
}
public static GeoTileGroupSource fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
super.innerXContent(builder, params);
if (precision != null) {
builder.field(PRECISION.getPreferredName(), precision);
}
if (geoBoundingBox != null) {
geoBoundingBox.toXContent(builder, params);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final GeoTileGroupSource that = (GeoTileGroupSource) other;
return this.missingBucket == that.missingBucket
&& Objects.equals(this.field, that.field)
&& Objects.equals(this.precision, that.precision)
&& Objects.equals(this.geoBoundingBox, that.geoBoundingBox);
}
@Override
public int hashCode() {
return Objects.hash(field, missingBucket, precision, geoBoundingBox);
}
public static class Builder {
private String field;
private boolean missingBucket;
private Integer precision;
private GeoBoundingBox boundingBox;
/**
* The field with which to construct the geo tile grouping
* @param field The field name
* @return The {@link Builder} with the field set.
*/
public Builder setField(String field) {
this.field = field;
return this;
}
/**
* Sets the value of "missing_bucket"
* @param missingBucket value of "missing_bucket" to be set
* @return The {@link Builder} with "missing_bucket" set.
*/
public Builder setMissingBucket(boolean missingBucket) {
this.missingBucket = missingBucket;
return this;
}
/**
* The precision with which to construct the geo tile grouping
* @param precision The precision
* @return The {@link Builder} with the precision set.
*/
public Builder setPrecission(Integer precision) {
this.precision = precision;
return this;
}
/**
* Set the bounding box for the geo tile grouping
* @param boundingBox The bounding box
* @return the {@link Builder} with the bounding box set.
*/
public Builder setBoundingBox(GeoBoundingBox boundingBox) {
this.boundingBox = boundingBox;
return this;
}
public GeoTileGroupSource build() {
return new GeoTileGroupSource(field, missingBucket, precision, boundingBox);
}
}
}
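A sketch of building a geotile_grid grouping with the Builder above; the "location" field, precision 8 and the Amsterdam-area bounding box are illustrative, and GeoPoint is assumed from org.elasticsearch.common.geo. Note the builder method is spelled setPrecission in this class.

GeoTileGroupSource geoGroup = new GeoTileGroupSource.Builder()
    .setField("location")
    .setPrecission(8)   // precision is validated via GeoTileUtils.checkPrecisionRange
    .setBoundingBox(new GeoBoundingBox(new GeoPoint(52.6, 4.7), new GeoPoint(52.3, 5.1)))
    .build();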

View File

@@ -1,203 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
* Class describing how to group data
*/
public class GroupConfig implements ToXContentObject {
private final Map<String, SingleGroupSource> groups;
/**
* Leniently parse a {@code GroupConfig}.
* Parsing is lenient in that unknown fields in the root of the
* object are ignored, and groups with an unknown type ({@link SingleGroupSource.Type})
* are skipped rather than causing a parsing error.
*
* @param parser The XContent parser
* @return The parsed object
* @throws IOException On parsing error
*/
public static GroupConfig fromXContent(final XContentParser parser) throws IOException {
LinkedHashMap<String, SingleGroupSource> groups = new LinkedHashMap<>();
// be parsing friendly, whether the token needs to be advanced or not (similar to what ObjectParser does)
XContentParser.Token token;
if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: Expected START_OBJECT but was: " + token);
}
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
// leniently skip over key-value and array fields in the root of the object
if (token == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
continue;
}
String destinationFieldName = parser.currentName();
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
token = parser.nextToken();
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
String groupType = parser.currentName();
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
// need to consume up to dest field end obj
consumeUntilEndObject(parser, 1);
continue;
}
SingleGroupSource groupSource = null;
switch (groupType) {
case "terms":
groupSource = TermsGroupSource.fromXContent(parser);
break;
case "histogram":
groupSource = HistogramGroupSource.fromXContent(parser);
break;
case "date_histogram":
groupSource = DateHistogramGroupSource.fromXContent(parser);
break;
case "geotile_grid":
groupSource = GeoTileGroupSource.fromXContent(parser);
break;
default:
// not a valid group source. Consume up to the dest field end object
consumeUntilEndObject(parser, 2);
}
if (groupSource != null) {
groups.put(destinationFieldName, groupSource);
// destination field end_object
parser.nextToken();
}
}
return new GroupConfig(groups);
}
/**
* Consume tokens from the parser until {@code endObjectCount} of end object
* tokens have been read. Nested objects that start and end inside the current
* field are skipped and do not contribute to the end object count.
* @param parser The XContent parser
* @param endObjectCount Number of end object tokens to consume
* @throws IOException On parsing error
*/
private static void consumeUntilEndObject(XContentParser parser, int endObjectCount) throws IOException {
do {
XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
endObjectCount++;
} else if (token == XContentParser.Token.END_OBJECT) {
endObjectCount--;
}
} while (endObjectCount != 0);
}
GroupConfig(Map<String, SingleGroupSource> groups) {
this.groups = groups;
}
public Map<String, SingleGroupSource> getGroups() {
return groups;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
for (Map.Entry<String, SingleGroupSource> entry : groups.entrySet()) {
builder.startObject(entry.getKey());
builder.field(entry.getValue().getType().value(), entry.getValue());
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final GroupConfig that = (GroupConfig) other;
return Objects.equals(this.groups, that.groups);
}
@Override
public int hashCode() {
return Objects.hash(groups);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private final Map<String, SingleGroupSource> groups = new HashMap<>();
/**
* Add a new grouping to the builder
* @param name The name of the resulting grouped field
* @param group The type of grouping referenced
* @return The {@link Builder} with a new grouping entry added
*/
public Builder groupBy(String name, SingleGroupSource group) {
groups.put(name, group);
return this;
}
public GroupConfig build() {
return new GroupConfig(groups);
}
}
}
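A sketch of composing a GroupConfig with the Builder above. The date_histogram group uses the builder shown earlier in this commit (DateHistogramInterval comes from org.elasticsearch.search.aggregations.bucket.histogram); TermsGroupSource is assumed to expose a similar builder with setField(...), since its source is not part of this hunk.

GroupConfig groups = GroupConfig.builder()
    .groupBy("hour", DateHistogramGroupSource.builder()
        .setField("timestamp")
        .setInterval(new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval("1h")))
        .build())
    .groupBy("user", TermsGroupSource.builder().setField("user_id").build())   // assumed builder
    .build();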

View File

@@ -1,168 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* A grouping via a histogram aggregation referencing a numeric field
*/
public class HistogramGroupSource extends SingleGroupSource implements ToXContentObject {
protected static final ParseField INTERVAL = new ParseField("interval");
private static final ConstructingObjectParser<HistogramGroupSource, Void> PARSER = new ConstructingObjectParser<>(
"histogram_group_source",
true,
args -> new HistogramGroupSource((String) args[0], (Script) args[1], args[2] == null ? false : (boolean) args[2], (double) args[3])
);
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
Script.declareScript(PARSER, optionalConstructorArg(), SCRIPT);
PARSER.declareBoolean(optionalConstructorArg(), MISSING_BUCKET);
PARSER.declareDouble(optionalConstructorArg(), INTERVAL);
}
public static HistogramGroupSource fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
private final double interval;
HistogramGroupSource(String field, Script script, double interval) {
this(field, script, false, interval);
}
HistogramGroupSource(String field, Script script, boolean missingBucket, double interval) {
super(field, script, missingBucket);
if (interval <= 0) {
throw new IllegalArgumentException("[interval] must be greater than 0.");
}
this.interval = interval;
}
@Override
public Type getType() {
return Type.HISTOGRAM;
}
public double getInterval() {
return interval;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
super.innerXContent(builder, params);
builder.field(INTERVAL.getPreferredName(), interval);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final HistogramGroupSource that = (HistogramGroupSource) other;
return this.missingBucket == that.missingBucket
&& Objects.equals(this.field, that.field)
&& Objects.equals(this.script, that.script)
&& Objects.equals(this.interval, that.interval);
}
@Override
public int hashCode() {
return Objects.hash(field, script, interval, missingBucket);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String field;
private Script script;
private boolean missingBucket;
private double interval;
/**
* The field to reference in the histogram grouping
* @param field The numeric field name to use in the histogram grouping
* @return The {@link Builder} with the field set.
*/
public Builder setField(String field) {
this.field = field;
return this;
}
/**
* Set the interval for the histogram grouping
* @param interval The numeric interval for the histogram grouping
* @return The {@link Builder} with the interval set.
*/
public Builder setInterval(double interval) {
this.interval = interval;
return this;
}
/**
* The script with which to construct the histogram grouping
* @param script The script
* @return The {@link Builder} with the script set.
*/
public Builder setScript(Script script) {
this.script = script;
return this;
}
/**
* Sets the value of "missing_bucket"
* @param missingBucket value of "missing_bucket" to be set
* @return The {@link Builder} with "missing_bucket" set.
*/
public Builder setMissingBucket(boolean missingBucket) {
this.missingBucket = missingBucket;
return this;
}
public HistogramGroupSource build() {
return new HistogramGroupSource(field, script, missingBucket, interval);
}
}
}
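A short, hedged sketch of the builder above; the field name and interval are illustrative. Note that the constructor rejects any interval that is not greater than 0:

// Hypothetical sketch: bucket the numeric "stars" field into buckets of width 1.0.
// setInterval must receive a value > 0, otherwise build() throws IllegalArgumentException.
HistogramGroupSource starsHistogram = HistogramGroupSource.builder()
    .setField("stars")
    .setInterval(1.0)
    .setMissingBucket(true) // optionally create a bucket for documents missing the field
    .build();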

View File

@ -1,175 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Class describing how to pivot data via {@link GroupConfig} and {@link AggregationConfig} objects
*/
public class PivotConfig implements ToXContentObject {
private static final ParseField GROUP_BY = new ParseField("group_by");
private static final ParseField AGGREGATIONS = new ParseField("aggregations");
private static final ParseField MAX_PAGE_SEARCH_SIZE = new ParseField("max_page_search_size");
private final GroupConfig groups;
private final AggregationConfig aggregationConfig;
private final Integer maxPageSearchSize;
private static final ConstructingObjectParser<PivotConfig, Void> PARSER = new ConstructingObjectParser<>(
"pivot_config",
true,
args -> new PivotConfig((GroupConfig) args[0], (AggregationConfig) args[1], (Integer) args[2])
);
static {
PARSER.declareObject(constructorArg(), (p, c) -> (GroupConfig.fromXContent(p)), GROUP_BY);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS);
PARSER.declareInt(optionalConstructorArg(), MAX_PAGE_SEARCH_SIZE);
}
public static PivotConfig fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
PivotConfig(GroupConfig groups, final AggregationConfig aggregationConfig, Integer maxPageSearchSize) {
this.groups = groups;
this.aggregationConfig = aggregationConfig;
this.maxPageSearchSize = maxPageSearchSize;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(GROUP_BY.getPreferredName(), groups);
builder.field(AGGREGATIONS.getPreferredName(), aggregationConfig);
if (maxPageSearchSize != null) {
builder.field(MAX_PAGE_SEARCH_SIZE.getPreferredName(), maxPageSearchSize);
}
builder.endObject();
return builder;
}
public AggregationConfig getAggregationConfig() {
return aggregationConfig;
}
public GroupConfig getGroupConfig() {
return groups;
}
@Deprecated
public Integer getMaxPageSearchSize() {
return maxPageSearchSize;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final PivotConfig that = (PivotConfig) other;
return Objects.equals(this.groups, that.groups)
&& Objects.equals(this.aggregationConfig, that.aggregationConfig)
&& Objects.equals(this.maxPageSearchSize, that.maxPageSearchSize);
}
@Override
public int hashCode() {
return Objects.hash(groups, aggregationConfig, maxPageSearchSize);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private GroupConfig groups;
private AggregationConfig aggregationConfig;
private Integer maxPageSearchSize;
/**
* Set how to group the source data
* @param groups The configuration describing how to group and pivot the source data
* @return the {@link Builder} with the groups set.
*/
public Builder setGroups(GroupConfig groups) {
this.groups = groups;
return this;
}
/**
* Set the aggregated fields to include in the pivot config
* @param aggregationConfig The configuration describing the aggregated fields
* @return the {@link Builder} with the aggregations set.
*/
public Builder setAggregationConfig(AggregationConfig aggregationConfig) {
this.aggregationConfig = aggregationConfig;
return this;
}
/**
* Set the aggregated fields to include in the pivot config
* @param aggregations The aggregated field builders
* @return the {@link Builder} with the aggregations set.
*/
public Builder setAggregations(AggregatorFactories.Builder aggregations) {
this.aggregationConfig = new AggregationConfig(aggregations);
return this;
}
/**
* Sets the maximum paging size (maxPageSearchSize) that the data frame transform can use when
* pulling data from the source index.
*
* If OOM is triggered, maxPageSearchSize is dynamically reduced so that the transform can continue to gather data.
* Deprecated, use {@link org.elasticsearch.client.transform.transforms.SettingsConfig.Builder#setMaxPageSearchSize} instead
* @param maxPageSearchSize Integer value between 10 and 10_000
* @return the {@link Builder} with the paging maxPageSearchSize set.
*/
@Deprecated
public Builder setMaxPageSearchSize(Integer maxPageSearchSize) {
this.maxPageSearchSize = maxPageSearchSize;
return this;
}
public PivotConfig build() {
return new PivotConfig(groups, aggregationConfig, maxPageSearchSize);
}
}
}
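To show how the pieces compose, a hedged sketch combining a GroupConfig with an average aggregation through the setAggregations overload above; the aggregation and field names are illustrative:

// Hypothetical sketch: pivot by the group config and compute an average rating per bucket.
AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
aggs.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
PivotConfig pivotConfig = PivotConfig.builder()
    .setGroups(groupConfig)  // a GroupConfig built as in the earlier sketch
    .setAggregations(aggs)   // wrapped into an AggregationConfig by the builder
    .build();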

View File

@ -1,105 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
public abstract class SingleGroupSource implements ToXContentObject {
protected static final ParseField FIELD = new ParseField("field");
protected static final ParseField SCRIPT = new ParseField("script");
protected static final ParseField MISSING_BUCKET = new ParseField("missing_bucket");
public enum Type {
TERMS,
HISTOGRAM,
DATE_HISTOGRAM,
GEOTILE_GRID;
public String value() {
return name().toLowerCase(Locale.ROOT);
}
}
protected final String field;
protected final Script script;
protected final boolean missingBucket;
public SingleGroupSource(final String field, final Script script, final boolean missingBucket) {
this.field = field;
this.script = script;
this.missingBucket = missingBucket;
}
public abstract Type getType();
public String getField() {
return field;
}
public Script getScript() {
return script;
}
public boolean getMissingBucket() {
return missingBucket;
}
protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
if (script != null) {
builder.field(SCRIPT.getPreferredName(), script);
}
if (missingBucket) {
builder.field(MISSING_BUCKET.getPreferredName(), missingBucket);
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other instanceof SingleGroupSource == false) {
return false;
}
final SingleGroupSource that = (SingleGroupSource) other;
return this.missingBucket == that.missingBucket
&& Objects.equals(this.field, that.field)
&& Objects.equals(this.script, that.script);
}
@Override
public int hashCode() {
return Objects.hash(field, script, missingBucket);
}
}

View File

@ -1,116 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TermsGroupSource extends SingleGroupSource implements ToXContentObject {
private static final ConstructingObjectParser<TermsGroupSource, Void> PARSER = new ConstructingObjectParser<>(
"terms_group_source",
true,
args -> new TermsGroupSource((String) args[0], (Script) args[1], args[2] == null ? false : (boolean) args[2])
);
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
Script.declareScript(PARSER, optionalConstructorArg(), SCRIPT);
PARSER.declareBoolean(optionalConstructorArg(), MISSING_BUCKET);
}
public static TermsGroupSource fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
TermsGroupSource(final String field, final Script script) {
this(field, script, false);
}
TermsGroupSource(final String field, final Script script, final boolean missingBucket) {
super(field, script, missingBucket);
}
@Override
public Type getType() {
return Type.TERMS;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
super.innerXContent(builder, params);
builder.endObject();
return builder;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String field;
private Script script;
private boolean missingBucket;
/**
* The field with which to construct the terms grouping
* @param field The field name
* @return The {@link Builder} with the field set.
*/
public Builder setField(String field) {
this.field = field;
return this;
}
/**
* The script with which to construct the terms grouping
* @param script The script
* @return The {@link Builder} with the script set.
*/
public Builder setScript(Script script) {
this.script = script;
return this;
}
/**
* Sets the value of "missing_bucket"
* @param missingBucket value of "missing_bucket" to be set
* @return The {@link Builder} with "missing_bucket" set.
*/
public Builder setMissingBucket(boolean missingBucket) {
this.missingBucket = missingBucket;
return this;
}
public TermsGroupSource build() {
return new TermsGroupSource(field, script, missingBucket);
}
}
}
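As a hedged sketch of the serialized form this class produces via SingleGroupSource.innerXContent (the JSON in the comment is illustrative, not taken from this file):

// Hypothetical sketch: build a terms grouping and render it through the ToXContentObject contract.
TermsGroupSource terms = TermsGroupSource.builder()
    .setField("user_id")
    .setMissingBucket(true)
    .build();
String json = Strings.toString(terms); // roughly {"field":"user_id","missing_bucket":true}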

View File

@ -1,5 +1 @@
org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider
org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider
org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider
org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider
org.elasticsearch.client.transform.TransformNamedXContentProvider

View File

@ -45,8 +45,6 @@ import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.core.MainRequest;
import org.elasticsearch.client.core.MainResponse;
import org.elasticsearch.client.transform.transforms.SyncConfig;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
@ -689,78 +687,6 @@ public class RestHighLevelClientTests extends ESTestCase {
assertTrue(names.contains(MeanReciprocalRank.NAME));
assertTrue(names.contains(DiscountedCumulativeGain.NAME));
assertTrue(names.contains(ExpectedReciprocalRank.NAME));
assertEquals(Integer.valueOf(12), categories.get(LifecycleAction.class));
assertTrue(names.contains(UnfollowAction.NAME));
assertTrue(names.contains(AllocateAction.NAME));
assertTrue(names.contains(DeleteAction.NAME));
assertTrue(names.contains(ForceMergeAction.NAME));
assertTrue(names.contains(ReadOnlyAction.NAME));
assertTrue(names.contains(RolloverAction.NAME));
assertTrue(names.contains(WaitForSnapshotAction.NAME));
assertTrue(names.contains(ShrinkAction.NAME));
assertTrue(names.contains(FreezeAction.NAME));
assertTrue(names.contains(SetPriorityAction.NAME));
assertTrue(names.contains(SearchableSnapshotAction.NAME));
assertEquals(Integer.valueOf(3), categories.get(DataFrameAnalysis.class));
assertTrue(names.contains(org.elasticsearch.client.ml.dataframe.OutlierDetection.NAME.getPreferredName()));
assertTrue(names.contains(org.elasticsearch.client.ml.dataframe.Regression.NAME.getPreferredName()));
assertTrue(names.contains(org.elasticsearch.client.ml.dataframe.Classification.NAME.getPreferredName()));
assertTrue(names.contains(OutlierDetectionStats.NAME.getPreferredName()));
assertTrue(names.contains(RegressionStats.NAME.getPreferredName()));
assertTrue(names.contains(ClassificationStats.NAME.getPreferredName()));
assertEquals(Integer.valueOf(1), categories.get(SyncConfig.class));
assertTrue(names.contains(TimeSyncConfig.NAME));
assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.Evaluation.class));
assertThat(names, hasItems(OutlierDetection.NAME, Classification.NAME, Regression.NAME));
assertEquals(Integer.valueOf(13), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.class));
assertThat(names,
hasItems(
registeredMetricName(
OutlierDetection.NAME, org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME),
registeredMetricName(
OutlierDetection.NAME, org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME),
registeredMetricName(
OutlierDetection.NAME, org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME),
registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME),
registeredMetricName(Classification.NAME, AucRocMetric.NAME),
registeredMetricName(Classification.NAME, AccuracyMetric.NAME),
registeredMetricName(Classification.NAME, PrecisionMetric.NAME),
registeredMetricName(Classification.NAME, RecallMetric.NAME),
registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME),
registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME),
registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME),
registeredMetricName(Regression.NAME, HuberMetric.NAME),
registeredMetricName(Regression.NAME, RSquaredMetric.NAME)));
assertEquals(Integer.valueOf(13), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.Result.class));
assertThat(names,
hasItems(
registeredMetricName(
OutlierDetection.NAME, org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME),
registeredMetricName(
OutlierDetection.NAME, org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME),
registeredMetricName(
OutlierDetection.NAME, org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME),
registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME),
registeredMetricName(Classification.NAME, AucRocMetric.NAME),
registeredMetricName(Classification.NAME, AccuracyMetric.NAME),
registeredMetricName(Classification.NAME, PrecisionMetric.NAME),
registeredMetricName(Classification.NAME, RecallMetric.NAME),
registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME),
registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME),
registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME),
registeredMetricName(Regression.NAME, HuberMetric.NAME),
registeredMetricName(Regression.NAME, RSquaredMetric.NAME)));
assertEquals(Integer.valueOf(5), categories.get(org.elasticsearch.client.ml.inference.preprocessing.PreProcessor.class));
assertThat(names,
hasItems(FrequencyEncoding.NAME, OneHotEncoding.NAME, TargetMeanEncoding.NAME, CustomWordEmbedding.NAME, NGram.NAME));
assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel.class));
assertThat(names, hasItems(Tree.NAME, Ensemble.NAME, LangIdentNeuralNetwork.NAME));
assertEquals(Integer.valueOf(4),
categories.get(org.elasticsearch.client.ml.inference.trainedmodel.ensemble.OutputAggregator.class));
assertThat(names, hasItems(WeightedMode.NAME, WeightedSum.NAME, LogisticRegression.NAME, Exponent.NAME));
assertEquals(Integer.valueOf(2),
categories.get(org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig.class));
assertThat(names, hasItems(ClassificationConfig.NAME.getPreferredName(), RegressionConfig.NAME.getPreferredName()));
}
public void testApiNamingConventions() throws Exception {

View File

@ -1,476 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.logging.log4j.Level;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformResponse;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PreviewTransformResponse;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StartTransformResponse;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.StopTransformResponse;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformResponse;
import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.SourceConfig;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.client.transform.transforms.pivot.GroupConfig;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.joda.time.Instant;
import org.junit.After;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.oneOf;
public class TransformIT extends ESRestHighLevelClientTestCase {
private List<String> transformsToClean = new ArrayList<>();
private void createIndex(String indexName) throws IOException {
XContentBuilder builder = jsonBuilder();
builder.startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("user_id")
.field("type", "keyword")
.endObject()
.startObject("stars")
.field("type", "integer")
.endObject()
.endObject()
.endObject();
CreateIndexRequest request = new CreateIndexRequest(indexName);
request.mapping(builder);
CreateIndexResponse response = highLevelClient().indices().create(request, RequestOptions.DEFAULT);
assertTrue(response.isAcknowledged());
}
private void indexData(String indexName) throws IOException {
BulkRequest request = new BulkRequest();
{
Map<String, Object> doc = new HashMap<>();
doc.put("timestamp", "2019-03-10T12:00:00+00");
doc.put("user_id", "theresa");
doc.put("stars", 2);
request.add(new IndexRequest(indexName).source(doc, XContentType.JSON));
doc = new HashMap<>();
doc.put("timestamp", "2019-03-10T18:00:00+00");
doc.put("user_id", "theresa");
doc.put("stars", 3);
request.add(new IndexRequest(indexName).source(doc, XContentType.JSON));
doc = new HashMap<>();
doc.put("timestamp", "2019-03-10T12:00:00+00");
doc.put("user_id", "michel");
doc.put("stars", 5);
request.add(new IndexRequest(indexName).source(doc, XContentType.JSON));
doc = new HashMap<>();
doc.put("timestamp", "2019-03-10T18:00:00+00");
doc.put("user_id", "michel");
doc.put("stars", 3);
request.add(new IndexRequest(indexName).source(doc, XContentType.JSON));
doc = new HashMap<>();
doc.put("timestamp", "2019-03-11T12:00:00+00");
doc.put("user_id", "michel");
doc.put("stars", 3);
request.add(new IndexRequest(indexName).source(doc, XContentType.JSON));
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
}
BulkResponse response = highLevelClient().bulk(request, RequestOptions.DEFAULT);
assertFalse(response.hasFailures());
}
@After
public void cleanUpTransformsAndLogAudits() throws Exception {
for (String transformId : transformsToClean) {
highLevelClient().transform()
.stopTransform(new StopTransformRequest(transformId, Boolean.TRUE, null, false), RequestOptions.DEFAULT);
}
for (String transformId : transformsToClean) {
highLevelClient().transform().deleteTransform(new DeleteTransformRequest(transformId), RequestOptions.DEFAULT);
}
transformsToClean = new ArrayList<>();
waitForPendingTasks(adminClient());
// using '*' to keep this lenient so it does not fail if the audit index does not exist
SearchRequest searchRequest = new SearchRequest(".transform-notifications-*");
searchRequest.source(new SearchSourceBuilder().query(new MatchAllQueryBuilder()).size(100).sort("timestamp", SortOrder.ASC));
for (SearchHit hit : searchAll(searchRequest)) {
Map<String, Object> source = hit.getSourceAsMap();
String level = (String) source.getOrDefault("level", "info");
logger.log(
Level.getLevel(level.toUpperCase(Locale.ROOT)),
"Transform audit: [{}] [{}] [{}] [{}]",
Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)),
source.getOrDefault("transform_id", "n/a"),
source.getOrDefault("message", "n/a"),
source.getOrDefault("node_name", "n/a")
);
}
}
public void testCreateDelete() throws IOException {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
String id = "test-crud";
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
TransformClient client = highLevelClient().transform();
AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform,
client::putTransformAsync);
assertTrue(ack.isAcknowledged());
ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
client::deleteTransformAsync);
assertTrue(ack.isAcknowledged());
// The second delete should fail
ElasticsearchStatusException deleteError = expectThrows(ElasticsearchStatusException.class,
() -> execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
client::deleteTransformAsync));
assertThat(deleteError.getMessage(), containsString("Transform with id [test-crud] could not be found"));
}
public void testUpdate() throws IOException {
String sourceIndex = "update-transform-source";
createIndex(sourceIndex);
String id = "test-update";
TransformConfig transform = validDataFrameTransformConfigBuilder(id, sourceIndex, "pivot-dest")
.setSyncConfig(new TimeSyncConfig("timefield", TimeValue.timeValueSeconds(60)))
.build();
TransformClient client = highLevelClient().transform();
AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform,
client::putTransformAsync);
assertTrue(ack.isAcknowledged());
String updatedDescription = "my new description";
TransformConfigUpdate update = TransformConfigUpdate.builder().setDescription(updatedDescription).build();
UpdateTransformResponse response = execute(
new UpdateTransformRequest(update, id), client::updateTransform,
client::updateTransformAsync);
assertThat(response.getTransformConfiguration().getDescription(), equalTo(updatedDescription));
ElasticsearchStatusException updateError = expectThrows(ElasticsearchStatusException.class,
() -> execute(new UpdateTransformRequest(update, "missing-transform"), client::updateTransform,
client::updateTransformAsync));
assertThat(updateError.getMessage(), containsString("Transform with id [missing-transform] could not be found"));
}
public void testCreateDeleteWithDefer() throws IOException {
String sourceIndex = "missing-source-index";
String id = "test-with-defer";
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
TransformClient client = highLevelClient().transform();
PutTransformRequest request = new PutTransformRequest(transform);
request.setDeferValidation(true);
AcknowledgedResponse ack = execute(request, client::putTransform, client::putTransformAsync);
assertTrue(ack.isAcknowledged());
ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
client::deleteTransformAsync);
assertTrue(ack.isAcknowledged());
}
public void testGetTransform() throws IOException {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
String id = "test-get";
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
TransformClient client = highLevelClient().transform();
putTransform(transform);
GetTransformRequest getRequest = new GetTransformRequest(id);
GetTransformResponse getResponse = execute(getRequest, client::getTransform,
client::getTransformAsync);
assertNull(getResponse.getInvalidTransforms());
assertThat(getResponse.getTransformConfigurations(), hasSize(1));
assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(0).getId());
}
public void testGetAllAndPageTransforms() throws IOException {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
TransformClient client = highLevelClient().transform();
TransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1");
putTransform(transform);
transform = validDataFrameTransformConfig("test-get-all-2", sourceIndex, "pivot-dest-2");
putTransform(transform);
GetTransformRequest getRequest = new GetTransformRequest("_all");
GetTransformResponse getResponse = execute(getRequest, client::getTransform,
client::getTransformAsync);
assertNull(getResponse.getInvalidTransforms());
assertThat(getResponse.getTransformConfigurations(), hasSize(2));
assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(1).getId());
getRequest.setPageParams(new PageParams(0,1));
getResponse = execute(getRequest, client::getTransform,
client::getTransformAsync);
assertNull(getResponse.getInvalidTransforms());
assertThat(getResponse.getTransformConfigurations(), hasSize(1));
GetTransformRequest getMultiple = new GetTransformRequest("test-get-all-1", "test-get-all-2");
getResponse = execute(getMultiple, client::getTransform,
client::getTransformAsync);
assertNull(getResponse.getInvalidTransforms());
assertThat(getResponse.getTransformConfigurations(), hasSize(2));
}
public void testGetMissingTransform() {
TransformClient client = highLevelClient().transform();
ElasticsearchStatusException missingError = expectThrows(ElasticsearchStatusException.class,
() -> execute(new GetTransformRequest("unknown"), client::getTransform,
client::getTransformAsync));
assertThat(missingError.status(), equalTo(RestStatus.NOT_FOUND));
}
public void testStartStop() throws IOException {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
String id = "test-stop-start";
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
TransformClient client = highLevelClient().transform();
putTransform(transform);
StartTransformRequest startRequest = new StartTransformRequest(id);
StartTransformResponse startResponse =
execute(startRequest, client::startTransform, client::startTransformAsync);
assertTrue(startResponse.isAcknowledged());
assertThat(startResponse.getNodeFailures(), empty());
assertThat(startResponse.getTaskFailures(), empty());
GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id),
client::getTransformStats, client::getTransformStatsAsync);
assertThat(statsResponse.getTransformsStats(), hasSize(1));
TransformStats.State taskState = statsResponse.getTransformsStats().get(0).getState();
// Since the transform is not continuous, it could auto-stop between being started and the statistics being gathered
assertThat(taskState, oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING,
TransformStats.State.STOPPING, TransformStats.State.STOPPED));
StopTransformRequest stopRequest = new StopTransformRequest(id, Boolean.TRUE, null, false);
StopTransformResponse stopResponse =
execute(stopRequest, client::stopTransform, client::stopTransformAsync);
assertTrue(stopResponse.isAcknowledged());
assertThat(stopResponse.getNodeFailures(), empty());
assertThat(stopResponse.getTaskFailures(), empty());
// Calling stop with wait_for_completion ensures that we will be in the `STOPPED` state for the transform task
statsResponse = execute(new GetTransformStatsRequest(id),
client::getTransformStats, client::getTransformStatsAsync);
taskState = statsResponse.getTransformsStats().get(0).getState();
assertThat(taskState, is(TransformStats.State.STOPPED));
}
@SuppressWarnings("unchecked")
public void testPreview() throws IOException {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
indexData(sourceIndex);
TransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null);
TransformClient client = highLevelClient().transform();
PreviewTransformResponse preview = execute(new PreviewTransformRequest(transform),
client::previewTransform,
client::previewTransformAsync);
List<Map<String, Object>> docs = preview.getDocs();
assertThat(docs, hasSize(2));
Optional<Map<String, Object>> theresa = docs.stream().filter(doc -> "theresa".equals(doc.get("reviewer"))).findFirst();
assertTrue(theresa.isPresent());
assertEquals(2.5d, (double) theresa.get().get("avg_rating"), 0.01d);
Optional<Map<String, Object>> michel = docs.stream().filter(doc -> "michel".equals(doc.get("reviewer"))).findFirst();
assertTrue(michel.isPresent());
assertEquals(3.6d, (double) michel.get().get("avg_rating"), 0.1d);
Map<String, Object> mappings = preview.getMappings();
assertThat(mappings, hasKey("properties"));
Map<String, Object> fields = (Map<String, Object>)mappings.get("properties");
assertThat(fields.get("reviewer"), equalTo(Collections.singletonMap("type", "keyword")));
assertThat(fields.get("avg_rating"), equalTo(Collections.singletonMap("type", "double")));
}
private TransformConfig validDataFrameTransformConfig(String id, String source, String destination) {
return validDataFrameTransformConfigBuilder(id, source, destination).build();
}
private TransformConfig.Builder validDataFrameTransformConfigBuilder(String id, String source, String destination) {
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregations(aggBuilder).build();
DestConfig destConfig = (destination != null) ? DestConfig.builder().setIndex(destination).build() : null;
return TransformConfig.builder()
.setId(id)
.setSource(SourceConfig.builder().setIndex(source).setQuery(new MatchAllQueryBuilder()).build())
.setDest(destConfig)
.setPivotConfig(pivotConfig)
.setDescription("this is a test transform");
}
// TODO add tests to cover continuous situations
public void testGetStats() throws Exception {
String sourceIndex = "transform-source";
createIndex(sourceIndex);
indexData(sourceIndex);
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregations(aggBuilder).build();
String id = "test-get-stats";
TransformConfig transform = TransformConfig.builder()
.setId(id)
.setSource(SourceConfig.builder().setIndex(sourceIndex).setQuery(new MatchAllQueryBuilder()).build())
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
.setPivotConfig(pivotConfig)
.setDescription("transform for testing stats")
.build();
TransformClient client = highLevelClient().transform();
putTransform(transform);
GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id),
client::getTransformStats, client::getTransformStatsAsync);
assertEquals(1, statsResponse.getTransformsStats().size());
TransformStats stats = statsResponse.getTransformsStats().get(0);
assertEquals(TransformStats.State.STOPPED, stats.getState());
TransformIndexerStats zeroIndexerStats = new TransformIndexerStats(
0L,
0L,
0L,
0L,
0L,
0L,
0L,
0L,
0L,
0L,
0L,
0L,
0.0,
0.0,
0.0);
assertEquals(zeroIndexerStats, stats.getIndexerStats());
// start the transform
StartTransformResponse startTransformResponse = execute(new StartTransformRequest(id),
client::startTransform,
client::startTransformAsync);
assertThat(startTransformResponse.isAcknowledged(), is(true));
assertBusy(() -> {
GetTransformStatsResponse response = execute(new GetTransformStatsRequest(id),
client::getTransformStats, client::getTransformStatsAsync);
TransformStats stateAndStats = response.getTransformsStats().get(0);
assertNotEquals(zeroIndexerStats, stateAndStats.getIndexerStats());
assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING,
TransformStats.State.STOPPING, TransformStats.State.STOPPED));
assertThat(stateAndStats.getReason(), is(nullValue()));
});
}
void putTransform(TransformConfig config) throws IOException {
TransformClient client = highLevelClient().transform();
AcknowledgedResponse ack = execute(new PutTransformRequest(config), client::putTransform,
client::putTransformAsync);
assertTrue(ack.isAcknowledged());
transformsToClean.add(config.getId());
}
}
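Distilled from the integration test above, a hedged sketch of the full lifecycle against the high-level client (assertions, error handling, and cleanup omitted; `id` and `transform` stand for the values the test builds):

// Sketch of the lifecycle exercised by TransformIT: create, start, inspect stats, stop, delete.
TransformClient client = highLevelClient().transform();
client.putTransform(new PutTransformRequest(transform), RequestOptions.DEFAULT);
client.startTransform(new StartTransformRequest(id), RequestOptions.DEFAULT);
GetTransformStatsResponse stats =
    client.getTransformStats(new GetTransformStatsRequest(id), RequestOptions.DEFAULT);
client.stopTransform(new StopTransformRequest(id, true, null, false), RequestOptions.DEFAULT);
client.deleteTransform(new DeleteTransformRequest(id), RequestOptions.DEFAULT);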

View File

@ -1,269 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.TransformNamedXContentProvider;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
public class TransformRequestConvertersTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
public void testPutDataFrameTransform() throws IOException {
PutTransformRequest putRequest = new PutTransformRequest(
TransformConfigTests.randomTransformConfig());
Request request = TransformRequestConverters.putTransform(putRequest);
assertThat(request.getParameters(), not(hasKey("defer_validation")));
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/" + putRequest.getConfig().getId()));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null);
assertThat(parsedConfig, equalTo(putRequest.getConfig()));
}
putRequest.setDeferValidation(true);
request = TransformRequestConverters.putTransform(putRequest);
assertThat(request.getParameters(), hasEntry("defer_validation", Boolean.toString(putRequest.getDeferValidation())));
}
public void testUpdateDataFrameTransform() throws IOException {
String transformId = randomAlphaOfLength(10);
UpdateTransformRequest updateDataFrameTransformRequest = new UpdateTransformRequest(
TransformConfigUpdateTests.randomTransformConfigUpdate(),
transformId);
Request request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest);
assertThat(request.getParameters(), not(hasKey("defer_validation")));
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/" + transformId + "/_update"));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
TransformConfigUpdate parsedConfig = TransformConfigUpdate.fromXContent(parser);
assertThat(parsedConfig, equalTo(updateDataFrameTransformRequest.getUpdate()));
}
updateDataFrameTransformRequest.setDeferValidation(true);
request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest);
assertThat(request.getParameters(),
hasEntry("defer_validation", Boolean.toString(updateDataFrameTransformRequest.getDeferValidation())));
}
public void testDeleteDataFrameTransform() {
DeleteTransformRequest deleteRequest = new DeleteTransformRequest("foo");
Request request = TransformRequestConverters.deleteTransform(deleteRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/foo"));
assertThat(request.getParameters(), not(hasKey("force")));
deleteRequest.setForce(true);
request = TransformRequestConverters.deleteTransform(deleteRequest);
assertThat(request.getParameters(), hasEntry("force", "true"));
}
public void testStartDataFrameTransform() {
String id = randomAlphaOfLength(10);
TimeValue timeValue = null;
if (randomBoolean()) {
timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout");
}
StartTransformRequest startRequest = new StartTransformRequest(id, timeValue);
Request request = TransformRequestConverters.startTransform(startRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/" + startRequest.getId() + "/_start"));
if (timeValue != null) {
assertTrue(request.getParameters().containsKey("timeout"));
assertEquals(startRequest.getTimeout(), TimeValue.parseTimeValue(request.getParameters().get("timeout"), "timeout"));
} else {
assertFalse(request.getParameters().containsKey("timeout"));
}
}
public void testStopDataFrameTransform() {
String id = randomAlphaOfLength(10);
Boolean waitForCompletion = null;
if (randomBoolean()) {
waitForCompletion = randomBoolean();
}
TimeValue timeValue = null;
if (randomBoolean()) {
timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout");
}
Boolean waitForCheckpoint = null;
if (randomBoolean()) {
waitForCheckpoint = randomBoolean();
}
StopTransformRequest stopRequest = new StopTransformRequest(id, waitForCompletion, timeValue, waitForCheckpoint);
Request request = TransformRequestConverters.stopTransform(stopRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/" + stopRequest.getId() + "/_stop"));
if (waitForCompletion != null) {
assertTrue(request.getParameters().containsKey("wait_for_completion"));
assertEquals(stopRequest.getWaitForCompletion(), Boolean.parseBoolean(request.getParameters().get("wait_for_completion")));
} else {
assertFalse(request.getParameters().containsKey("wait_for_completion"));
}
if (timeValue != null) {
assertTrue(request.getParameters().containsKey("timeout"));
assertEquals(stopRequest.getTimeout(), TimeValue.parseTimeValue(request.getParameters().get("timeout"), "timeout"));
} else {
assertFalse(request.getParameters().containsKey("timeout"));
}
if (waitForCheckpoint != null) {
assertTrue(request.getParameters().containsKey("wait_for_checkpoint"));
assertEquals(stopRequest.getWaitForCheckpoint(), Boolean.parseBoolean(request.getParameters().get("wait_for_checkpoint")));
} else {
assertFalse(request.getParameters().containsKey("wait_for_checkpoint"));
}
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
stopRequest.setAllowNoMatch(randomBoolean());
request = TransformRequestConverters.stopTransform(stopRequest);
assertEquals(stopRequest.getAllowNoMatch(), Boolean.parseBoolean(request.getParameters().get(ALLOW_NO_MATCH)));
}
public void testPreviewDataFrameTransform() throws IOException {
PreviewTransformRequest previewRequest = new PreviewTransformRequest(
TransformConfigTests.randomTransformConfig());
Request request = TransformRequestConverters.previewTransform(previewRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/_preview"));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null);
assertThat(parsedConfig, equalTo(previewRequest.getConfig()));
}
}
public void testGetDataFrameTransformStats() {
GetTransformStatsRequest getStatsRequest = new GetTransformStatsRequest("foo");
Request request = TransformRequestConverters.getTransformStats(getStatsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/foo/_stats"));
assertFalse(request.getParameters().containsKey("from"));
assertFalse(request.getParameters().containsKey("size"));
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
getStatsRequest.setPageParams(new PageParams(0, null));
request = TransformRequestConverters.getTransformStats(getStatsRequest);
assertThat(request.getParameters(), hasEntry("from", "0"));
assertEquals(null, request.getParameters().get("size"));
getStatsRequest.setPageParams(new PageParams(null, 50));
request = TransformRequestConverters.getTransformStats(getStatsRequest);
assertEquals(null, request.getParameters().get("from"));
assertThat(request.getParameters(), hasEntry("size", "50"));
getStatsRequest.setPageParams(new PageParams(0, 10));
request = TransformRequestConverters.getTransformStats(getStatsRequest);
assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
getStatsRequest.setAllowNoMatch(false);
request = TransformRequestConverters.getTransformStats(getStatsRequest);
assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
}
public void testGetDataFrameTransform() {
GetTransformRequest getRequest = new GetTransformRequest("bar");
Request request = TransformRequestConverters.getTransform(getRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/bar"));
assertFalse(request.getParameters().containsKey("from"));
assertFalse(request.getParameters().containsKey("size"));
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
getRequest.setPageParams(new PageParams(0, null));
request = TransformRequestConverters.getTransform(getRequest);
assertThat(request.getParameters(), hasEntry("from", "0"));
assertEquals(null, request.getParameters().get("size"));
getRequest.setPageParams(new PageParams(null, 50));
request = TransformRequestConverters.getTransform(getRequest);
assertEquals(null, request.getParameters().get("from"));
assertThat(request.getParameters(), hasEntry("size", "50"));
getRequest.setPageParams(new PageParams(0, 10));
request = TransformRequestConverters.getTransform(getRequest);
assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
getRequest.setAllowNoMatch(false);
request = TransformRequestConverters.getTransform(getRequest);
assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
}
public void testGetDataFrameTransform_givenMultipleIds() {
GetTransformRequest getRequest = new GetTransformRequest("foo", "bar", "baz");
Request request = TransformRequestConverters.getTransform(getRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_transform/foo,bar,baz"));
}
}

View File

@ -1,735 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.documentation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformResponse;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PreviewTransformResponse;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StartTransformResponse;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.StopTransformResponse;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformResponse;
import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.NodeAttributes;
import org.elasticsearch.client.transform.transforms.QueryConfig;
import org.elasticsearch.client.transform.transforms.SettingsConfig;
import org.elasticsearch.client.transform.transforms.SourceConfig;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
import org.elasticsearch.client.transform.transforms.TransformProgress;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.client.transform.transforms.pivot.AggregationConfig;
import org.elasticsearch.client.transform.transforms.pivot.GroupConfig;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.junit.After;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
public class TransformDocumentationIT extends ESRestHighLevelClientTestCase {
private List<String> transformsToClean = new ArrayList<>();
@After
public void cleanUpTransforms() throws Exception {
for (String transformId : transformsToClean) {
highLevelClient().transform()
.stopTransform(new StopTransformRequest(transformId, true, TimeValue.timeValueSeconds(20), false), RequestOptions.DEFAULT);
}
for (String transformId : transformsToClean) {
highLevelClient().transform().deleteTransform(new DeleteTransformRequest(transformId), RequestOptions.DEFAULT);
}
transformsToClean = new ArrayList<>();
waitForPendingTasks(adminClient());
}
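// Creates the source index used by the snippets below, with timestamp, user_id and stars fields.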
private void createIndex(String indexName) throws IOException {
XContentBuilder builder = jsonBuilder();
builder.startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("user_id")
.field("type", "keyword")
.endObject()
.startObject("stars")
.field("type", "integer")
.endObject()
.endObject()
.endObject();
CreateIndexRequest request = new CreateIndexRequest(indexName);
request.mapping(builder);
CreateIndexResponse response = highLevelClient().indices().create(request, RequestOptions.DEFAULT);
assertTrue(response.isAcknowledged());
}
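// Documentation snippets for the Put Transform API: the query, source, dest, group-by, aggregation,
// pivot and settings configs are assembled into a TransformConfig and submitted both synchronously
// and asynchronously.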
public void testPutTransform() throws IOException, InterruptedException {
createIndex("source-index");
RestHighLevelClient client = highLevelClient();
// tag::put-transform-query-config
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
// end::put-transform-query-config
// tag::put-transform-source-config
SourceConfig sourceConfig = SourceConfig.builder()
.setIndex("source-index")
.setQueryConfig(queryConfig).build();
// end::put-transform-source-config
// tag::put-transform-dest-config
DestConfig destConfig = DestConfig.builder()
.setIndex("pivot-destination")
.setPipeline("my-pipeline").build();
// end::put-transform-dest-config
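// The pipeline above is only needed for the documentation snippet; rebuild the dest config
// without it, since "my-pipeline" is not created by this test.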
destConfig = DestConfig.builder().setIndex("pivot-destination").build();
// tag::put-transform-group-config
GroupConfig groupConfig = GroupConfig.builder()
.groupBy("reviewer", // <1>
TermsGroupSource.builder().setField("user_id").build()) // <2>
.build();
// end::put-transform-group-config
// tag::put-transform-agg-config
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(
AggregationBuilders.avg("avg_rating").field("stars")); // <1>
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
// end::put-transform-agg-config
// tag::put-transform-pivot-config
PivotConfig pivotConfig = PivotConfig.builder()
.setGroups(groupConfig) // <1>
.setAggregationConfig(aggConfig) // <2>
.build();
// end::put-transform-pivot-config
// tag::put-transform-settings-config
SettingsConfig settings = SettingsConfig.builder()
.setMaxPageSearchSize(1000) // <1>
.build();
// end::put-transform-settings-config
// tag::put-transform-config
TransformConfig transformConfig = TransformConfig
.builder()
.setId("reviewer-avg-rating") // <1>
.setSource(sourceConfig) // <2>
.setDest(destConfig) // <3>
.setFrequency(TimeValue.timeValueSeconds(15)) // <4>
.setPivotConfig(pivotConfig) // <5>
.setDescription("This is my test transform") // <6>
.setSettings(settings) // <7>
.build();
// end::put-transform-config
{
// tag::put-transform-request
PutTransformRequest request =
new PutTransformRequest(transformConfig); // <1>
request.setDeferValidation(false); // <2>
// end::put-transform-request
// tag::put-transform-execute
AcknowledgedResponse response =
client.transform().putTransform(
request, RequestOptions.DEFAULT);
// end::put-transform-execute
transformsToClean.add(request.getConfig().getId());
assertTrue(response.isAcknowledged());
}
{
TransformConfig configWithDifferentId = TransformConfig.builder()
.setId("reviewer-avg-rating2")
.setSource(transformConfig.getSource())
.setDest(transformConfig.getDestination())
.setPivotConfig(transformConfig.getPivotConfig())
.build();
PutTransformRequest request = new PutTransformRequest(configWithDifferentId);
// tag::put-transform-execute-listener
ActionListener<AcknowledgedResponse> listener =
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-transform-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-transform-execute-async
client.transform().putTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::put-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
transformsToClean.add(request.getConfig().getId());
}
}
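// Documentation snippets for the Update Transform API: a transform is created first, then a
// TransformConfigUpdate changes its source, dest, frequency, sync config and description.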
public void testUpdateTransform() throws IOException, InterruptedException {
createIndex("source-data");
RestHighLevelClient client = highLevelClient();
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
TransformConfig transformConfig = TransformConfig.builder()
.setId("my-transform-to-update")
.setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build())
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
.setPivotConfig(pivotConfig)
.setSyncConfig(new TimeSyncConfig("time-field", TimeValue.timeValueSeconds(120)))
.build();
client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
transformsToClean.add(transformConfig.getId());
// tag::update-transform-config
TransformConfigUpdate update = TransformConfigUpdate
.builder()
.setSource(SourceConfig.builder()
.setIndex("source-data")
.build()) // <1>
.setDest(DestConfig.builder()
.setIndex("pivot-dest")
.build()) // <2>
.setFrequency(TimeValue.timeValueSeconds(15)) // <3>
.setSyncConfig(new TimeSyncConfig("time-field",
TimeValue.timeValueSeconds(120))) // <4>
.setDescription("This is my updated transform") // <5>
.build();
// end::update-transform-config
{
// tag::update-transform-request
UpdateTransformRequest request =
new UpdateTransformRequest(
update, // <1>
"my-transform-to-update"); // <2>
request.setDeferValidation(false); // <3>
// end::update-transform-request
// tag::update-transform-execute
UpdateTransformResponse response =
client.transform().updateTransform(request,
RequestOptions.DEFAULT);
TransformConfig updatedConfig =
response.getTransformConfiguration();
// end::update-transform-execute
assertThat(updatedConfig.getDescription(), equalTo("This is my updated transform"));
}
{
UpdateTransformRequest request = new UpdateTransformRequest(update, "my-transform-to-update");
// tag::update-transform-execute-listener
ActionListener<UpdateTransformResponse> listener =
new ActionListener<UpdateTransformResponse>() {
@Override
public void onResponse(UpdateTransformResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::update-transform-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::update-transform-execute-async
client.transform().updateTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::update-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
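// Documentation snippets for the Start and Stop Transform APIs, including request options and the
// asynchronous listener variants.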
public void testStartStop() throws IOException, InterruptedException {
createIndex("source-data");
RestHighLevelClient client = highLevelClient();
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
TransformConfig transformConfig = TransformConfig.builder()
.setId("mega-transform")
.setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build())
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
.setPivotConfig(pivotConfig)
.build();
client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
transformsToClean.add(transformConfig.getId());
{
// tag::start-transform-request
StartTransformRequest request =
new StartTransformRequest("mega-transform"); // <1>
// end::start-transform-request
// tag::start-transform-request-options
request.setTimeout(TimeValue.timeValueSeconds(20)); // <1>
// end::start-transform-request-options
// tag::start-transform-execute
StartTransformResponse response =
client.transform().startTransform(
request, RequestOptions.DEFAULT);
// end::start-transform-execute
assertTrue(response.isAcknowledged());
}
{
// tag::stop-transform-request
StopTransformRequest request =
new StopTransformRequest("mega-transform"); // <1>
// end::stop-transform-request
// tag::stop-transform-request-options
request.setWaitForCompletion(Boolean.TRUE); // <1>
request.setTimeout(TimeValue.timeValueSeconds(30)); // <2>
request.setAllowNoMatch(true); // <3>
// end::stop-transform-request-options
// tag::stop-transform-execute
StopTransformResponse response =
client.transform().stopTransform(
request, RequestOptions.DEFAULT);
// end::stop-transform-execute
assertTrue(response.isAcknowledged());
}
{
// tag::start-transform-execute-listener
ActionListener<StartTransformResponse> listener =
new ActionListener<StartTransformResponse>() {
@Override
public void onResponse(
StartTransformResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::start-transform-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
StartTransformRequest request = new StartTransformRequest("mega-transform");
// tag::start-transform-execute-async
client.transform().startTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::start-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
{
// tag::stop-transform-execute-listener
ActionListener<StopTransformResponse> listener =
new ActionListener<StopTransformResponse>() {
@Override
public void onResponse(
StopTransformResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::stop-transform-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
StopTransformRequest request = new StopTransformRequest("mega-transform");
// tag::stop-transform-execute-async
client.transform().stopTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::stop-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
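// Documentation snippets for the Delete Transform API: two transforms are created, one is deleted
// synchronously and the other asynchronously.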
public void testDeleteDataFrameTransform() throws IOException, InterruptedException {
createIndex("source-data");
RestHighLevelClient client = highLevelClient();
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
TransformConfig transformConfig1 = TransformConfig.builder()
.setId("mega-transform")
.setSource(SourceConfig.builder().setIndex("source-data").setQuery(new MatchAllQueryBuilder()).build())
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
.setPivotConfig(pivotConfig)
.build();
TransformConfig transformConfig2 = TransformConfig.builder()
.setId("mega-transform2")
.setSource(SourceConfig.builder().setIndex("source-data").setQuery(new MatchAllQueryBuilder()).build())
.setDest(DestConfig.builder().setIndex("pivot-dest2").build())
.setPivotConfig(pivotConfig)
.build();
client.transform().putTransform(new PutTransformRequest(transformConfig1), RequestOptions.DEFAULT);
client.transform().putTransform(new PutTransformRequest(transformConfig2), RequestOptions.DEFAULT);
{
// tag::delete-transform-request
DeleteTransformRequest request =
new DeleteTransformRequest("mega-transform"); // <1>
request.setForce(false); // <2>
// end::delete-transform-request
// tag::delete-transform-execute
AcknowledgedResponse response =
client.transform()
.deleteTransform(request, RequestOptions.DEFAULT);
// end::delete-transform-execute
assertTrue(response.isAcknowledged());
}
{
// tag::delete-transform-execute-listener
ActionListener<AcknowledgedResponse> listener =
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-transform-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
DeleteTransformRequest request = new DeleteTransformRequest("mega-transform2");
// tag::delete-transform-execute-async
client.transform().deleteTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
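// Documentation snippets for the Preview Transform API: a preview-only TransformConfig (no id or
// destination) is built and previewed both synchronously and asynchronously.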
public void testPreview() throws IOException, InterruptedException {
createIndex("source-data");
RestHighLevelClient client = highLevelClient();
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
// tag::preview-transform-request
TransformConfig transformConfig =
TransformConfig.forPreview(
SourceConfig.builder()
.setIndex("source-data")
.setQueryConfig(queryConfig)
.build(), // <1>
pivotConfig); // <2>
PreviewTransformRequest request =
new PreviewTransformRequest(transformConfig); // <3>
// end::preview-transform-request
{
// tag::preview-transform-execute
PreviewTransformResponse response =
client.transform()
.previewTransform(request, RequestOptions.DEFAULT);
// end::preview-transform-execute
assertNotNull(response.getDocs());
assertNotNull(response.getMappings());
}
{
// tag::preview-transform-execute-listener
ActionListener<PreviewTransformResponse> listener =
new ActionListener<PreviewTransformResponse>() {
@Override
public void onResponse(PreviewTransformResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::preview-transform-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::preview-transform-execute-async
client.transform().previewTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::preview-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
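// Documentation snippets for the Get Transform Stats API: reads the state, indexer stats,
// checkpointing progress and node attributes of a stopped transform.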
public void testGetStats() throws IOException, InterruptedException {
createIndex("source-data");
RestHighLevelClient client = highLevelClient();
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
String id = "statisitcal-transform";
TransformConfig transformConfig = TransformConfig.builder()
.setId(id)
.setSource(SourceConfig.builder().setIndex("source-data").setQuery(new MatchAllQueryBuilder()).build())
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
.setPivotConfig(pivotConfig)
.build();
client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
transformsToClean.add(id);
// tag::get-transform-stats-request
GetTransformStatsRequest request =
new GetTransformStatsRequest(id); // <1>
// end::get-transform-stats-request
// tag::get-transform-stats-request-options
request.setPageParams(new PageParams(0, 100)); // <1>
request.setAllowNoMatch(true); // <2>
// end::get-transform-stats-request-options
{
// tag::get-transform-stats-execute
GetTransformStatsResponse response =
client.transform()
.getTransformStats(request, RequestOptions.DEFAULT);
// end::get-transform-stats-execute
assertThat(response.getTransformsStats(), hasSize(1));
// tag::get-transform-stats-response
TransformStats stats =
response.getTransformsStats().get(0); // <1>
TransformStats.State state =
stats.getState(); // <2>
TransformIndexerStats indexerStats =
stats.getIndexerStats(); // <3>
TransformProgress progress =
stats.getCheckpointingInfo()
.getNext().getCheckpointProgress(); // <4>
NodeAttributes node =
stats.getNode(); // <5>
// end::get-transform-stats-response
assertEquals(TransformStats.State.STOPPED, state);
assertNotNull(indexerStats);
assertNull(progress);
}
{
// tag::get-transform-stats-execute-listener
ActionListener<GetTransformStatsResponse> listener =
new ActionListener<GetTransformStatsResponse>() {
@Override
public void onResponse(
GetTransformStatsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-transform-stats-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-transform-stats-execute-async
client.transform().getTransformStatsAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::get-transform-stats-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
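// Documentation snippets for the Get Transform API: retrieves the stored configuration by id,
// with paging and allow-no-match request options.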
public void testGetDataFrameTransform() throws IOException, InterruptedException {
createIndex("source-data");
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
TransformConfig putTransformConfig = TransformConfig.builder()
.setId("mega-transform")
.setSource(SourceConfig.builder().setIndex("source-data").setQuery(new MatchAllQueryBuilder()).build())
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
.setPivotConfig(pivotConfig)
.build();
RestHighLevelClient client = highLevelClient();
client.transform().putTransform(new PutTransformRequest(putTransformConfig), RequestOptions.DEFAULT);
transformsToClean.add(putTransformConfig.getId());
{
// tag::get-transform-request
GetTransformRequest request =
new GetTransformRequest("mega-transform"); // <1>
// end::get-transform-request
// tag::get-transform-request-options
request.setPageParams(new PageParams(0, 100)); // <1>
request.setAllowNoMatch(true); // <2>
// end::get-transform-request-options
// tag::get-transform-execute
GetTransformResponse response =
client.transform()
.getTransform(request, RequestOptions.DEFAULT);
// end::get-transform-execute
// tag::get-transform-response
List<TransformConfig> transformConfigs =
response.getTransformConfigurations();
// end::get-transform-response
assertEquals(1, transformConfigs.size());
}
{
// tag::get-transform-execute-listener
ActionListener<GetTransformResponse> listener =
new ActionListener<GetTransformResponse>() {
@Override
public void onResponse(GetTransformResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-transform-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
GetTransformRequest request = new GetTransformRequest("mega-transform");
// tag::get-transform-execute-async
client.transform().getTransformAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::get-transform-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
}

View File

@ -1,157 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.BiPredicate;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
import static org.hamcrest.Matchers.containsString;
public class AcknowledgedTasksResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
this::createTestInstance,
AcknowledgedTasksResponseTests::toXContent,
AcknowledgedTasksResponseTests::fromXContent)
.assertEqualsConsumer(AcknowledgedTasksResponseTests::assertEqualInstances)
.assertToXContentEquivalence(false)
.supportsUnknownFields(false)
.test();
}
// Serialisation of TaskOperationFailure and ElasticsearchException changes the objects,
// so use a custom comparison method rather than Object.equals
private static void assertEqualInstances(AcknowledgedTasksResponse expected, AcknowledgedTasksResponse actual) {
assertNotSame(expected, actual);
assertEquals(expected.isAcknowledged(), actual.isAcknowledged());
assertTaskOperationFailuresEqual(expected.getTaskFailures(), actual.getTaskFailures());
assertNodeFailuresEqual(expected.getNodeFailures(), actual.getNodeFailures());
}
private static <T> void assertListEquals(List<T> expected, List<T> actual, BiPredicate<T, T> comparator) {
if (expected == null) {
assertNull(actual);
return;
} else {
assertNotNull(actual);
}
assertEquals(expected.size(), actual.size());
for (int i=0; i<expected.size(); i++) {
assertTrue(comparator.test(expected.get(i), actual.get(i)));
}
}
public static void assertTaskOperationFailuresEqual(List<TaskOperationFailure> expected,
List<TaskOperationFailure> actual) {
assertListEquals(expected, actual, (a, b) ->
Objects.equals(a.getNodeId(), b.getNodeId())
&& Objects.equals(a.getTaskId(), b.getTaskId())
&& Objects.equals(a.getStatus(), b.getStatus())
);
}
public static void assertNodeFailuresEqual(List<ElasticsearchException> expected,
List<ElasticsearchException> actual) {
// actualException is a wrapped copy of expectedException, so the error messages
// won't be identical, but actualException should contain the message from expectedException
assertListEquals(expected, actual, (expectedException, actualException) -> {
assertThat(actualException.getDetailedMessage(), containsString(expectedException.getMessage()));
return true;
});
}
private static AcknowledgedTasksResponse fromXContent(XContentParser parser) {
return AcknowledgedTasksResponse.generateParser("ack_tasks_response",
AcknowledgedTasksResponse::new, "acknowleged")
.apply(parser, null);
}
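// Randomly includes task and node failures so both the populated and absent failure paths are covered.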
private AcknowledgedTasksResponse createTestInstance() {
List<TaskOperationFailure> taskFailures = null;
if (randomBoolean()) {
taskFailures = new ArrayList<>();
int numTaskFailures = randomIntBetween(1, 4);
for (int i=0; i<numTaskFailures; i++) {
taskFailures.add(new TaskOperationFailure(randomAlphaOfLength(4), randomNonNegativeLong(), new IllegalStateException()));
}
}
List<ElasticsearchException> nodeFailures = null;
if (randomBoolean()) {
nodeFailures = new ArrayList<>();
int numNodeFailures = randomIntBetween(1, 4);
for (int i=0; i<numNodeFailures; i++) {
nodeFailures.add(new ElasticsearchException("AcknowledgedTasksResponseTest"));
}
}
return new AcknowledgedTasksResponse(randomBoolean(), taskFailures, nodeFailures);
}
public static void toXContent(AcknowledgedTasksResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
{
builder.field("acknowleged", response.isAcknowledged());
taskFailuresToXContent(response.getTaskFailures(), builder);
nodeFailuresToXContent(response.getNodeFailures(), builder);
}
builder.endObject();
}
public static void taskFailuresToXContent(List<TaskOperationFailure> taskFailures, XContentBuilder builder) throws IOException {
if (taskFailures != null && taskFailures.isEmpty() == false) {
builder.startArray(AcknowledgedTasksResponse.TASK_FAILURES.getPreferredName());
for (TaskOperationFailure failure : taskFailures) {
builder.startObject();
failure.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
}
builder.endArray();
}
}
public static void nodeFailuresToXContent(List<ElasticsearchException> nodeFailures, XContentBuilder builder) throws IOException {
if (nodeFailures != null && nodeFailures.isEmpty() == false) {
builder.startArray(AcknowledgedTasksResponse.NODE_FAILURES.getPreferredName());
for (ElasticsearchException failure : nodeFailures) {
builder.startObject();
failure.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
}
builder.endArray();
}
}
}

View File

@ -1,33 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.containsString;
public class DeleteTransformRequestTests extends ESTestCase {
public void testValidate() {
assertFalse(new DeleteTransformRequest("valid-id").validate().isPresent());
assertThat(new DeleteTransformRequest(null).validate().get().getMessage(),
containsString("transform id must not be null"));
}
}

View File

@ -1,32 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.containsString;
public class GetTransformRequestTests extends ESTestCase {
public void testValidate() {
assertFalse(new GetTransformRequest("valid-id").validate().isPresent());
assertThat(new GetTransformRequest(new String[0]).validate().get().getMessage(),
containsString("transform id must not be null"));
}
}

View File

@ -1,86 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class GetTransformResponseTests extends ESTestCase {
public void testXContentParser() throws IOException {
xContentTester(this::createParser,
GetTransformResponseTests::createTestInstance,
GetTransformResponseTests::toXContent,
GetTransformResponse::fromXContent)
.supportsUnknownFields(false)
.test();
}
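// Builds a response with up to three random transform configs and, occasionally, an invalid_transforms section.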
private static GetTransformResponse createTestInstance() {
int numTransforms = randomIntBetween(0, 3);
List<TransformConfig> transforms = new ArrayList<>();
for (int i=0; i<numTransforms; i++) {
transforms.add(TransformConfigTests.randomTransformConfig());
}
GetTransformResponse.InvalidTransforms invalidTransforms = null;
if (randomBoolean()) {
List<String> invalidIds = Arrays.asList(generateRandomStringArray(5, 6, false, false));
invalidTransforms = new GetTransformResponse.InvalidTransforms(invalidIds);
}
return new GetTransformResponse(transforms, transforms.size() + 10, invalidTransforms);
}
private static void toXContent(GetTransformResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
{
builder.field("count", response.getCount());
builder.field("transforms", response.getTransformConfigurations());
if (response.getInvalidTransforms() != null) {
builder.startObject("invalid_transforms");
builder.field("count", response.getInvalidTransforms().getCount());
builder.field("transforms", response.getInvalidTransforms().getTransformIds());
builder.endObject();
}
}
builder.endObject();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

View File

@ -1,32 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.containsString;
public class GetTransformStatsRequestTests extends ESTestCase {
public void testValidate() {
assertFalse(new GetTransformStatsRequest("valid-id").validate().isPresent());
assertThat(new GetTransformStatsRequest(null).validate().get().getMessage(),
containsString("transform id must not be null"));
}
}

View File

@ -1,101 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.client.transform.transforms.TransformStatsTests;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class GetTransformStatsResponseTests extends ESTestCase {
public void testXContentParser() throws IOException {
xContentTester(
this::createParser,
GetTransformStatsResponseTests::createTestInstance,
GetTransformStatsResponseTests::toXContent,
GetTransformStatsResponse::fromXContent
).assertEqualsConsumer(GetTransformStatsResponseTests::assertEqualInstances)
.assertToXContentEquivalence(false)
.supportsUnknownFields(true)
.randomFieldsExcludeFilter(path -> path.isEmpty() == false)
.test();
}
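// Builds a response with one to three random stats entries plus optional task and node failures.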
private static GetTransformStatsResponse createTestInstance() {
int count = randomIntBetween(1, 3);
List<TransformStats> stats = new ArrayList<>();
for (int i = 0; i < count; i++) {
stats.add(TransformStatsTests.randomInstance());
}
List<TaskOperationFailure> taskFailures = null;
if (randomBoolean()) {
taskFailures = new ArrayList<>();
int numTaskFailures = randomIntBetween(1, 4);
for (int i = 0; i < numTaskFailures; i++) {
taskFailures.add(new TaskOperationFailure(randomAlphaOfLength(4), randomNonNegativeLong(), new IllegalStateException()));
}
}
List<ElasticsearchException> nodeFailures = null;
if (randomBoolean()) {
nodeFailures = new ArrayList<>();
int numNodeFailures = randomIntBetween(1, 4);
for (int i = 0; i < numNodeFailures; i++) {
nodeFailures.add(new ElasticsearchException("GetTransformStatsResponseTests"));
}
}
return new GetTransformStatsResponse(stats, stats.size() + randomLongBetween(0, 10), taskFailures, nodeFailures);
}
private static void toXContent(GetTransformStatsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
{
builder.field("count", response.getCount());
builder.startArray("transforms");
for (TransformStats stats : response.getTransformsStats()) {
TransformStatsTests.toXContent(stats, builder);
}
builder.endArray();
AcknowledgedTasksResponseTests.taskFailuresToXContent(response.getTaskFailures(), builder);
AcknowledgedTasksResponseTests.nodeFailuresToXContent(response.getNodeFailures(), builder);
}
builder.endObject();
}
// Serialisation of TaskOperationFailure and ElasticsearchException changes the objects,
// so use a custom comparison method rather than Object.equals
private static void assertEqualInstances(GetTransformStatsResponse expected, GetTransformStatsResponse actual) {
assertEquals(expected.getCount(), actual.getCount());
assertEquals(expected.getTransformsStats(), actual.getTransformsStats());
AcknowledgedTasksResponseTests.assertTaskOperationFailuresEqual(expected.getTaskFailures(), actual.getTaskFailures());
AcknowledgedTasksResponseTests.assertNodeFailuresEqual(expected.getNodeFailures(), actual.getNodeFailures());
}
}

View File

@ -1,83 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig;
import static org.hamcrest.Matchers.containsString;
public class PreviewTransformRequestTests extends AbstractXContentTestCase<PreviewTransformRequest> {
@Override
protected PreviewTransformRequest createTestInstance() {
return new PreviewTransformRequest(TransformConfigTests.randomTransformConfig());
}
@Override
protected PreviewTransformRequest doParseInstance(XContentParser parser) throws IOException {
return new PreviewTransformRequest(TransformConfig.fromXContent(parser));
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
public void testValidate() {
assertFalse(new PreviewTransformRequest(TransformConfigTests.randomTransformConfig())
.validate().isPresent());
assertThat(new PreviewTransformRequest(null).validate().get().getMessage(),
containsString("preview requires a non-null transform config"));
// a null id and destination are valid for a preview
TransformConfig config = TransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig());
assertFalse(new PreviewTransformRequest(config).validate().isPresent());
// null source is not valid
config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
Optional<ValidationException> error = new PreviewTransformRequest(config).validate();
assertTrue(error.isPresent());
assertThat(error.get().getMessage(), containsString("transform source cannot be null"));
}
}

View File

@ -1,160 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
import static org.hamcrest.Matchers.equalTo;
public class PreviewTransformResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser, this::createTestInstance, this::toXContent, PreviewTransformResponse::fromXContent)
.supportsUnknownFields(true)
.randomFieldsExcludeFilter(path -> path.isEmpty() == false)
.test();
}
public void testCreateIndexRequest() throws IOException {
PreviewTransformResponse previewResponse = randomPreviewResponse();
CreateIndexRequest createIndexRequest = previewResponse.getCreateIndexRequest("dest_index");
assertEquals("dest_index", createIndexRequest.index());
assertThat(createIndexRequest.aliases(), equalTo(previewResponse.getAliases()));
assertThat(createIndexRequest.settings(), equalTo(previewResponse.getSettings()));
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(previewResponse.getMappings());
assertThat(BytesReference.bytes(builder), equalTo(createIndexRequest.mappings()));
}
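// Backwards compatibility: responses from nodes before 7.7 carried the preview docs and mappings at
// the top level rather than under generated_dest_index; the parser must still accept that shape.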
public void testBWCPre77XContent() throws IOException {
PreviewTransformResponse response = randomPreviewResponse();
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
builder.startArray("preview");
for (Map<String, Object> doc : response.getDocs()) {
builder.map(doc);
}
builder.endArray();
builder.field("mappings", response.getGeneratedDestIndexSettings().getMappings());
builder.endObject();
XContentParser parser = createParser(builder);
PreviewTransformResponse oldResponse = PreviewTransformResponse.fromXContent(parser);
assertThat(response.getDocs(), equalTo(oldResponse.getDocs()));
assertThat(response.getMappings(), equalTo(oldResponse.getMappings()));
assertTrue(oldResponse.getAliases().isEmpty());
assertThat(oldResponse.getSettings(), equalTo(Settings.EMPTY));
}
private PreviewTransformResponse createTestInstance() {
return randomPreviewResponse();
}
private void toXContent(PreviewTransformResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
builder.startArray("preview");
for (Map<String, Object> doc : response.getDocs()) {
builder.map(doc);
}
builder.endArray();
builder.startObject("generated_dest_index");
builder.field("mappings", response.getGeneratedDestIndexSettings().getMappings());
builder.startObject("settings");
response.getGeneratedDestIndexSettings().getSettings().toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
builder.startObject("aliases");
for (Alias alias : response.getGeneratedDestIndexSettings().getAliases()) {
alias.toXContent(builder, ToXContent.EMPTY_PARAMS);
}
builder.endObject();
builder.endObject();
builder.endObject();
}
private static PreviewTransformResponse randomPreviewResponse() {
int size = randomIntBetween(0, 10);
List<Map<String, Object>> data = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
data.add(Collections.singletonMap(randomAlphaOfLength(10), Collections.singletonMap("value1", randomIntBetween(1, 100))));
}
return new PreviewTransformResponse(data, randomGeneratedDestIndexSettings());
}
private static PreviewTransformResponse.GeneratedDestIndexSettings randomGeneratedDestIndexSettings() {
int size = randomIntBetween(0, 10);
Map<String, Object> mappings = null;
if (randomBoolean()) {
mappings = new HashMap<>(size);
for (int i = 0; i < size; i++) {
mappings.put(randomAlphaOfLength(10), Collections.singletonMap("type", randomAlphaOfLength(10)));
}
}
Settings settings = null;
if (randomBoolean()) {
Settings.Builder settingsBuilder = Settings.builder();
size = randomIntBetween(0, 10);
for (int i = 0; i < size; i++) {
settingsBuilder.put(randomAlphaOfLength(10), randomBoolean());
}
settings = settingsBuilder.build();
}
Set<Alias> aliases = null;
if (randomBoolean()) {
aliases = new HashSet<>();
size = randomIntBetween(0, 10);
for (int i = 0; i < size; i++) {
aliases.add(new Alias(randomAlphaOfLength(10)));
}
}
return new PreviewTransformResponse.GeneratedDestIndexSettings(mappings, settings, aliases);
}
}

View File

@ -1,80 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
public class PutTransformRequestTests extends AbstractXContentTestCase<PutTransformRequest> {
public void testValidate() {
assertFalse(createTestInstance().validate().isPresent());
TransformConfig config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
Optional<ValidationException> error = new PutTransformRequest(config).validate();
assertTrue(error.isPresent());
assertThat(error.get().getMessage(), containsString("transform id cannot be null"));
assertThat(error.get().getMessage(), containsString("transform source cannot be null"));
assertThat(error.get().getMessage(), containsString("transform destination cannot be null"));
error = new PutTransformRequest(null).validate();
assertTrue(error.isPresent());
assertThat(error.get().getMessage(), containsString("put requires a non-null transform config"));
}
@Override
protected PutTransformRequest createTestInstance() {
return new PutTransformRequest(TransformConfigTests.randomTransformConfig());
}
@Override
protected PutTransformRequest doParseInstance(XContentParser parser) throws IOException {
return new PutTransformRequest(TransformConfig.fromXContent(parser));
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

View File

@ -1,42 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
public class StartDataFrameTransformRequestTests extends ESTestCase {
public void testValidate_givenNullId() {
StartTransformRequest request = new StartTransformRequest(null, null);
Optional<ValidationException> validate = request.validate();
assertTrue(validate.isPresent());
assertThat(validate.get().getMessage(), containsString("transform id must not be null"));
}
public void testValidate_givenValid() {
StartTransformRequest request = new StartTransformRequest("foo", null);
Optional<ValidationException> validate = request.validate();
assertFalse(validate.isPresent());
}
}

View File

@ -1,42 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.containsString;
public class StopTransformRequestTests extends ESTestCase {
public void testValidate_givenNullId() {
StopTransformRequest request = new StopTransformRequest(null);
Optional<ValidationException> validate = request.validate();
assertTrue(validate.isPresent());
assertThat(validate.get().getMessage(), containsString("transform id must not be null"));
}
public void testValidate_givenValid() {
StopTransformRequest request = new StopTransformRequest("foo");
Optional<ValidationException> validate = request.validate();
assertFalse(validate.isPresent());
}
}

View File

@ -1,63 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class UpdateDataFrameTransformResponseTests extends ESTestCase {
public void testXContentParser() throws IOException {
xContentTester(this::createParser,
UpdateDataFrameTransformResponseTests::createTestInstance,
UpdateDataFrameTransformResponseTests::toXContent,
UpdateTransformResponse::fromXContent)
.assertToXContentEquivalence(false)
.supportsUnknownFields(false)
.test();
}
private static UpdateTransformResponse createTestInstance() {
return new UpdateTransformResponse(TransformConfigTests.randomTransformConfig());
}
private static void toXContent(UpdateTransformResponse response, XContentBuilder builder) throws IOException {
response.getTransformConfiguration().toXContent(builder, null);
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

View File

@ -1,78 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests.randomTransformConfigUpdate;
import static org.hamcrest.Matchers.containsString;
public class UpdateTransformRequestTests extends AbstractXContentTestCase<UpdateTransformRequest> {
public void testValidate() {
assertFalse(createTestInstance().validate().isPresent());
TransformConfigUpdate config = randomTransformConfigUpdate();
Optional<ValidationException> error = new UpdateTransformRequest(config, null).validate();
assertTrue(error.isPresent());
assertThat(error.get().getMessage(), containsString("transform id cannot be null"));
error = new UpdateTransformRequest(null, "123").validate();
assertTrue(error.isPresent());
assertThat(error.get().getMessage(), containsString("put requires a non-null transform config"));
}
private final String transformId = randomAlphaOfLength(10);
@Override
protected UpdateTransformRequest createTestInstance() {
return new UpdateTransformRequest(randomTransformConfigUpdate(), transformId);
}
@Override
protected UpdateTransformRequest doParseInstance(XContentParser parser) throws IOException {
return new UpdateTransformRequest(TransformConfigUpdate.fromXContent(parser), transformId);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

@ -1,107 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.hlrc;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.transforms.hlrc.TransformCheckpointingInfoTests;
import org.elasticsearch.client.transform.transforms.hlrc.TransformIndexerStatsTests;
import org.elasticsearch.client.transform.transforms.hlrc.TransformStatsTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction;
import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.NodeAttributes;
import org.elasticsearch.xpack.core.transform.transforms.TransformStats;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
public class GetTransformStatsResponseTests extends AbstractResponseTestCase<
GetTransformStatsAction.Response,
org.elasticsearch.client.transform.GetTransformStatsResponse> {
private static NodeAttributes randomNodeAttributes() {
return new NodeAttributes(
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomBoolean() ? Collections.emptyMap() : Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10))
);
}
private static TransformStats randomTransformStats() {
return new TransformStats(
randomAlphaOfLength(10),
randomFrom(TransformStats.State.values()),
randomBoolean() ? null : randomAlphaOfLength(100),
randomBoolean() ? null : randomNodeAttributes(),
TransformIndexerStatsTests.randomStats(),
TransformCheckpointingInfoTests.randomTransformCheckpointingInfo()
);
}
public static Response randomStatsResponse() {
List<TransformStats> stats = new ArrayList<>();
int totalStats = randomInt(10);
for (int i = 0; i < totalStats; ++i) {
stats.add(randomTransformStats());
}
int totalErrors = randomInt(10);
List<TaskOperationFailure> taskFailures = new ArrayList<>(totalErrors);
List<ElasticsearchException> nodeFailures = new ArrayList<>(totalErrors);
for (int i = 0; i < totalErrors; i++) {
taskFailures.add(new TaskOperationFailure("node1", randomLongBetween(1, 10), new Exception("error")));
nodeFailures.add(new FailedNodeException("node1", "message", new Exception("error")));
}
return new Response(stats, randomLongBetween(stats.size(), 10_000_000L), taskFailures, nodeFailures);
}
@Override
protected Response createServerTestInstance(XContentType xContentType) {
return randomStatsResponse();
}
@Override
protected GetTransformStatsResponse doParseToClientInstance(XContentParser parser) throws IOException {
return org.elasticsearch.client.transform.GetTransformStatsResponse.fromXContent(parser);
}
@Override
protected void assertInstances(Response serverTestInstance, GetTransformStatsResponse clientInstance) {
assertEquals(serverTestInstance.getTransformsStats().size(), clientInstance.getTransformsStats().size());
Iterator<TransformStats> serverIt = serverTestInstance.getTransformsStats().iterator();
Iterator<org.elasticsearch.client.transform.transforms.TransformStats> clientIt = clientInstance.getTransformsStats().iterator();
while (serverIt.hasNext()) {
TransformStatsTests.assertHlrcEquals(serverIt.next(), clientIt.next());
}
assertThat(serverTestInstance.getCount(), equalTo(clientInstance.getCount()));
}
}

@ -1,119 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.hlrc;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.transform.PreviewTransformResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.TransformDestIndexSettings;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
public class PreviewTransformResponseTests extends AbstractResponseTestCase<
PreviewTransformAction.Response,
org.elasticsearch.client.transform.PreviewTransformResponse> {
public static Response randomPreviewResponse() {
int size = randomIntBetween(0, 10);
List<Map<String, Object>> data = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
data.add(Collections.singletonMap(randomAlphaOfLength(10), Collections.singletonMap("value1", randomIntBetween(1, 100))));
}
return new Response(data, randomGeneratedDestIndexSettings());
}
private static TransformDestIndexSettings randomGeneratedDestIndexSettings() {
int size = randomIntBetween(0, 10);
Map<String, Object> mappings = null;
if (randomBoolean()) {
mappings = new HashMap<>(size);
for (int i = 0; i < size; i++) {
mappings.put(randomAlphaOfLength(10), Collections.singletonMap("type", randomAlphaOfLength(10)));
}
}
Settings settings = null;
if (randomBoolean()) {
Settings.Builder settingsBuilder = Settings.builder();
size = randomIntBetween(0, 10);
for (int i = 0; i < size; i++) {
settingsBuilder.put(randomAlphaOfLength(10), randomBoolean());
}
settings = settingsBuilder.build();
}
Set<Alias> aliases = null;
if (randomBoolean()) {
aliases = new HashSet<>();
size = randomIntBetween(0, 10);
for (int i = 0; i < size; i++) {
aliases.add(new Alias(randomAlphaOfLength(10)));
}
}
return new TransformDestIndexSettings(mappings, settings, aliases);
}
@Override
protected Response createServerTestInstance(XContentType xContentType) {
return randomPreviewResponse();
}
@Override
protected PreviewTransformResponse doParseToClientInstance(XContentParser parser) throws IOException {
return org.elasticsearch.client.transform.PreviewTransformResponse.fromXContent(parser);
}
@Override
protected void assertInstances(Response serverTestInstance, PreviewTransformResponse clientInstance) {
assertThat(serverTestInstance.getDocs(), equalTo(clientInstance.getDocs()));
assertThat(
serverTestInstance.getGeneratedDestIndexSettings().getAliases(),
equalTo(clientInstance.getGeneratedDestIndexSettings().getAliases())
);
assertThat(
serverTestInstance.getGeneratedDestIndexSettings().getMappings(),
equalTo(clientInstance.getGeneratedDestIndexSettings().getMappings())
);
assertThat(
serverTestInstance.getGeneratedDestIndexSettings().getSettings(),
equalTo(clientInstance.getGeneratedDestIndexSettings().getSettings())
);
}
}

@ -1,49 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
public class DestConfigTests extends AbstractXContentTestCase<DestConfig> {
public static DestConfig randomDestConfig() {
return new DestConfig(randomAlphaOfLength(10),
randomBoolean() ? null : randomAlphaOfLength(10));
}
@Override
protected DestConfig doParseInstance(XContentParser parser) throws IOException {
return DestConfig.PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected DestConfig createTestInstance() {
return randomDestConfig();
}
}

@ -1,64 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;
public class NodeAttributesTests extends AbstractXContentTestCase<NodeAttributes> {
public static NodeAttributes createRandom() {
int numberOfAttributes = randomIntBetween(1, 10);
Map<String, String> attributes = new HashMap<>(numberOfAttributes);
for(int i = 0; i < numberOfAttributes; i++) {
String val = randomAlphaOfLength(10);
attributes.put("key-"+i, val);
}
return new NodeAttributes(randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
attributes);
}
@Override
protected NodeAttributes createTestInstance() {
return createRandom();
}
@Override
protected NodeAttributes doParseInstance(XContentParser parser) throws IOException {
return NodeAttributes.PARSER.parse(parser, null);
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
return field -> !field.isEmpty();
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

@ -1,62 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import static java.util.Collections.emptyList;
public class QueryConfigTests extends AbstractXContentTestCase<QueryConfig> {
public static QueryConfig randomQueryConfig() {
QueryBuilder queryBuilder = randomBoolean() ? new MatchAllQueryBuilder() : new MatchNoneQueryBuilder();
return new QueryConfig(queryBuilder);
}
@Override
protected QueryConfig createTestInstance() {
return randomQueryConfig();
}
@Override
protected QueryConfig doParseInstance(XContentParser parser) throws IOException {
return QueryConfig.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
}

@ -1,118 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class SettingsConfigTests extends AbstractXContentTestCase<SettingsConfig> {
public static SettingsConfig randomSettingsConfig() {
return new SettingsConfig(randomBoolean() ? null : randomIntBetween(10, 10_000), randomBoolean() ? null : randomFloat());
}
@Override
protected SettingsConfig createTestInstance() {
return randomSettingsConfig();
}
@Override
protected SettingsConfig doParseInstance(XContentParser parser) throws IOException {
return SettingsConfig.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
public void testExplicitNullOnWriteParser() throws IOException {
// test that an explicit null is handled differently than not set
SettingsConfig config = fromString("{\"max_page_search_size\" : null}");
assertThat(config.getMaxPageSearchSize(), equalTo(-1));
Map<String, Object> settingsAsMap = xContentToMap(config);
assertNull(settingsAsMap.getOrDefault("max_page_search_size", "not_set"));
assertThat(settingsAsMap.getOrDefault("docs_per_second", "not_set"), equalTo("not_set"));
SettingsConfig emptyConfig = fromString("{}");
assertNull(emptyConfig.getMaxPageSearchSize());
settingsAsMap = xContentToMap(emptyConfig);
assertTrue(settingsAsMap.isEmpty());
config = fromString("{\"docs_per_second\" : null}");
assertThat(config.getDocsPerSecond(), equalTo(-1F));
settingsAsMap = xContentToMap(config);
assertThat(settingsAsMap.getOrDefault("max_page_search_size", "not_set"), equalTo("not_set"));
assertNull(settingsAsMap.getOrDefault("docs_per_second", "not_set"));
}
public void testExplicitNullOnWriteBuilder() throws IOException {
// test that an explicit null is handled differently than not set
SettingsConfig config = new SettingsConfig.Builder().setMaxPageSearchSize(null).build();
assertThat(config.getMaxPageSearchSize(), equalTo(-1));
Map<String, Object> settingsAsMap = xContentToMap(config);
assertNull(settingsAsMap.getOrDefault("max_page_search_size", "not_set"));
assertThat(settingsAsMap.getOrDefault("docs_per_second", "not_set"), equalTo("not_set"));
SettingsConfig emptyConfig = new SettingsConfig.Builder().build();
assertNull(emptyConfig.getMaxPageSearchSize());
settingsAsMap = xContentToMap(emptyConfig);
assertTrue(settingsAsMap.isEmpty());
config = new SettingsConfig.Builder().setRequestsPerSecond(null).build();
assertThat(config.getDocsPerSecond(), equalTo(-1F));
settingsAsMap = xContentToMap(config);
assertThat(settingsAsMap.getOrDefault("max_page_search_size", "not_set"), equalTo("not_set"));
assertNull(settingsAsMap.getOrDefault("docs_per_second", "not_set"));
}
private Map<String, Object> xContentToMap(ToXContent xcontent) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder();
xcontent.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentParser parser = XContentType.JSON.xContent()
.createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput());
return parser.map();
}
private SettingsConfig fromString(String source) throws IOException {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
return SettingsConfig.fromXContent(parser);
}
}
}

@ -1,68 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
import static java.util.Collections.emptyList;
public class SourceConfigTests extends AbstractXContentTestCase<SourceConfig> {
public static SourceConfig randomSourceConfig() {
return new SourceConfig(generateRandomStringArray(10, 10, false, false),
QueryConfigTests.randomQueryConfig());
}
@Override
protected SourceConfig doParseInstance(XContentParser parser) throws IOException {
return SourceConfig.PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only as QueryConfig stores a Map<String, Object>
return field -> !field.isEmpty();
}
@Override
protected SourceConfig createTestInstance() {
return randomSourceConfig();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
}

@ -1,49 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
public class TimeSyncConfigTests extends AbstractXContentTestCase<TimeSyncConfig> {
public static TimeSyncConfig randomTimeSyncConfig() {
return new TimeSyncConfig(randomAlphaOfLengthBetween(1, 10), new TimeValue(randomNonNegativeLong()));
}
@Override
protected TimeSyncConfig createTestInstance() {
return randomTimeSyncConfig();
}
@Override
protected TimeSyncConfig doParseInstance(XContentParser parser) throws IOException {
return TimeSyncConfig.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

@ -1,63 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class TransformCheckpointStatsTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
TransformCheckpointStatsTests::randomTransformCheckpointStats,
TransformCheckpointStatsTests::toXContent,
TransformCheckpointStats::fromXContent)
.supportsUnknownFields(true)
.randomFieldsExcludeFilter(field -> field.startsWith("position"))
.test();
}
public static TransformCheckpointStats randomTransformCheckpointStats() {
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
randomBoolean() ? null : TransformIndexerPositionTests.randomTransformIndexerPosition(),
randomBoolean() ? null : TransformProgressTests.randomInstance(),
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
}
public static void toXContent(TransformCheckpointStats stats, XContentBuilder builder) throws IOException {
builder.startObject();
builder.field(TransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint());
if (stats.getPosition() != null) {
builder.field(TransformCheckpointStats.POSITION.getPreferredName());
TransformIndexerPositionTests.toXContent(stats.getPosition(), builder);
}
if (stats.getCheckpointProgress() != null) {
builder.field(TransformCheckpointStats.CHECKPOINT_PROGRESS.getPreferredName());
TransformProgressTests.toXContent(stats.getCheckpointProgress(), builder);
}
builder.field(TransformCheckpointStats.TIMESTAMP_MILLIS.getPreferredName(), stats.getTimestampMillis());
builder.field(TransformCheckpointStats.TIME_UPPER_BOUND_MILLIS.getPreferredName(), stats.getTimeUpperBoundMillis());
builder.endObject();
}
}

@ -1,65 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.time.Instant;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class TransformCheckpointingInfoTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
TransformCheckpointingInfoTests::randomTransformCheckpointingInfo,
TransformCheckpointingInfoTests::toXContent,
TransformCheckpointingInfo::fromXContent)
.supportsUnknownFields(false)
.test();
}
public static TransformCheckpointingInfo randomTransformCheckpointingInfo() {
return new TransformCheckpointingInfo(
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
randomLongBetween(0, 10000),
randomBoolean() ? null : Instant.ofEpochMilli(randomNonNegativeLong()));
}
public static void toXContent(TransformCheckpointingInfo info, XContentBuilder builder) throws IOException {
builder.startObject();
if (info.getLast().getTimestampMillis() > 0) {
builder.field(TransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName());
TransformCheckpointStatsTests.toXContent(info.getLast(), builder);
}
if (info.getNext().getTimestampMillis() > 0) {
builder.field(TransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName());
TransformCheckpointStatsTests.toXContent(info.getNext(), builder);
}
builder.field(TransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind());
if (info.getChangesLastDetectedAt() != null) {
builder.field(TransformCheckpointingInfo.CHANGES_LAST_DETECTED_AT.getPreferredName(), info.getChangesLastDetectedAt());
}
builder.endObject();
}
}

@ -1,91 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.Version;
import org.elasticsearch.client.transform.TransformNamedXContentProvider;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig;
import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig;
public class TransformConfigTests extends AbstractXContentTestCase<TransformConfig> {
public static TransformConfig randomTransformConfig() {
return new TransformConfig(
randomAlphaOfLengthBetween(1, 10),
randomSourceConfig(),
randomDestConfig(),
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1000, 1000000)),
randomBoolean() ? null : randomSyncConfig(),
PivotConfigTests.randomPivotConfig(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 100),
SettingsConfigTests.randomSettingsConfig(),
randomBoolean() ? null : Instant.now(),
randomBoolean() ? null : Version.CURRENT.toString()
);
}
public static SyncConfig randomSyncConfig() {
return TimeSyncConfigTests.randomTimeSyncConfig();
}
@Override
protected TransformConfig createTestInstance() {
return randomTransformConfig();
}
@Override
protected TransformConfig doParseInstance(XContentParser parser) throws IOException {
return TransformConfig.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

@ -1,78 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.client.transform.TransformNamedXContentProvider;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig;
import static org.elasticsearch.client.transform.transforms.SettingsConfigTests.randomSettingsConfig;
import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig;
public class TransformConfigUpdateTests extends AbstractXContentTestCase<TransformConfigUpdate> {
public static TransformConfigUpdate randomTransformConfigUpdate() {
return new TransformConfigUpdate(
randomBoolean() ? null : randomSourceConfig(),
randomBoolean() ? null : randomDestConfig(),
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
randomBoolean() ? null : randomSyncConfig(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000),
randomBoolean() ? null : randomSettingsConfig()
);
}
public static SyncConfig randomSyncConfig() {
return TimeSyncConfigTests.randomTimeSyncConfig();
}
@Override
protected TransformConfigUpdate doParseInstance(XContentParser parser) throws IOException {
return TransformConfigUpdate.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
@Override
protected TransformConfigUpdate createTestInstance() {
return randomTransformConfigUpdate();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContents);
}
}

@ -1,76 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class TransformIndexerPositionTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
TransformIndexerPositionTests::randomTransformIndexerPosition,
TransformIndexerPositionTests::toXContent,
TransformIndexerPosition::fromXContent)
.supportsUnknownFields(true)
.randomFieldsExcludeFilter(field -> field.equals("indexer_position") ||
field.equals("bucket_position"))
.test();
}
public static TransformIndexerPosition randomTransformIndexerPosition() {
return new TransformIndexerPosition(randomPositionMap(), randomPositionMap());
}
public static void toXContent(TransformIndexerPosition position, XContentBuilder builder) throws IOException {
builder.startObject();
if (position.getIndexerPosition() != null) {
builder.field("indexer_position", position.getIndexerPosition());
}
if (position.getBucketsPosition() != null) {
builder.field("bucket_position", position.getBucketsPosition());
}
builder.endObject();
}
private static Map<String, Object> randomPositionMap() {
if (randomBoolean()) {
return null;
}
int numFields = randomIntBetween(1, 5);
Map<String, Object> position = new LinkedHashMap<>();
for (int i = 0; i < numFields; i++) {
Object value;
if (randomBoolean()) {
value = randomLong();
} else {
value = randomAlphaOfLengthBetween(1, 10);
}
position.put(randomAlphaOfLengthBetween(3, 10), value);
}
return position;
}
}

@ -1,132 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class TransformIndexerStatsTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(
this::createParser,
TransformIndexerStatsTests::randomStats,
TransformIndexerStatsTests::toXContent,
TransformIndexerStats::fromXContent
).supportsUnknownFields(true).test();
}
public static TransformIndexerStats randomStats() {
return new TransformIndexerStats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomDouble(),
randomDouble(),
randomDouble()
);
}
public static void toXContent(TransformIndexerStats stats, XContentBuilder builder) throws IOException {
builder.startObject();
if (randomBoolean()) {
builder.field(TransformIndexerStats.PAGES_PROCESSED.getPreferredName(), stats.getPagesProcessed());
builder.field(TransformIndexerStats.DOCUMENTS_PROCESSED.getPreferredName(), stats.getDocumentsProcessed());
builder.field(TransformIndexerStats.DOCUMENTS_INDEXED.getPreferredName(), stats.getDocumentsIndexed());
builder.field(TransformIndexerStats.TRIGGER_COUNT.getPreferredName(), stats.getTriggerCount());
builder.field(TransformIndexerStats.INDEX_TIME_IN_MS.getPreferredName(), stats.getIndexTime());
builder.field(TransformIndexerStats.INDEX_TOTAL.getPreferredName(), stats.getIndexTotal());
builder.field(TransformIndexerStats.INDEX_FAILURES.getPreferredName(), stats.getIndexFailures());
builder.field(TransformIndexerStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
builder.field(TransformIndexerStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
builder.field(TransformIndexerStats.PROCESSING_TIME_IN_MS.getPreferredName(), stats.getProcessingTime());
builder.field(TransformIndexerStats.PROCESSING_TOTAL.getPreferredName(), stats.getProcessingTotal());
builder.field(TransformIndexerStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
builder.field(
TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(),
stats.getExpAvgCheckpointDurationMs()
);
builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(), stats.getExpAvgDocumentsIndexed());
builder.field(
TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(),
stats.getExpAvgDocumentsProcessed()
);
} else {
// a toXContent version which leaves out field with value 0 (simulating the case that an older version misses a field)
xContentFieldIfNotZero(builder, TransformIndexerStats.PAGES_PROCESSED.getPreferredName(), stats.getPagesProcessed());
xContentFieldIfNotZero(builder, TransformIndexerStats.DOCUMENTS_PROCESSED.getPreferredName(), stats.getDocumentsProcessed());
xContentFieldIfNotZero(builder, TransformIndexerStats.DOCUMENTS_INDEXED.getPreferredName(), stats.getDocumentsIndexed());
xContentFieldIfNotZero(builder, TransformIndexerStats.TRIGGER_COUNT.getPreferredName(), stats.getTriggerCount());
xContentFieldIfNotZero(builder, TransformIndexerStats.INDEX_TIME_IN_MS.getPreferredName(), stats.getIndexTime());
xContentFieldIfNotZero(builder, TransformIndexerStats.INDEX_TOTAL.getPreferredName(), stats.getIndexTotal());
xContentFieldIfNotZero(builder, TransformIndexerStats.INDEX_FAILURES.getPreferredName(), stats.getIndexFailures());
xContentFieldIfNotZero(builder, TransformIndexerStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
xContentFieldIfNotZero(builder, TransformIndexerStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
xContentFieldIfNotZero(builder, TransformIndexerStats.PROCESSING_TIME_IN_MS.getPreferredName(), stats.getProcessingTime());
xContentFieldIfNotZero(builder, TransformIndexerStats.PROCESSING_TOTAL.getPreferredName(), stats.getProcessingTotal());
xContentFieldIfNotZero(builder, TransformIndexerStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
xContentFieldIfNotZero(
builder,
TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(),
stats.getExpAvgCheckpointDurationMs()
);
xContentFieldIfNotZero(
builder,
TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(),
stats.getExpAvgDocumentsIndexed()
);
xContentFieldIfNotZero(
builder,
TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(),
stats.getExpAvgDocumentsProcessed()
);
}
builder.endObject();
}
private static XContentBuilder xContentFieldIfNotZero(XContentBuilder builder, String name, long value) throws IOException {
if (value > 0) {
builder.field(name, value);
}
return builder;
}
private static XContentBuilder xContentFieldIfNotZero(XContentBuilder builder, String name, double value) throws IOException {
if (value > 0.0) {
builder.field(name, value);
}
return builder;
}
}

@ -1,64 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class TransformProgressTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
TransformProgressTests::randomInstance,
TransformProgressTests::toXContent,
TransformProgress::fromXContent)
.supportsUnknownFields(true)
.test();
}
public static TransformProgress randomInstance() {
return new TransformProgress(
randomBoolean() ? null : randomNonNegativeLong(),
randomBoolean() ? null : randomNonNegativeLong(),
randomBoolean() ? null : randomDouble(),
randomBoolean() ? null : randomNonNegativeLong(),
randomBoolean() ? null : randomNonNegativeLong());
}
public static void toXContent(TransformProgress progress, XContentBuilder builder) throws IOException {
builder.startObject();
if (progress.getTotalDocs() != null) {
builder.field(TransformProgress.TOTAL_DOCS.getPreferredName(), progress.getTotalDocs());
}
if (progress.getPercentComplete() != null) {
builder.field(TransformProgress.PERCENT_COMPLETE.getPreferredName(), progress.getPercentComplete());
}
if (progress.getRemainingDocs() != null) {
builder.field(TransformProgress.DOCS_REMAINING.getPreferredName(), progress.getRemainingDocs());
}
builder.field(TransformProgress.DOCS_INDEXED.getPreferredName(), progress.getDocumentsIndexed());
builder.field(TransformProgress.DOCS_PROCESSED.getPreferredName(), progress.getDocumentsProcessed());
builder.endObject();
}
}

@ -1,73 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class TransformStatsTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
TransformStatsTests::randomInstance,
TransformStatsTests::toXContent,
TransformStats::fromXContent)
.supportsUnknownFields(true)
.randomFieldsExcludeFilter(field -> field.equals("node.attributes") || field.contains("position"))
.test();
}
public static TransformStats randomInstance() {
return new TransformStats(randomAlphaOfLength(10),
randomBoolean() ? null : randomFrom(TransformStats.State.values()),
randomBoolean() ? null : randomAlphaOfLength(100),
randomBoolean() ? null : NodeAttributesTests.createRandom(),
TransformIndexerStatsTests.randomStats(),
randomBoolean() ? null : TransformCheckpointingInfoTests.randomTransformCheckpointingInfo());
}
public static void toXContent(TransformStats stats, XContentBuilder builder) throws IOException {
builder.startObject();
builder.field(TransformStats.ID.getPreferredName(), stats.getId());
if (stats.getState() != null) {
builder.field(TransformStats.STATE_FIELD.getPreferredName(),
stats.getState().value());
}
if (stats.getReason() != null) {
builder.field(TransformStats.REASON_FIELD.getPreferredName(), stats.getReason());
}
if (stats.getNode() != null) {
builder.field(TransformStats.NODE_FIELD.getPreferredName());
stats.getNode().toXContent(builder, ToXContent.EMPTY_PARAMS);
}
builder.field(TransformStats.STATS_FIELD.getPreferredName());
TransformIndexerStatsTests.toXContent(stats.getIndexerStats(), builder);
if (stats.getCheckpointingInfo() != null) {
builder.field(TransformStats.CHECKPOINTING_INFO_FIELD.getPreferredName());
TransformCheckpointingInfoTests.toXContent(stats.getCheckpointingInfo(), builder);
}
builder.endObject();
}
}

@ -1,66 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.transform.transforms.SettingsConfig;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
public class SettingsConfigTests extends AbstractResponseTestCase<
org.elasticsearch.xpack.core.transform.transforms.SettingsConfig,
SettingsConfig> {
public static org.elasticsearch.xpack.core.transform.transforms.SettingsConfig randomSettingsConfig() {
return new org.elasticsearch.xpack.core.transform.transforms.SettingsConfig(
randomBoolean() ? null : randomIntBetween(10, 10_000),
randomBoolean() ? null : randomFloat()
);
}
public static void assertHlrcEquals(
org.elasticsearch.xpack.core.transform.transforms.SettingsConfig serverTestInstance,
SettingsConfig clientInstance
) {
assertEquals(serverTestInstance.getMaxPageSearchSize(), clientInstance.getMaxPageSearchSize());
assertEquals(serverTestInstance.getDocsPerSecond(), clientInstance.getDocsPerSecond());
}
@Override
protected org.elasticsearch.xpack.core.transform.transforms.SettingsConfig createServerTestInstance(XContentType xContentType) {
return randomSettingsConfig();
}
@Override
protected SettingsConfig doParseToClientInstance(XContentParser parser) throws IOException {
return SettingsConfig.fromXContent(parser);
}
@Override
protected void assertInstances(
org.elasticsearch.xpack.core.transform.transforms.SettingsConfig serverTestInstance,
SettingsConfig clientInstance
) {
assertHlrcEquals(serverTestInstance, clientInstance);
}
}

@ -1,60 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
public class TimeSyncConfigTests
extends AbstractResponseTestCase<org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig, TimeSyncConfig> {
public static org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig randomTimeSyncConfig() {
return new org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig(randomAlphaOfLengthBetween(1, 10),
new TimeValue(randomNonNegativeLong()));
}
public static void assertHlrcEquals(org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig serverTestInstance,
TimeSyncConfig clientInstance) {
assertEquals(serverTestInstance.getField(), clientInstance.getField());
assertEquals(serverTestInstance.getDelay(), clientInstance.getDelay());
}
@Override
protected org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig createServerTestInstance(XContentType xContentType) {
return randomTimeSyncConfig();
}
@Override
protected TimeSyncConfig doParseToClientInstance(XContentParser parser) throws IOException {
return TimeSyncConfig.fromXContent(parser);
}
@Override
protected void assertInstances(org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig serverTestInstance,
TimeSyncConfig clientInstance) {
assertHlrcEquals(serverTestInstance, clientInstance);
}
}

@ -1,81 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
public class TransformCheckpointStatsTests extends AbstractResponseTestCase<
TransformCheckpointStats,
org.elasticsearch.client.transform.transforms.TransformCheckpointStats> {
public static TransformCheckpointStats fromHlrc(
org.elasticsearch.client.transform.transforms.TransformCheckpointStats instance) {
return new TransformCheckpointStats(instance.getCheckpoint(),
TransformIndexerPositionTests.fromHlrc(instance.getPosition()),
TransformProgressTests.fromHlrc(instance.getCheckpointProgress()),
instance.getTimestampMillis(),
instance.getTimeUpperBoundMillis());
}
public static TransformCheckpointStats randomTransformCheckpointStats() {
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
TransformIndexerPositionTests.randomTransformIndexerPosition(),
randomBoolean() ? null : TransformProgressTests.randomTransformProgress(),
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
}
@Override
protected TransformCheckpointStats createServerTestInstance(XContentType xContentType) {
return randomTransformCheckpointStats();
}
@Override
protected org.elasticsearch.client.transform.transforms.TransformCheckpointStats doParseToClientInstance(XContentParser parser)
throws IOException {
return org.elasticsearch.client.transform.transforms.TransformCheckpointStats.fromXContent(parser);
}
@Override
protected void assertInstances(TransformCheckpointStats serverTestInstance,
org.elasticsearch.client.transform.transforms.TransformCheckpointStats clientInstance) {
assertThat(serverTestInstance.getCheckpoint(), equalTo(clientInstance.getCheckpoint()));
assertThat(serverTestInstance.getPosition().getBucketsPosition(), equalTo(clientInstance.getPosition().getBucketsPosition()));
assertThat(serverTestInstance.getPosition().getIndexerPosition(), equalTo(clientInstance.getPosition().getIndexerPosition()));
assertThat(serverTestInstance.getTimestampMillis(), equalTo(clientInstance.getTimestampMillis()));
assertThat(serverTestInstance.getTimeUpperBoundMillis(), equalTo(clientInstance.getTimeUpperBoundMillis()));
if (serverTestInstance.getCheckpointProgress() != null) {
assertThat(serverTestInstance.getCheckpointProgress().getDocumentsIndexed(),
equalTo(clientInstance.getCheckpointProgress().getDocumentsIndexed()));
assertThat(serverTestInstance.getCheckpointProgress().getDocumentsProcessed(),
equalTo(clientInstance.getCheckpointProgress().getDocumentsProcessed()));
assertThat(serverTestInstance.getCheckpointProgress().getPercentComplete(),
equalTo(clientInstance.getCheckpointProgress().getPercentComplete()));
assertThat(serverTestInstance.getCheckpointProgress().getTotalDocs(),
equalTo(clientInstance.getCheckpointProgress().getTotalDocs()));
}
}
}

View File

@@ -1,60 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.time.Instant;
import static org.elasticsearch.client.transform.transforms.hlrc.TransformStatsTests.assertTransformCheckpointInfo;
public class TransformCheckpointingInfoTests extends AbstractResponseTestCase<
org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo,
TransformCheckpointingInfo> {
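// Round-trips a random server-side TransformCheckpointingInfo into the HLRC class, delegating the comparison to the shared helper in TransformStatsTests.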
public static org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo randomTransformCheckpointingInfo() {
return new org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo(
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
randomNonNegativeLong(),
randomBoolean() ? null : Instant.ofEpochMilli(randomNonNegativeLong()));
}
@Override
protected org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo
createServerTestInstance(XContentType xContentType) {
return randomTransformCheckpointingInfo();
}
@Override
protected TransformCheckpointingInfo doParseToClientInstance(XContentParser parser) throws IOException {
return TransformCheckpointingInfo.fromXContent(parser);
}
@Override
protected void assertInstances(org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo serverTestInstance,
TransformCheckpointingInfo clientInstance) {
assertTransformCheckpointInfo(serverTestInstance, clientInstance);
}
}

View File

@@ -1,82 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class TransformIndexerPositionTests extends AbstractResponseTestCase<
TransformIndexerPosition,
org.elasticsearch.client.transform.transforms.TransformIndexerPosition> {
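// Verifies that the HLRC TransformIndexerPosition parsed from XContent carries the same indexer and bucket position maps as the server-side instance.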
public static TransformIndexerPosition fromHlrc(
org.elasticsearch.client.transform.transforms.TransformIndexerPosition instance) {
if (instance == null) {
return null;
}
return new TransformIndexerPosition(instance.getIndexerPosition(), instance.getBucketsPosition());
}
public static TransformIndexerPosition randomTransformIndexerPosition() {
return new TransformIndexerPosition(randomPositionMap(), randomPositionMap());
}
@Override
protected TransformIndexerPosition createServerTestInstance(XContentType xContentType) {
return randomTransformIndexerPosition();
}
@Override
protected org.elasticsearch.client.transform.transforms.TransformIndexerPosition doParseToClientInstance(XContentParser parser) {
return org.elasticsearch.client.transform.transforms.TransformIndexerPosition.fromXContent(parser);
}
@Override
protected void assertInstances(TransformIndexerPosition serverTestInstance,
org.elasticsearch.client.transform.transforms.TransformIndexerPosition clientInstance) {
assertThat(serverTestInstance.getIndexerPosition(), equalTo(clientInstance.getIndexerPosition()));
assertThat(serverTestInstance.getBucketsPosition(), equalTo(clientInstance.getBucketsPosition()));
}
private static Map<String, Object> randomPositionMap() {
if (randomBoolean()) {
return null;
}
int numFields = randomIntBetween(1, 5);
Map<String, Object> position = new LinkedHashMap<>();
for (int i = 0; i < numFields; i++) {
Object value;
if (randomBoolean()) {
value = randomLong();
} else {
value = randomAlphaOfLengthBetween(1, 10);
}
position.put(randomAlphaOfLengthBetween(3, 10), value);
}
return position;
}
}

View File

@@ -1,72 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import static org.elasticsearch.client.transform.transforms.hlrc.TransformStatsTests.assertTransformIndexerStats;
public class TransformIndexerStatsTests extends AbstractResponseTestCase<
org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats,
TransformIndexerStats> {
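// Round-trips random server-side TransformIndexerStats through XContent and checks all counters and averages via the shared helper in TransformStatsTests.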
public static org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats randomStats() {
return new org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats(
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
randomDouble(),
randomDouble(),
randomDouble()
);
}
@Override
protected org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats createServerTestInstance(XContentType xContentType) {
return randomStats();
}
@Override
protected TransformIndexerStats doParseToClientInstance(XContentParser parser) throws IOException {
return TransformIndexerStats.fromXContent(parser);
}
@Override
protected void assertInstances(
org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats serverTestInstance,
TransformIndexerStats clientInstance
) {
assertTransformIndexerStats(serverTestInstance, clientInstance);
}
}

View File

@@ -1,72 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
import static org.hamcrest.Matchers.equalTo;
public class TransformProgressTests extends AbstractResponseTestCase<
TransformProgress,
org.elasticsearch.client.transform.transforms.TransformProgress> {
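// Converts between the server-side and HLRC TransformProgress representations and compares document counts and percent complete.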
public static TransformProgress fromHlrc(
org.elasticsearch.client.transform.transforms.TransformProgress instance) {
if (instance == null) {
return null;
}
return new TransformProgress(instance.getTotalDocs(),
instance.getRemainingDocs(),
instance.getDocumentsProcessed(),
instance.getDocumentsIndexed());
}
public static TransformProgress randomTransformProgress() {
Long totalDocs = randomBoolean() ? null : randomNonNegativeLong();
Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null;
return new TransformProgress(
totalDocs,
docsRemaining,
totalDocs != null ? totalDocs - docsRemaining : randomNonNegativeLong(),
randomBoolean() ? null : randomNonNegativeLong());
}
@Override
protected TransformProgress createServerTestInstance(XContentType xContentType) {
return randomTransformProgress();
}
@Override
protected org.elasticsearch.client.transform.transforms.TransformProgress doParseToClientInstance(XContentParser parser) {
return org.elasticsearch.client.transform.transforms.TransformProgress.fromXContent(parser);
}
@Override
protected void assertInstances(TransformProgress serverTestInstance,
org.elasticsearch.client.transform.transforms.TransformProgress clientInstance) {
assertThat(serverTestInstance.getTotalDocs(), equalTo(clientInstance.getTotalDocs()));
assertThat(serverTestInstance.getDocumentsProcessed(), equalTo(clientInstance.getDocumentsProcessed()));
assertThat(serverTestInstance.getPercentComplete(), equalTo(clientInstance.getPercentComplete()));
assertThat(serverTestInstance.getDocumentsIndexed(), equalTo(clientInstance.getDocumentsIndexed()));
}
}

View File

@@ -1,174 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.transform.transforms.NodeAttributes;
import org.elasticsearch.client.transform.transforms.TransformCheckpointStats;
import org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo;
import org.elasticsearch.client.transform.transforms.TransformIndexerPosition;
import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
import org.elasticsearch.client.transform.transforms.TransformProgress;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class TransformStatsTests extends AbstractResponseTestCase<
org.elasticsearch.xpack.core.transform.transforms.TransformStats,
org.elasticsearch.client.transform.transforms.TransformStats> {
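// Covers the full TransformStats response and provides shared assertion helpers for node attributes, progress, checkpoints, and indexer stats.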
public static org.elasticsearch.xpack.core.transform.transforms.NodeAttributes randomNodeAttributes() {
int numberOfAttributes = randomIntBetween(1, 10);
Map<String, String> attributes = new HashMap<>(numberOfAttributes);
for (int i = 0; i < numberOfAttributes; i++) {
String val = randomAlphaOfLength(10);
attributes.put("key-" + i, val);
}
return new org.elasticsearch.xpack.core.transform.transforms.NodeAttributes(
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
attributes
);
}
public static void assertHlrcEquals(
org.elasticsearch.xpack.core.transform.transforms.TransformStats serverTestInstance,
TransformStats clientInstance
) {
assertThat(serverTestInstance.getId(), equalTo(clientInstance.getId()));
assertThat(serverTestInstance.getState().value(), equalTo(clientInstance.getState().value()));
assertTransformIndexerStats(serverTestInstance.getIndexerStats(), clientInstance.getIndexerStats());
assertTransformCheckpointInfo(serverTestInstance.getCheckpointingInfo(), clientInstance.getCheckpointingInfo());
assertNodeAttributes(serverTestInstance.getNode(), clientInstance.getNode());
assertThat(serverTestInstance.getReason(), equalTo(clientInstance.getReason()));
}
@Override
protected org.elasticsearch.xpack.core.transform.transforms.TransformStats createServerTestInstance(XContentType xContentType) {
return new org.elasticsearch.xpack.core.transform.transforms.TransformStats(
randomAlphaOfLength(10),
randomFrom(org.elasticsearch.xpack.core.transform.transforms.TransformStats.State.values()),
randomBoolean() ? null : randomAlphaOfLength(100),
randomBoolean() ? null : randomNodeAttributes(),
TransformIndexerStatsTests.randomStats(),
TransformCheckpointingInfoTests.randomTransformCheckpointingInfo()
);
}
@Override
protected TransformStats doParseToClientInstance(XContentParser parser) throws IOException {
return TransformStats.fromXContent(parser);
}
@Override
protected void assertInstances(
org.elasticsearch.xpack.core.transform.transforms.TransformStats serverTestInstance,
TransformStats clientInstance
) {
assertHlrcEquals(serverTestInstance, clientInstance);
}
private static void assertNodeAttributes(
org.elasticsearch.xpack.core.transform.transforms.NodeAttributes serverTestInstance,
NodeAttributes clientInstance
) {
if (serverTestInstance == null || clientInstance == null) {
assertNull(serverTestInstance);
assertNull(clientInstance);
return;
}
assertThat(serverTestInstance.getAttributes(), equalTo(clientInstance.getAttributes()));
assertThat(serverTestInstance.getEphemeralId(), equalTo(clientInstance.getEphemeralId()));
assertThat(serverTestInstance.getId(), equalTo(clientInstance.getId()));
assertThat(serverTestInstance.getName(), equalTo(clientInstance.getName()));
assertThat(serverTestInstance.getTransportAddress(), equalTo(clientInstance.getTransportAddress()));
}
public static void assertTransformProgress(
org.elasticsearch.xpack.core.transform.transforms.TransformProgress serverTestInstance,
TransformProgress clientInstance
) {
if (serverTestInstance == null || clientInstance == null) {
assertNull(serverTestInstance);
assertNull(clientInstance);
return;
}
assertThat(serverTestInstance.getPercentComplete(), equalTo(clientInstance.getPercentComplete()));
assertThat(serverTestInstance.getDocumentsProcessed(), equalTo(clientInstance.getDocumentsProcessed()));
assertThat(serverTestInstance.getTotalDocs(), equalTo(clientInstance.getTotalDocs()));
assertThat(serverTestInstance.getDocumentsIndexed(), equalTo(clientInstance.getDocumentsIndexed()));
}
public static void assertPosition(
org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition serverTestInstance,
TransformIndexerPosition clientInstance
) {
assertThat(serverTestInstance.getIndexerPosition(), equalTo(clientInstance.getIndexerPosition()));
assertThat(serverTestInstance.getBucketsPosition(), equalTo(clientInstance.getBucketsPosition()));
}
public static void assertTransformCheckpointStats(
org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats serverTestInstance,
TransformCheckpointStats clientInstance
) {
assertTransformProgress(serverTestInstance.getCheckpointProgress(), clientInstance.getCheckpointProgress());
assertThat(serverTestInstance.getCheckpoint(), equalTo(clientInstance.getCheckpoint()));
assertPosition(serverTestInstance.getPosition(), clientInstance.getPosition());
assertThat(serverTestInstance.getTimestampMillis(), equalTo(clientInstance.getTimestampMillis()));
assertThat(serverTestInstance.getTimeUpperBoundMillis(), equalTo(clientInstance.getTimeUpperBoundMillis()));
}
public static void assertTransformCheckpointInfo(
org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo serverTestInstance,
TransformCheckpointingInfo clientInstance
) {
assertTransformCheckpointStats(serverTestInstance.getNext(), clientInstance.getNext());
assertTransformCheckpointStats(serverTestInstance.getLast(), clientInstance.getLast());
assertThat(serverTestInstance.getChangesLastDetectedAt(), equalTo(clientInstance.getChangesLastDetectedAt()));
assertThat(serverTestInstance.getOperationsBehind(), equalTo(clientInstance.getOperationsBehind()));
}
public static void assertTransformIndexerStats(
org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats serverTestInstance,
TransformIndexerStats clientInstance
) {
assertThat(serverTestInstance.getExpAvgCheckpointDurationMs(), equalTo(clientInstance.getExpAvgCheckpointDurationMs()));
assertThat(serverTestInstance.getExpAvgDocumentsProcessed(), equalTo(clientInstance.getExpAvgDocumentsProcessed()));
assertThat(serverTestInstance.getExpAvgDocumentsIndexed(), equalTo(clientInstance.getExpAvgDocumentsIndexed()));
assertThat(serverTestInstance.getNumPages(), equalTo(clientInstance.getPagesProcessed()));
assertThat(serverTestInstance.getIndexFailures(), equalTo(clientInstance.getIndexFailures()));
assertThat(serverTestInstance.getIndexTime(), equalTo(clientInstance.getIndexTime()));
assertThat(serverTestInstance.getIndexTotal(), equalTo(clientInstance.getIndexTotal()));
assertThat(serverTestInstance.getNumDocuments(), equalTo(clientInstance.getDocumentsProcessed()));
assertThat(serverTestInstance.getNumInvocations(), equalTo(clientInstance.getTriggerCount()));
assertThat(serverTestInstance.getOutputDocuments(), equalTo(clientInstance.getDocumentsIndexed()));
assertThat(serverTestInstance.getSearchFailures(), equalTo(clientInstance.getSearchFailures()));
assertThat(serverTestInstance.getSearchTime(), equalTo(clientInstance.getSearchTime()));
assertThat(serverTestInstance.getSearchTotal(), equalTo(clientInstance.getSearchTotal()));
}
}

View File

@@ -1,88 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import static java.util.Collections.emptyList;
public class AggregationConfigTests extends AbstractXContentTestCase<AggregationConfig> {
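// XContent round-trip test for AggregationConfig built from a random set of supported aggregations (avg, min, max, sum).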
public static AggregationConfig randomAggregationConfig() {
AggregatorFactories.Builder builder = new AggregatorFactories.Builder();
Set<String> names = new HashSet<>();
int numAggs = randomIntBetween(1, 4);
for (int i = 0; i < numAggs; ++i) {
AggregationBuilder aggBuilder = getRandomSupportedAggregation();
if (names.add(aggBuilder.getName())) {
builder.addAggregator(aggBuilder);
}
}
return new AggregationConfig(builder);
}
@Override
protected AggregationConfig createTestInstance() {
return randomAggregationConfig();
}
@Override
protected AggregationConfig doParseInstance(XContentParser parser) throws IOException {
return AggregationConfig.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
private static AggregationBuilder getRandomSupportedAggregation() {
final int numberOfSupportedAggs = 4;
switch (randomIntBetween(1, numberOfSupportedAggs)) {
case 1:
return AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 2:
return AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 3:
return AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 4:
return AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
}
return null;
}
}

View File

@@ -1,73 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class DateHistogramGroupSourceTests extends AbstractXContentTestCase<DateHistogramGroupSource> {
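// XContent round-trip test for the client-side DateHistogramGroupSource, using either a fixed or a calendar interval and an optional time zone.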
public static DateHistogramGroupSource.Interval randomDateHistogramInterval() {
if (randomBoolean()) {
return new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval(randomPositiveTimeValue()));
} else {
return new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w")));
}
}
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomBoolean() ? randomAlphaOfLengthBetween(1, 20) : null;
Script script = randomBoolean() ? new Script(randomAlphaOfLengthBetween(1, 10)) : null;
return new DateHistogramGroupSource(
field,
script,
randomBoolean(),
randomDateHistogramInterval(),
randomBoolean() ? randomZone() : null
);
}
@Override
protected DateHistogramGroupSource createTestInstance() {
return randomDateHistogramGroupSource();
}
@Override
protected DateHistogramGroupSource doParseInstance(XContentParser parser) throws IOException {
return DateHistogramGroupSource.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
}

View File

@@ -1,70 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.geo.GeoBoundingBox;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.geo.GeometryTestUtils;
import org.elasticsearch.geometry.Rectangle;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class GeoTileGroupSourceTests extends AbstractXContentTestCase<GeoTileGroupSource> {
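// XContent round-trip test for the client-side GeoTileGroupSource, optionally constrained by a precision and a geo bounding box.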
public static GeoTileGroupSource randomGeoTileGroupSource() {
Rectangle rectangle = GeometryTestUtils.randomRectangle();
return new GeoTileGroupSource(
randomBoolean() ? null : randomAlphaOfLength(10),
randomBoolean(),
randomBoolean() ? null : randomIntBetween(1, GeoTileUtils.MAX_ZOOM),
randomBoolean()
? null
: new GeoBoundingBox(
new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()),
new GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon())
)
);
}
@Override
protected GeoTileGroupSource createTestInstance() {
return randomGeoTileGroupSource();
}
@Override
protected GeoTileGroupSource doParseInstance(XContentParser parser) throws IOException {
return GeoTileGroupSource.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
}

View File

@@ -1,160 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import static org.hamcrest.Matchers.instanceOf;
public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {
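// XContent round-trip test for GroupConfig, plus lenient-parsing cases that skip unknown fields and unknown group types.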
public static GroupConfig randomGroupConfig() {
Map<String, SingleGroupSource> groups = new LinkedHashMap<>();
// ensure the unlikely case of two group_by entries sharing the same name does not happen
Set<String> names = new HashSet<>();
for (int i = 0; i < randomIntBetween(1, 4); ++i) {
String targetFieldName = randomAlphaOfLengthBetween(1, 20);
if (names.add(targetFieldName)) {
SingleGroupSource groupBy = null;
SingleGroupSource.Type type = randomFrom(SingleGroupSource.Type.values());
switch (type) {
case TERMS:
groupBy = TermsGroupSourceTests.randomTermsGroupSource();
break;
case HISTOGRAM:
groupBy = HistogramGroupSourceTests.randomHistogramGroupSource();
break;
case DATE_HISTOGRAM:
groupBy = DateHistogramGroupSourceTests.randomDateHistogramGroupSource();
break;
case GEOTILE_GRID:
groupBy = GeoTileGroupSourceTests.randomGeoTileGroupSource();
break;
default:
fail("unknown group source type, please implement tests and add support here");
}
groups.put(targetFieldName, groupBy);
}
}
return new GroupConfig(groups);
}
@Override
protected GroupConfig createTestInstance() {
return randomGroupConfig();
}
@Override
protected GroupConfig doParseInstance(XContentParser parser) throws IOException {
return GroupConfig.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
return field -> !field.isEmpty();
}
public void testLenientParsing() throws IOException {
BytesArray json = new BytesArray(
"{"
+ " \"unknown-field\": \"foo\","
+ " \"destination-field\": {"
+ " \"terms\": {"
+ " \"field\": \"term-field\""
+ " }"
+ " },"
+ " \"unknown-field-2\": \"bar\","
+ " \"destination-field2\": {"
+ " \"terms\": {"
+ " \"field\": \"term-field2\""
+ " }"
+ " },"
+ " \"array-field\": ["
+ " 1,"
+ " 2"
+ " ]"
+ "}"
);
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
json.streamInput()
);
GroupConfig gc = GroupConfig.fromXContent(parser);
assertEquals(gc.getGroups().size(), 2);
assertTrue(gc.getGroups().containsKey("destination-field"));
SingleGroupSource groupSource = gc.getGroups().get("destination-field");
assertThat(groupSource, instanceOf(TermsGroupSource.class));
assertEquals(groupSource.getField(), "term-field");
}
public void testLenientParsingUnknowGroupType() throws IOException {
BytesArray json = new BytesArray(
"{"
+ " \"destination-field1\": {"
+ " \"newgroup\": {"
+ " \"field1\": \"bar\","
+ " \"field2\": \"foo\""
+ " }"
+ " },"
+ " \"unknown-field\": \"bar\","
+ " \"destination-field2\": {"
+ " \"terms\": {"
+ " \"field\": \"term-field\""
+ " }"
+ " }"
+ "}"
);
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
json.streamInput()
);
GroupConfig gc = GroupConfig.fromXContent(parser);
assertEquals(gc.getGroups().size(), 1);
assertTrue(gc.getGroups().containsKey("destination-field2"));
SingleGroupSource groupSource = gc.getGroups().get("destination-field2");
assertThat(groupSource, instanceOf(TermsGroupSource.class));
assertEquals(groupSource.getField(), "term-field");
}
}

View File

@@ -1,59 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class HistogramGroupSourceTests extends AbstractXContentTestCase<HistogramGroupSource> {
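// XContent round-trip test for the client-side HistogramGroupSource with an optional field, optional script, and a random positive interval.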
public static HistogramGroupSource randomHistogramGroupSource() {
String field = randomBoolean() ? randomAlphaOfLengthBetween(1, 20) : null;
Script script = randomBoolean() ? new Script(randomAlphaOfLengthBetween(1, 10)) : null;
boolean missingBucket = randomBoolean();
double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false);
return new HistogramGroupSource(field, script, missingBucket, interval);
}
@Override
protected HistogramGroupSource doParseInstance(XContentParser parser) throws IOException {
return HistogramGroupSource.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected HistogramGroupSource createTestInstance() {
return randomHistogramGroupSource();
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
}

View File

@@ -1,66 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.function.Predicate;
public class PivotConfigTests extends AbstractXContentTestCase<PivotConfig> {
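// XContent round-trip test for PivotConfig, combining a random group configuration, a random aggregation configuration, and an optional integer setting between 10 and 10,000.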
public static PivotConfig randomPivotConfig() {
return new PivotConfig(GroupConfigTests.randomGroupConfig(),
AggregationConfigTests.randomAggregationConfig(),
randomBoolean() ? null : randomIntBetween(10, 10_000));
}
@Override
protected PivotConfig createTestInstance() {
return randomPivotConfig();
}
@Override
protected PivotConfig doParseInstance(XContentParser parser) throws IOException {
return PivotConfig.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
}

View File

@@ -1,57 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class TermsGroupSourceTests extends AbstractXContentTestCase<TermsGroupSource> {
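// XContent round-trip test for the client-side TermsGroupSource with an optional field, optional script, and a random missing-bucket flag.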
public static TermsGroupSource randomTermsGroupSource() {
String field = randomBoolean() ? randomAlphaOfLengthBetween(1, 20) : null;
Script script = randomBoolean() ? new Script(randomAlphaOfLengthBetween(1, 10)) : null;
return new TermsGroupSource(field, script, randomBoolean());
}
@Override
protected TermsGroupSource createTestInstance() {
return randomTermsGroupSource();
}
@Override
protected TermsGroupSource doParseInstance(XContentParser parser) throws IOException {
return TermsGroupSource.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
}

View File

@@ -1,128 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource;
import org.elasticsearch.xpack.core.transform.transforms.pivot.ScriptConfig;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class DateHistogramGroupSourceTests extends AbstractResponseTestCase<
DateHistogramGroupSource,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource> {
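// Parses a random server-side DateHistogramGroupSource into the HLRC class and compares field, script, interval, time zone, and type.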
public static ScriptConfig randomScriptConfig() {
ScriptType type = randomFrom(ScriptType.values());
String lang = randomBoolean() ? Script.DEFAULT_SCRIPT_LANG : randomAlphaOfLengthBetween(1, 20);
String idOrCode = randomAlphaOfLengthBetween(1, 20);
Map<String, Object> params = Collections.emptyMap();
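// Note: the randomly chosen type is overridden just below, so only stored scripts (with a null lang) are actually generated.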
type = ScriptType.STORED;
Script script = new Script(type, type == ScriptType.STORED ? null : lang, idOrCode, params);
LinkedHashMap<String, Object> source = null;
try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
XContentBuilder content = script.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
source = (LinkedHashMap<String, Object>) XContentHelper.convertToMap(BytesReference.bytes(content), true, XContentType.JSON)
.v2();
} catch (IOException e) {
// should not happen
fail("failed to create random script config");
}
return new ScriptConfig(source, script);
}
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : randomScriptConfig();
DateHistogramGroupSource dateHistogramGroupSource;
if (randomBoolean()) {
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
randomBoolean(),
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval(randomTimeValue(1, 100, "d", "h", "ms", "s", "m"))),
randomBoolean() ? randomZone() : null
);
} else {
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
randomBoolean(),
new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w"))),
randomBoolean() ? randomZone() : null
);
}
return dateHistogramGroupSource;
}
@Override
protected DateHistogramGroupSource createServerTestInstance(XContentType xContentType) {
return randomDateHistogramGroupSource();
}
@Override
protected org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource doParseToClientInstance(XContentParser parser) {
return org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource.fromXContent(parser);
}
@Override
protected void assertInstances(
DateHistogramGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource clientInstance
) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
if (serverTestInstance.getScriptConfig() != null) {
assertThat(serverTestInstance.getScriptConfig().getScript(), equalTo(clientInstance.getScript()));
} else {
assertNull(clientInstance.getScript());
}
assertSameInterval(serverTestInstance.getInterval(), clientInstance.getInterval());
assertThat(serverTestInstance.getTimeZone(), equalTo(clientInstance.getTimeZone()));
assertThat(serverTestInstance.getType().name(), equalTo(clientInstance.getType().name()));
}
private void assertSameInterval(
DateHistogramGroupSource.Interval serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource.Interval clientInstance
) {
assertEquals(serverTestInstance.getName(), clientInstance.getName());
assertEquals(serverTestInstance.getInterval(), clientInstance.getInterval());
}
}

View File

@@ -1,74 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.geo.GeoBoundingBox;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.geo.GeometryTestUtils;
import org.elasticsearch.geometry.Rectangle;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils;
import org.elasticsearch.xpack.core.transform.transforms.pivot.GeoTileGroupSource;
import static org.hamcrest.Matchers.equalTo;
public class GeoTileGroupSourceTests extends AbstractResponseTestCase<
GeoTileGroupSource,
org.elasticsearch.client.transform.transforms.pivot.GeoTileGroupSource> {
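// Parses a random server-side GeoTileGroupSource into the HLRC class and compares field, precision, and bounding box.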
public static GeoTileGroupSource randomGeoTileGroupSource() {
Rectangle rectangle = GeometryTestUtils.randomRectangle();
return new GeoTileGroupSource(
randomBoolean() ? null : randomAlphaOfLength(10),
randomBoolean(),
randomBoolean() ? null : randomIntBetween(1, GeoTileUtils.MAX_ZOOM),
randomBoolean()
? null
: new GeoBoundingBox(
new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()),
new GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon())
)
);
}
@Override
protected GeoTileGroupSource createServerTestInstance(XContentType xContentType) {
return randomGeoTileGroupSource();
}
@Override
protected org.elasticsearch.client.transform.transforms.pivot.GeoTileGroupSource doParseToClientInstance(XContentParser parser) {
return org.elasticsearch.client.transform.transforms.pivot.GeoTileGroupSource.fromXContent(parser);
}
@Override
protected void assertInstances(
GeoTileGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.GeoTileGroupSource clientInstance
) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
assertNull(clientInstance.getScript());
assertThat(serverTestInstance.getPrecision(), equalTo(clientInstance.getPrecision()));
assertThat(serverTestInstance.getGeoBoundingBox(), equalTo(clientInstance.getGeoBoundingBox()));
}
}

View File

@@ -1,69 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.pivot.HistogramGroupSource;
import org.elasticsearch.xpack.core.transform.transforms.pivot.ScriptConfig;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
public class HistogramGroupSourceTests extends AbstractResponseTestCase<
HistogramGroupSource,
org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource> {
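// Parses a random server-side HistogramGroupSource into the HLRC class and compares field, script, and interval.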
public static HistogramGroupSource randomHistogramGroupSource() {
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : DateHistogramGroupSourceTests.randomScriptConfig();
boolean missingBucket = randomBoolean();
double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false);
return new HistogramGroupSource(field, scriptConfig, missingBucket, interval);
}
@Override
protected HistogramGroupSource createServerTestInstance(XContentType xContentType) {
return randomHistogramGroupSource();
}
@Override
protected org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource doParseToClientInstance(XContentParser parser)
throws IOException {
return org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource.fromXContent(parser);
}
@Override
protected void assertInstances(
HistogramGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource clientInstance
) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
if (serverTestInstance.getScriptConfig() != null) {
assertThat(serverTestInstance.getScriptConfig().getScript(), equalTo(clientInstance.getScript()));
} else {
assertNull(clientInstance.getScript());
}
assertThat(serverTestInstance.getInterval(), equalTo(clientInstance.getInterval()));
}
}

View File

@@ -1,67 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.pivot.ScriptConfig;
import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSource;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
public class TermsGroupSourceTests extends AbstractResponseTestCase<
TermsGroupSource,
org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource> {
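// Parses a random server-side TermsGroupSource into the HLRC class and compares field and script.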
public static TermsGroupSource randomTermsGroupSource() {
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : DateHistogramGroupSourceTests.randomScriptConfig();
boolean missingBucket = randomBoolean();
return new TermsGroupSource(field, scriptConfig, missingBucket);
}
@Override
protected TermsGroupSource createServerTestInstance(XContentType xContentType) {
return randomTermsGroupSource();
}
@Override
protected org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource doParseToClientInstance(XContentParser parser)
throws IOException {
return org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource.fromXContent(parser);
}
@Override
protected void assertInstances(
TermsGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource clientInstance
) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
if (serverTestInstance.getScriptConfig() != null) {
assertThat(serverTestInstance.getScriptConfig().getScript(), equalTo(clientInstance.getScript()));
} else {
assertNull(clientInstance.getScript());
}
}
}

View File

@@ -131,7 +131,6 @@ def projectPathsToExclude = [
':x-pack:plugin:ql',
':x-pack:plugin:search-business-rules',
':x-pack:plugin:spatial',
':x-pack:plugin:transform',
':x-pack:plugin:vectors',
':x-pack:plugin:voting-only-node',
':x-pack:plugin:wildcard',