Add Create Rollup Job API to the high level REST client (#33521)
This commit adds the Create Rollup Job API to the high level REST client. It supersedes #32703 and adds dedicated request/response objects so that the client does not depend on server-side components. Relates to #29827
parent 34379887b4, commit e77835c6f5
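For orientation, here is a minimal usage sketch of the new API (not part of the commit). It assumes an already-initialized RestHighLevelClient named client, a reachable cluster with the Rollup feature available, and made-up index, field, and job names; the configuration classes it uses are all introduced in the diff below.

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.GroupConfig;
import org.elasticsearch.client.rollup.job.config.MetricConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class PutRollupJobExample {

    // Sketch only: "sensor-*", "timestamp", "temperature" and the job id are illustrative names.
    static PutRollupJobResponse createRollupJob(RestHighLevelClient client) throws IOException {
        DateHistogramGroupConfig dateHistogram = new DateHistogramGroupConfig("timestamp", DateHistogramInterval.DAY);
        GroupConfig groups = new GroupConfig(dateHistogram);
        MetricConfig metrics = new MetricConfig("temperature", Arrays.asList("min", "max", "avg"));
        RollupJobConfig config = new RollupJobConfig("sensor-rollup", "sensor-*", "sensor_rollup",
            "*/30 * * * * ?", 100, groups, Collections.singletonList(metrics), null); // null timeout falls back to the 20s default
        PutRollupJobRequest request = new PutRollupJobRequest(config);
        return client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
    }
}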
@@ -219,6 +219,7 @@ public class RestHighLevelClient implements Closeable {
    private final MigrationClient migrationClient = new MigrationClient(this);
    private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
    private final SecurityClient securityClient = new SecurityClient(this);
    private final RollupClient rollupClient = new RollupClient(this);

    /**
     * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the
@@ -300,6 +301,18 @@ public class RestHighLevelClient implements Closeable {
        return snapshotClient;
    }

    /**
     * Provides methods for accessing the Elastic Licensed Rollup APIs that
     * are shipped with the default distribution of Elasticsearch. All of
     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
     * <p>
     * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-apis.html">
     * Rollup APIs on elastic.co</a> for more information.
     */
    public RollupClient rollup() {
        return rollupClient;
    }

    /**
     * Provides a {@link TasksClient} which can be used to access the Tasks API.
     *
@@ -0,0 +1,76 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;

import java.io.IOException;
import java.util.Collections;

/**
 * A wrapper for the {@link RestHighLevelClient} that provides methods for
 * accessing the Elastic Rollup-related APIs.
 * <p>
 * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-apis.html">
 * X-Pack Rollup APIs on elastic.co</a> for more information.
 */
public class RollupClient {

    private final RestHighLevelClient restHighLevelClient;

    RollupClient(final RestHighLevelClient restHighLevelClient) {
        this.restHighLevelClient = restHighLevelClient;
    }

    /**
     * Put a rollup job into the cluster
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
     * the docs</a> for more.
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public PutRollupJobResponse putRollupJob(PutRollupJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
            RollupRequestConverters::putJob,
            options,
            PutRollupJobResponse::fromXContent,
            Collections.emptySet());
    }

    /**
     * Asynchronously put a rollup job into the cluster
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
     * the docs</a> for more.
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void putRollupJobAsync(PutRollupJobRequest request, RequestOptions options, ActionListener<PutRollupJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
            RollupRequestConverters::putJob,
            options,
            PutRollupJobResponse::fromXContent,
            listener, Collections.emptySet());
    }
}
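The asynchronous variant takes the same request plus an ActionListener; a short sketch continuing the example above (client and request as before, imports omitted):

client.rollup().putRollupJobAsync(request, RequestOptions.DEFAULT, new ActionListener<PutRollupJobResponse>() {
    @Override
    public void onResponse(PutRollupJobResponse response) {
        // the job was created when response.isAcknowledged() is true
    }

    @Override
    public void onFailure(Exception e) {
        // e.g. a 404 against the OSS distribution, or a server-side validation error
    }
});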
@@ -0,0 +1,45 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.rollup.PutRollupJobRequest;

import java.io.IOException;

import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;

final class RollupRequestConverters {

    private RollupRequestConverters() {
    }

    static Request putJob(final PutRollupJobRequest putRollupJobRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("rollup")
            .addPathPartAsIs("job")
            .addPathPart(putRollupJobRequest.getConfig().getId())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        request.setEntity(createEntity(putRollupJobRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }
}
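As a sanity check on the converter, a hypothetical package-local sketch (RollupRequestConverters is package-private, so this would live in org.elasticsearch.client; config is the job configuration from the earlier example):

PutRollupJobRequest putRequest = new PutRollupJobRequest(config);
Request lowLevelRequest = RollupRequestConverters.putJob(putRequest);
// The endpoint is built from the fixed path parts plus the job id, e.g. for the id "sensor-rollup":
assert HttpPut.METHOD_NAME.equals(lowLevelRequest.getMethod());
assert "/_xpack/rollup/job/sensor-rollup".equals(lowLevelRequest.getEndpoint());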
@ -18,6 +18,8 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.client;
|
package org.elasticsearch.client;
|
||||||
|
|
||||||
|
import org.elasticsearch.common.Nullable;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
|
@ -31,10 +33,23 @@ public class ValidationException extends IllegalArgumentException {
|
||||||
* Add a new validation error to the accumulating validation errors
|
* Add a new validation error to the accumulating validation errors
|
||||||
* @param error the error to add
|
* @param error the error to add
|
||||||
*/
|
*/
|
||||||
public void addValidationError(String error) {
|
public void addValidationError(final String error) {
|
||||||
validationErrors.add(error);
|
validationErrors.add(error);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds validation errors from an existing {@link ValidationException} to
|
||||||
|
* the accumulating validation errors
|
||||||
|
* @param exception the {@link ValidationException} to add errors from
|
||||||
|
*/
|
||||||
|
public final void addValidationErrors(final @Nullable ValidationException exception) {
|
||||||
|
if (exception != null) {
|
||||||
|
for (String error : exception.validationErrors()) {
|
||||||
|
addValidationError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the validation errors accumulated
|
* Returns the validation errors accumulated
|
||||||
*/
|
*/
|
||||||
|
|
|
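The new addValidationErrors method is what lets nested configuration objects bubble their errors up into a single exception; a small sketch:

ValidationException jobErrors = new ValidationException();
ValidationException groupErrors = new ValidationException();
groupErrors.addValidationError("Field name is required");
jobErrors.addValidationErrors(groupErrors); // copies the nested errors into jobErrors
jobErrors.addValidationErrors(null);        // the @Nullable argument is simply ignored
assert jobErrors.validationErrors().contains("Field name is required");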
@@ -0,0 +1,65 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

public class PutRollupJobRequest implements Validatable, ToXContentObject {

    private final RollupJobConfig config;

    public PutRollupJobRequest(final RollupJobConfig config) {
        this.config = Objects.requireNonNull(config, "rollup job configuration is required");
    }

    public RollupJobConfig getConfig() {
        return config;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return config.toXContent(builder, params);
    }

    @Override
    public Optional<ValidationException> validate() {
        return config.validate();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final PutRollupJobRequest that = (PutRollupJobRequest) o;
        return Objects.equals(config, that.config);
    }

    @Override
    public int hashCode() {
        return Objects.hash(config);
    }
}
@@ -0,0 +1,80 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

public class PutRollupJobResponse implements ToXContentObject {

    private final boolean acknowledged;

    public PutRollupJobResponse(final boolean acknowledged) {
        this.acknowledged = acknowledged;
    }

    public boolean isAcknowledged() {
        return acknowledged;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final PutRollupJobResponse that = (PutRollupJobResponse) o;
        return isAcknowledged() == that.isAcknowledged();
    }

    @Override
    public int hashCode() {
        return Objects.hash(acknowledged);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.field("acknowledged", isAcknowledged());
        }
        builder.endObject();
        return builder;
    }

    private static final ConstructingObjectParser<PutRollupJobResponse, Void> PARSER
        = new ConstructingObjectParser<>("put_rollup_job_response", true, args -> new PutRollupJobResponse((boolean) args[0]));
    static {
        PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
    }

    public static PutRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
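A parsing sketch for the response; the createParser call here follows the 6.x XContent API and should be treated as an assumption rather than part of this commit:

String json = "{\"acknowledged\":true}";
XContentParser parser = XContentType.JSON.xContent().createParser(
    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION_WARNINGS, json);
PutRollupJobResponse response = PutRollupJobResponse.fromXContent(parser);
assert response.isAcknowledged();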
@@ -0,0 +1,189 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.ObjectParser.ValueType;

/**
 * The configuration object for the date histogram in the rollup config
 *
 * {
 *     "groups": [
 *        "date_histogram": {
 *            "field" : "foo",
 *            "interval" : "1d",
 *            "delay": "30d",
 *            "time_zone" : "EST"
 *        }
 *     ]
 * }
 */
public class DateHistogramGroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "date_histogram";
    private static final String INTERVAL = "interval";
    private static final String FIELD = "field";
    private static final String TIME_ZONE = "time_zone";
    private static final String DELAY = "delay";
    private static final String DEFAULT_TIMEZONE = "UTC";

    private static final ConstructingObjectParser<DateHistogramGroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, a ->
            new DateHistogramGroupConfig((String) a[0], (DateHistogramInterval) a[1], (DateHistogramInterval) a[2], (String) a[3]));
        PARSER.declareString(constructorArg(), new ParseField(FIELD));
        PARSER.declareField(constructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING);
        PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(DELAY), ValueType.STRING);
        PARSER.declareString(optionalConstructorArg(), new ParseField(TIME_ZONE));
    }

    private final String field;
    private final DateHistogramInterval interval;
    private final DateHistogramInterval delay;
    private final String timeZone;

    /**
     * Create a new {@link DateHistogramGroupConfig} using the given field and interval parameters.
     */
    public DateHistogramGroupConfig(final String field, final DateHistogramInterval interval) {
        this(field, interval, null, null);
    }

    /**
     * Create a new {@link DateHistogramGroupConfig} using the given configuration parameters.
     * <p>
     * The {@code field} and {@code interval} are required to compute the date histogram for the rolled up documents.
     * The {@code delay} is optional and can be set to {@code null}. It defines how long to wait before rolling up new documents.
     * The {@code timeZone} is optional and can be set to {@code null}. When configured, the time zone value is resolved using
     * {@link DateTimeZone#forID(String)} and must match a time zone identifier provided by the Joda Time library.
     * </p>
     *
     * @param field the name of the date field to use for the date histogram (required)
     * @param interval the interval to use for the date histogram (required)
     * @param delay the time delay (optional)
     * @param timeZone the id of time zone to use to calculate the date histogram (optional). When {@code null}, the UTC timezone is used.
     */
    public DateHistogramGroupConfig(final String field,
                                    final DateHistogramInterval interval,
                                    final @Nullable DateHistogramInterval delay,
                                    final @Nullable String timeZone) {
        this.field = field;
        this.interval = interval;
        this.delay = delay;
        this.timeZone = (timeZone != null && timeZone.isEmpty() == false) ? timeZone : DEFAULT_TIMEZONE;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (field == null || field.isEmpty()) {
            validationException.addValidationError("Field name is required");
        }
        if (interval == null) {
            validationException.addValidationError("Interval is required");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * Get the date field
     */
    public String getField() {
        return field;
    }

    /**
     * Get the date interval
     */
    public DateHistogramInterval getInterval() {
        return interval;
    }

    /**
     * Get the time delay for this histogram
     */
    public DateHistogramInterval getDelay() {
        return delay;
    }

    /**
     * Get the timezone to apply
     */
    public String getTimeZone() {
        return timeZone;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(INTERVAL, interval.toString());
            builder.field(FIELD, field);
            if (delay != null) {
                builder.field(DELAY, delay.toString());
            }
            builder.field(TIME_ZONE, timeZone);
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final DateHistogramGroupConfig that = (DateHistogramGroupConfig) other;
        return Objects.equals(interval, that.interval)
            && Objects.equals(field, that.field)
            && Objects.equals(delay, that.delay)
            && Objects.equals(timeZone, that.timeZone);
    }

    @Override
    public int hashCode() {
        return Objects.hash(interval, field, delay, timeZone);
    }

    public static DateHistogramGroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
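A sketch of the two constructors; a null or empty time zone falls back to "UTC", while the delay stays null unless explicitly set:

DateHistogramGroupConfig daily = new DateHistogramGroupConfig("timestamp", DateHistogramInterval.DAY);
assert "UTC".equals(daily.getTimeZone());
assert daily.getDelay() == null;

DateHistogramGroupConfig delayed = new DateHistogramGroupConfig(
    "timestamp", new DateHistogramInterval("60m"), new DateHistogramInterval("7d"), "America/New_York");
assert "America/New_York".equals(delayed.getTimeZone());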
@@ -0,0 +1,171 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * The configuration object for the groups section in the rollup config.
 * Basically just a wrapper for histo/date histo/terms objects
 *
 * {
 *     "groups": [
 *        "date_histogram": {...},
 *        "histogram" : {...},
 *        "terms" : {...}
 *     ]
 * }
 */
public class GroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "groups";
    private static final ConstructingObjectParser<GroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args ->
            new GroupConfig((DateHistogramGroupConfig) args[0], (HistogramGroupConfig) args[1], (TermsGroupConfig) args[2]));
        PARSER.declareObject(constructorArg(),
            (p, c) -> DateHistogramGroupConfig.fromXContent(p), new ParseField(DateHistogramGroupConfig.NAME));
        PARSER.declareObject(optionalConstructorArg(),
            (p, c) -> HistogramGroupConfig.fromXContent(p), new ParseField(HistogramGroupConfig.NAME));
        PARSER.declareObject(optionalConstructorArg(),
            (p, c) -> TermsGroupConfig.fromXContent(p), new ParseField(TermsGroupConfig.NAME));
    }

    private final DateHistogramGroupConfig dateHistogram;
    private final @Nullable HistogramGroupConfig histogram;
    private final @Nullable TermsGroupConfig terms;

    public GroupConfig(final DateHistogramGroupConfig dateHistogram) {
        this(dateHistogram, null, null);
    }

    public GroupConfig(final DateHistogramGroupConfig dateHistogram,
                       final @Nullable HistogramGroupConfig histogram,
                       final @Nullable TermsGroupConfig terms) {
        this.dateHistogram = dateHistogram;
        this.histogram = histogram;
        this.terms = terms;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (dateHistogram != null) {
            final Optional<ValidationException> dateHistogramValidationErrors = dateHistogram.validate();
            if (dateHistogramValidationErrors != null && dateHistogramValidationErrors.isPresent()) {
                validationException.addValidationErrors(dateHistogramValidationErrors.get());
            }
        } else {
            validationException.addValidationError("Date histogram must not be null");
        }
        if (histogram != null) {
            final Optional<ValidationException> histogramValidationErrors = histogram.validate();
            if (histogramValidationErrors != null && histogramValidationErrors.isPresent()) {
                validationException.addValidationErrors(histogramValidationErrors.get());
            }
        }
        if (terms != null) {
            final Optional<ValidationException> termsValidationErrors = terms.validate();
            if (termsValidationErrors != null && termsValidationErrors.isPresent()) {
                validationException.addValidationErrors(termsValidationErrors.get());
            }
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * @return the configuration of the date histogram
     */
    public DateHistogramGroupConfig getDateHistogram() {
        return dateHistogram;
    }

    /**
     * @return the configuration of the histogram
     */
    @Nullable
    public HistogramGroupConfig getHistogram() {
        return histogram;
    }

    /**
     * @return the configuration of the terms
     */
    @Nullable
    public TermsGroupConfig getTerms() {
        return terms;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.field(DateHistogramGroupConfig.NAME, dateHistogram);
            if (histogram != null) {
                builder.field(HistogramGroupConfig.NAME, histogram);
            }
            if (terms != null) {
                builder.field(TermsGroupConfig.NAME, terms);
            }
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final GroupConfig that = (GroupConfig) other;
        return Objects.equals(dateHistogram, that.dateHistogram)
            && Objects.equals(histogram, that.histogram)
            && Objects.equals(terms, that.terms);
    }

    @Override
    public int hashCode() {
        return Objects.hash(dateHistogram, histogram, terms);
    }

    public static GroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
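Combining the three grouping types; only the date histogram is mandatory (validate() reports "Date histogram must not be null" otherwise). HistogramGroupConfig and TermsGroupConfig are defined further down in this commit; the field names here are illustrative:

GroupConfig dateOnly = new GroupConfig(new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR));

GroupConfig full = new GroupConfig(
    new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR),
    new HistogramGroupConfig(5L, "voltage"),        // numeric buckets of width 5
    new TermsGroupConfig("node", "datacenter"));    // terms grouping on keyword-like fields
assert full.validate().isPresent() == false;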
@@ -0,0 +1,127 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * The configuration object for the histograms in the rollup config
 *
 * {
 *     "groups": [
 *        "histogram": {
 *            "fields" : [ "foo", "bar" ],
 *            "interval" : 123
 *        }
 *     ]
 * }
 */
public class HistogramGroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "histogram";
    private static final String INTERVAL = "interval";
    private static final String FIELDS = "fields";

    private static final ConstructingObjectParser<HistogramGroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
            @SuppressWarnings("unchecked") List<String> fields = (List<String>) args[1];
            return new HistogramGroupConfig((long) args[0], fields != null ? fields.toArray(new String[fields.size()]) : null);
        });
        PARSER.declareLong(constructorArg(), new ParseField(INTERVAL));
        PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
    }

    private final long interval;
    private final String[] fields;

    public HistogramGroupConfig(final long interval, final String... fields) {
        this.interval = interval;
        this.fields = fields;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (fields == null || fields.length == 0) {
            validationException.addValidationError("Fields must have at least one value");
        }
        if (interval <= 0) {
            validationException.addValidationError("Interval must be a positive long");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    public long getInterval() {
        return interval;
    }

    public String[] getFields() {
        return fields;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(INTERVAL, interval);
            builder.field(FIELDS, fields);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final HistogramGroupConfig that = (HistogramGroupConfig) other;
        return Objects.equals(interval, that.interval) && Arrays.equals(fields, that.fields);
    }

    @Override
    public int hashCode() {
        return Objects.hash(interval, Arrays.hashCode(fields));
    }

    public static HistogramGroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
@@ -0,0 +1,135 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * The configuration object for the metrics portion of a rollup job config
 *
 * {
 *     "metrics": [
 *        {
 *            "field": "foo",
 *            "metrics": [ "min", "max", "sum"]
 *        },
 *        {
 *            "field": "bar",
 *            "metrics": [ "max" ]
 *        }
 *     ]
 * }
 */
public class MetricConfig implements Validatable, ToXContentObject {

    static final String NAME = "metrics";
    private static final String FIELD = "field";
    private static final String METRICS = "metrics";

    private static final ConstructingObjectParser<MetricConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
            @SuppressWarnings("unchecked") List<String> metrics = (List<String>) args[1];
            return new MetricConfig((String) args[0], metrics);
        });
        PARSER.declareString(constructorArg(), new ParseField(FIELD));
        PARSER.declareStringArray(constructorArg(), new ParseField(METRICS));
    }

    private final String field;
    private final List<String> metrics;

    public MetricConfig(final String field, final List<String> metrics) {
        this.field = field;
        this.metrics = metrics;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (field == null || field.isEmpty()) {
            validationException.addValidationError("Field name is required");
        }
        if (metrics == null || metrics.isEmpty()) {
            validationException.addValidationError("Metrics must be a non-null, non-empty array of strings");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * @return the name of the field used in the metric configuration. Never {@code null}.
     */
    public String getField() {
        return field;
    }

    /**
     * @return the names of the metrics used in the metric configuration. Never {@code null}.
     */
    public List<String> getMetrics() {
        return metrics;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(FIELD, field);
            builder.field(METRICS, metrics);
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final MetricConfig that = (MetricConfig) other;
        return Objects.equals(field, that.field) && Objects.equals(metrics, that.metrics);
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, metrics);
    }

    public static MetricConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
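The metric names are plain strings; the client only checks that the field and the list are non-empty, so the server remains the authority on which metrics (such as min, max, sum, avg, value_count) are accepted. A short sketch, continuing the earlier examples (java.util imports as before):

MetricConfig metrics = new MetricConfig("temperature", Arrays.asList("min", "max", "sum"));
assert metrics.validate().isPresent() == false;

MetricConfig broken = new MetricConfig("temperature", Collections.emptyList());
assert broken.validate().get().validationErrors()
    .contains("Metrics must be a non-null, non-empty array of strings");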
@@ -0,0 +1,242 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * This class holds the configuration details of a rollup job, such as the groupings, metrics, what
 * index to rollup and where to roll them to.
 */
public class RollupJobConfig implements Validatable, ToXContentObject {

    private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(20);
    private static final String ID = "id";
    private static final String TIMEOUT = "timeout";
    private static final String CRON = "cron";
    private static final String PAGE_SIZE = "page_size";
    private static final String INDEX_PATTERN = "index_pattern";
    private static final String ROLLUP_INDEX = "rollup_index";

    private final String id;
    private final String indexPattern;
    private final String rollupIndex;
    private final GroupConfig groupConfig;
    private final List<MetricConfig> metricsConfig;
    private final TimeValue timeout;
    private final String cron;
    private final int pageSize;

    private static final ConstructingObjectParser<RollupJobConfig, String> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>("rollup_job_config", true, (args, optionalId) -> {
            String id = args[0] != null ? (String) args[0] : optionalId;
            String indexPattern = (String) args[1];
            String rollupIndex = (String) args[2];
            GroupConfig groupConfig = (GroupConfig) args[3];
            @SuppressWarnings("unchecked")
            List<MetricConfig> metricsConfig = (List<MetricConfig>) args[4];
            TimeValue timeout = (TimeValue) args[5];
            String cron = (String) args[6];
            int pageSize = (int) args[7];
            return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groupConfig, metricsConfig, timeout);
        });
        PARSER.declareString(optionalConstructorArg(), new ParseField(ID));
        PARSER.declareString(constructorArg(), new ParseField(INDEX_PATTERN));
        PARSER.declareString(constructorArg(), new ParseField(ROLLUP_INDEX));
        PARSER.declareObject(optionalConstructorArg(), (p, c) -> GroupConfig.fromXContent(p), new ParseField(GroupConfig.NAME));
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> MetricConfig.fromXContent(p), new ParseField(MetricConfig.NAME));
        PARSER.declareField(optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), TIMEOUT),
            new ParseField(TIMEOUT), ObjectParser.ValueType.STRING_OR_NULL);
        PARSER.declareString(constructorArg(), new ParseField(CRON));
        PARSER.declareInt(constructorArg(), new ParseField(PAGE_SIZE));
    }

    public RollupJobConfig(final String id,
                           final String indexPattern,
                           final String rollupIndex,
                           final String cron,
                           final int pageSize,
                           final GroupConfig groupConfig,
                           final List<MetricConfig> metricsConfig,
                           final @Nullable TimeValue timeout) {
        this.id = id;
        this.indexPattern = indexPattern;
        this.rollupIndex = rollupIndex;
        this.groupConfig = groupConfig;
        this.metricsConfig = metricsConfig != null ? metricsConfig : Collections.emptyList();
        this.timeout = timeout != null ? timeout : DEFAULT_TIMEOUT;
        this.cron = cron;
        this.pageSize = pageSize;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (id == null || id.isEmpty()) {
            validationException.addValidationError("Id must be a non-null, non-empty string");
        }
        if (indexPattern == null || indexPattern.isEmpty()) {
            validationException.addValidationError("Index pattern must be a non-null, non-empty string");
        } else if (Regex.isMatchAllPattern(indexPattern)) {
            validationException.addValidationError("Index pattern must not match all indices (as it would match its own rollup index)");
        } else if (indexPattern != null && indexPattern.equals(rollupIndex)) {
            validationException.addValidationError("Rollup index may not be the same as the index pattern");
        } else if (Regex.isSimpleMatchPattern(indexPattern) && Regex.simpleMatch(indexPattern, rollupIndex)) {
            validationException.addValidationError("Index pattern would match rollup index name which is not allowed");
        }

        if (rollupIndex == null || rollupIndex.isEmpty()) {
            validationException.addValidationError("Rollup index must be a non-null, non-empty string");
        }
        if (cron == null || cron.isEmpty()) {
            validationException.addValidationError("Cron schedule must be a non-null, non-empty string");
        }
        if (pageSize <= 0) {
            validationException.addValidationError("Page size is mandatory and must be a positive long");
        }
        if (groupConfig == null && (metricsConfig == null || metricsConfig.isEmpty())) {
            validationException.addValidationError("At least one grouping or metric must be configured");
        }
        if (groupConfig != null) {
            final Optional<ValidationException> groupValidationErrors = groupConfig.validate();
            if (groupValidationErrors != null && groupValidationErrors.isPresent()) {
                validationException.addValidationErrors(groupValidationErrors.get());
            }
        }
        if (metricsConfig != null) {
            for (MetricConfig metricConfig : metricsConfig) {
                final Optional<ValidationException> metricsValidationErrors = metricConfig.validate();
                if (metricsValidationErrors != null && metricsValidationErrors.isPresent()) {
                    validationException.addValidationErrors(metricsValidationErrors.get());
                }
            }
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    public String getId() {
        return id;
    }

    public GroupConfig getGroupConfig() {
        return groupConfig;
    }

    public List<MetricConfig> getMetricsConfig() {
        return metricsConfig;
    }

    public TimeValue getTimeout() {
        return timeout;
    }

    public String getIndexPattern() {
        return indexPattern;
    }

    public String getRollupIndex() {
        return rollupIndex;
    }

    public String getCron() {
        return cron;
    }

    public int getPageSize() {
        return pageSize;
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        {
            builder.field(ID, id);
            builder.field(INDEX_PATTERN, indexPattern);
            builder.field(ROLLUP_INDEX, rollupIndex);
            builder.field(CRON, cron);
            if (groupConfig != null) {
                builder.field(GroupConfig.NAME, groupConfig);
            }
            if (metricsConfig != null) {
                builder.startArray(MetricConfig.NAME);
                for (MetricConfig metric : metricsConfig) {
                    metric.toXContent(builder, params);
                }
                builder.endArray();
            }
            if (timeout != null) {
                builder.field(TIMEOUT, timeout.getStringRep());
            }
            builder.field(PAGE_SIZE, pageSize);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final RollupJobConfig that = (RollupJobConfig) other;
        return Objects.equals(this.id, that.id)
            && Objects.equals(this.indexPattern, that.indexPattern)
            && Objects.equals(this.rollupIndex, that.rollupIndex)
            && Objects.equals(this.cron, that.cron)
            && Objects.equals(this.groupConfig, that.groupConfig)
            && Objects.equals(this.metricsConfig, that.metricsConfig)
            && Objects.equals(this.timeout, that.timeout)
            && Objects.equals(this.pageSize, that.pageSize);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, indexPattern, rollupIndex, cron, groupConfig, metricsConfig, timeout, pageSize);
    }

    public static RollupJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) throws IOException {
        return PARSER.parse(parser, optionalJobId);
    }
}
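The accumulated-validation pattern shows up most clearly at the job level; an intentionally broken configuration collects several messages in one pass (a sketch):

RollupJobConfig broken = new RollupJobConfig("", "sensor-*", "sensor_rollup", null, 0, null, null, null);
ValidationException errors = broken.validate().get();
// With these arguments the four expected messages are:
//   "Id must be a non-null, non-empty string"
//   "Cron schedule must be a non-null, non-empty string"
//   "Page size is mandatory and must be a positive long"
//   "At least one grouping or metric must be configured"
assert errors.validationErrors().size() == 4;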
@@ -0,0 +1,115 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * The configuration object for the terms groups in the rollup config
 *
 * {
 *   "groups": [
 *     "terms": {
 *       "fields" : [ "foo", "bar" ]
 *     }
 *   ]
 * }
 */
public class TermsGroupConfig implements Validatable, ToXContentObject {

    static final String NAME = "terms";
    private static final String FIELDS = "fields";

    private static final ConstructingObjectParser<TermsGroupConfig, Void> PARSER;
    static {
        PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
            @SuppressWarnings("unchecked") List<String> fields = (List<String>) args[0];
            return new TermsGroupConfig(fields != null ? fields.toArray(new String[fields.size()]) : null);
        });
        PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
    }

    private final String[] fields;

    public TermsGroupConfig(final String... fields) {
        this.fields = fields;
    }

    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
        if (fields == null || fields.length == 0) {
            validationException.addValidationError("Fields must have at least one value");
        }
        if (validationException.validationErrors().isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(validationException);
    }

    /**
     * @return the names of the fields. Never {@code null}.
     */
    public String[] getFields() {
        return fields;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.field(FIELDS, fields);
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final TermsGroupConfig that = (TermsGroupConfig) other;
        return Arrays.equals(fields, that.fields);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(fields);
    }

    public static TermsGroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
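A small usage sketch (illustration only, not in the diff) of the Validatable contract that TermsGroupConfig shares with the other config classes: build the config, then check validate() before sending it to the cluster. The class name TermsGroupConfigExample is hypothetical.

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;

import java.util.Optional;

final class TermsGroupConfigExample {
    static TermsGroupConfig hostAndDatacenterTerms() {
        TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter");

        // An empty Optional means the configuration passed validation; a config built with
        // no fields would instead report "Fields must have at least one value".
        Optional<ValidationException> validation = terms.validate();
        validation.ifPresent(e -> {
            throw new IllegalArgumentException("invalid terms group config: " + e.validationErrors());
        });
        return terms;
    }
}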
@@ -754,6 +754,7 @@ public class RestHighLevelClientTests extends ESTestCase {
            if (apiName.startsWith("xpack.") == false &&
                apiName.startsWith("license.") == false &&
                apiName.startsWith("machine_learning.") == false &&
                apiName.startsWith("rollup.") == false &&
                apiName.startsWith("watcher.") == false &&
                apiName.startsWith("graph.") == false &&
                apiName.startsWith("migration.") == false &&
@@ -0,0 +1,162 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.GroupConfig;
import org.elasticsearch.client.rollup.job.config.MetricConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class RollupIT extends ESRestHighLevelClientTestCase {

    private static final List<String> SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
        SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);

    @SuppressWarnings("unchecked")
    public void testPutRollupJob() throws Exception {
        double sum = 0.0d;
        int max = Integer.MIN_VALUE;
        int min = Integer.MAX_VALUE;

        final BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        for (int minute = 0; minute < 60; minute++) {
            for (int second = 0; second < 60; second = second + 10) {
                final int value = randomIntBetween(0, 100);

                final IndexRequest indexRequest = new IndexRequest("docs", "doc");
                indexRequest.source(jsonBuilder()
                    .startObject()
                    .field("value", value)
                    .field("date", String.format(Locale.ROOT, "2018-01-01T00:%02d:%02dZ", minute, second))
                    .endObject());
                bulkRequest.add(indexRequest);

                sum += value;
                if (value > max) {
                    max = value;
                }
                if (value < min) {
                    min = value;
                }
            }
        }

        final int numDocs = bulkRequest.numberOfActions();

        BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
        assertEquals(RestStatus.OK, bulkResponse.status());
        if (bulkResponse.hasFailures()) {
            for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
                if (itemResponse.isFailed()) {
                    logger.fatal(itemResponse.getFailureMessage());
                }
            }
        }
        assertFalse(bulkResponse.hasFailures());

        RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
        assertEquals(0, refreshResponse.getFailedShards());

        final String id = randomAlphaOfLength(10);
        final String indexPattern = randomFrom("docs", "d*", "doc*");
        final String rollupIndex = randomFrom("rollup", "test");
        final String cron = "*/1 * * * * ?";
        final int pageSize = randomIntBetween(numDocs, numDocs * 10);
        // TODO expand this to also test with histogram and terms?
        final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
        final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
        final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));

        PutRollupJobRequest putRollupJobRequest =
            new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));

        final RollupClient rollupClient = highLevelClient().rollup();
        PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
        assertTrue(response.isAcknowledged());

        // TODO Replace this with the Rollup Start Job API
        Response startResponse = client().performRequest(new Request("POST", "/_xpack/rollup/job/" + id + "/_start"));
        assertEquals(RestStatus.OK.getStatus(), startResponse.getHttpResponse().getStatusLine().getStatusCode());

        int finalMin = min;
        int finalMax = max;
        double finalSum = sum;
        assertBusy(() -> {
            SearchResponse searchResponse = highLevelClient().search(new SearchRequest(rollupIndex), RequestOptions.DEFAULT);
            assertEquals(0, searchResponse.getFailedShards());
            assertEquals(1L, searchResponse.getHits().getTotalHits());

            SearchHit searchHit = searchResponse.getHits().getAt(0);
            Map<String, Object> source = searchHit.getSourceAsMap();
            assertNotNull(source);

            assertEquals(numDocs, source.get("date.date_histogram._count"));
            assertEquals(groups.getDateHistogram().getInterval().toString(), source.get("date.date_histogram.interval"));
            assertEquals(groups.getDateHistogram().getTimeZone(), source.get("date.date_histogram.time_zone"));

            for (MetricConfig metric : metrics) {
                for (String name : metric.getMetrics()) {
                    Number value = (Number) source.get(metric.getField() + "." + name + ".value");
                    if ("min".equals(name)) {
                        assertEquals(finalMin, value.intValue());
                    } else if ("max".equals(name)) {
                        assertEquals(finalMax, value.intValue());
                    } else if ("sum".equals(name)) {
                        assertEquals(finalSum, value.doubleValue(), 0.0d);
                    } else if ("avg".equals(name)) {
                        assertEquals(finalSum, value.doubleValue(), 0.0d);
                        Number avgCount = (Number) source.get(metric.getField() + "." + name + "._count");
                        assertEquals(numDocs, avgCount.intValue());
                    } else if ("value_count".equals(name)) {
                        assertEquals(numDocs, value.intValue());
                    }
                }
            }
        });
    }
}
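The "avg" assertions above work because a rollup document does not store the average directly: per those assertions, a running sum is kept under "<field>.avg.value" and a document count under "<field>.avg._count". A sketch (illustrative helper, not part of the commit) of how a reader of the rollup index would recover the average from those two fields:

import java.util.Map;

final class RollupAvgExample {
    // rollupSource is the _source map of a rollup document, as used in the assertions above.
    static double recoverAverage(Map<String, Object> rollupSource, String field) {
        double sum = ((Number) rollupSource.get(field + ".avg.value")).doubleValue();
        long count = ((Number) rollupSource.get(field + ".avg._count")).longValue();
        return sum / count;
    }
}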
@@ -0,0 +1,163 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.documentation;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.GroupConfig;
import org.elasticsearch.client.rollup.job.config.HistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.MetricConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {

    @Before
    public void setUpDocs() throws IOException {
        final BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        for (int i = 0; i < 50; i++) {
            final IndexRequest indexRequest = new IndexRequest("docs", "doc");
            indexRequest.source(jsonBuilder()
                .startObject()
                .field("timestamp", String.format(Locale.ROOT, "2018-01-01T00:%02d:00Z", i))
                .field("hostname", 0)
                .field("datacenter", 0)
                .field("temperature", 0)
                .field("voltage", 0)
                .field("load", 0)
                .field("net_in", 0)
                .field("net_out", 0)
                .endObject());
            bulkRequest.add(indexRequest);
        }
        BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
        assertEquals(RestStatus.OK, bulkResponse.status());
        assertFalse(bulkResponse.hasFailures());

        RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
        assertEquals(0, refreshResponse.getFailedShards());
    }

    public void testCreateRollupJob() throws Exception {
        RestHighLevelClient client = highLevelClient();

        final String indexPattern = "docs";
        final String rollupIndex = "rollup";
        final String cron = "*/1 * * * * ?";
        final int pageSize = 100;
        final TimeValue timeout = null;

        //tag::x-pack-rollup-put-rollup-job-group-config
        DateHistogramGroupConfig dateHistogram =
            new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1>
        TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter"); // <2>
        HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out"); // <3>

        GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms); // <4>
        //end::x-pack-rollup-put-rollup-job-group-config

        //tag::x-pack-rollup-put-rollup-job-metrics-config
        List<MetricConfig> metrics = new ArrayList<>(); // <1>
        metrics.add(new MetricConfig("temperature", Arrays.asList("min", "max", "sum"))); // <2>
        metrics.add(new MetricConfig("voltage", Arrays.asList("avg", "value_count"))); // <3>
        //end::x-pack-rollup-put-rollup-job-metrics-config
        {
            String id = "job_1";

            //tag::x-pack-rollup-put-rollup-job-config
            RollupJobConfig config = new RollupJobConfig(id, // <1>
                indexPattern, // <2>
                rollupIndex, // <3>
                cron, // <4>
                pageSize, // <5>
                groups, // <6>
                metrics, // <7>
                timeout); // <8>
            //end::x-pack-rollup-put-rollup-job-config

            //tag::x-pack-rollup-put-rollup-job-request
            PutRollupJobRequest request = new PutRollupJobRequest(config); // <1>
            //end::x-pack-rollup-put-rollup-job-request

            //tag::x-pack-rollup-put-rollup-job-execute
            PutRollupJobResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
            //end::x-pack-rollup-put-rollup-job-execute

            //tag::x-pack-rollup-put-rollup-job-response
            boolean acknowledged = response.isAcknowledged(); // <1>
            //end::x-pack-rollup-put-rollup-job-response
            assertTrue(acknowledged);
        }
        {
            String id = "job_2";
            RollupJobConfig config = new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout);
            PutRollupJobRequest request = new PutRollupJobRequest(config);
            // tag::x-pack-rollup-put-rollup-job-execute-listener
            ActionListener<PutRollupJobResponse> listener = new ActionListener<PutRollupJobResponse>() {
                @Override
                public void onResponse(PutRollupJobResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::x-pack-rollup-put-rollup-job-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::x-pack-rollup-put-rollup-job-execute-async
            client.rollup().putRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::x-pack-rollup-put-rollup-job-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
}
@@ -0,0 +1,59 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup;

import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;

import java.io.IOException;

public class PutRollupJobRequestTests extends AbstractXContentTestCase<PutRollupJobRequest> {

    private String jobId;

    @Before
    public void setUpOptionalId() {
        jobId = randomAlphaOfLengthBetween(1, 10);
    }

    @Override
    protected PutRollupJobRequest createTestInstance() {
        return new PutRollupJobRequest(RollupJobConfigTests.randomRollupJobConfig(jobId));
    }

    @Override
    protected PutRollupJobRequest doParseInstance(final XContentParser parser) throws IOException {
        final String optionalId = randomBoolean() ? jobId : null;
        return new PutRollupJobRequest(RollupJobConfig.fromXContent(parser, optionalId));
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testRequireConfiguration() {
        final NullPointerException e = expectThrows(NullPointerException.class, () -> new PutRollupJobRequest(null));
        assertEquals("rollup job configuration is required", e.getMessage());
    }
}
@@ -0,0 +1,50 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;

import java.io.IOException;

public class PutRollupJobResponseTests extends AbstractXContentTestCase<PutRollupJobResponse> {

    private boolean acknowledged;

    @Before
    public void setupJobID() {
        acknowledged = randomBoolean();
    }

    @Override
    protected PutRollupJobResponse createTestInstance() {
        return new PutRollupJobResponse(acknowledged);
    }

    @Override
    protected PutRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
        return PutRollupJobResponse.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }
}
@@ -0,0 +1,98 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class DateHistogramGroupConfigTests extends AbstractXContentTestCase<DateHistogramGroupConfig> {

    @Override
    protected DateHistogramGroupConfig createTestInstance() {
        return randomDateHistogramGroupConfig();
    }

    @Override
    protected DateHistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return DateHistogramGroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullField() {
        final DateHistogramGroupConfig config = new DateHistogramGroupConfig(null, DateHistogramInterval.DAY, null, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateEmptyField() {
        final DateHistogramGroupConfig config = new DateHistogramGroupConfig("", DateHistogramInterval.DAY, null, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateNullInterval() {
        final DateHistogramGroupConfig config = new DateHistogramGroupConfig("field", null, null, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Interval is required")));
    }

    public void testValidate() {
        final DateHistogramGroupConfig config = randomDateHistogramGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static DateHistogramGroupConfig randomDateHistogramGroupConfig() {
        final String field = randomAlphaOfLength(randomIntBetween(3, 10));
        final DateHistogramInterval interval = new DateHistogramInterval(randomPositiveTimeValue());
        final DateHistogramInterval delay = randomBoolean() ? new DateHistogramInterval(randomPositiveTimeValue()) : null;
        final String timezone = randomBoolean() ? randomDateTimeZone().toString() : null;
        return new DateHistogramGroupConfig(field, interval, delay, timezone);
    }
}
@@ -0,0 +1,116 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {

    @Override
    protected GroupConfig createTestInstance() {
        return randomGroupConfig();
    }

    @Override
    protected GroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return GroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullDateHistogramGroupConfig() {
        final GroupConfig config = new GroupConfig(null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Date histogram must not be null")));
    }

    public void testValidateDateHistogramGroupConfigWithErrors() {
        final DateHistogramGroupConfig dateHistogramGroupConfig = new DateHistogramGroupConfig(null, null, null, null);

        final GroupConfig config = new GroupConfig(dateHistogramGroupConfig);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(2));
        assertThat(validationException.validationErrors(),
            containsInAnyOrder("Field name is required", "Interval is required"));
    }

    public void testValidateHistogramGroupConfigWithErrors() {
        final HistogramGroupConfig histogramGroupConfig = new HistogramGroupConfig(0L);

        final GroupConfig config = new GroupConfig(randomGroupConfig().getDateHistogram(), histogramGroupConfig, null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(2));
        assertThat(validationException.validationErrors(),
            containsInAnyOrder("Fields must have at least one value", "Interval must be a positive long"));
    }

    public void testValidateTermsGroupConfigWithErrors() {
        final TermsGroupConfig termsGroupConfig = new TermsGroupConfig();

        final GroupConfig config = new GroupConfig(randomGroupConfig().getDateHistogram(), null, termsGroupConfig);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Fields must have at least one value"));
    }

    public void testValidate() {
        final GroupConfig config = randomGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static GroupConfig randomGroupConfig() {
        DateHistogramGroupConfig dateHistogram = DateHistogramGroupConfigTests.randomDateHistogramGroupConfig();
        HistogramGroupConfig histogram = randomBoolean() ? HistogramGroupConfigTests.randomHistogramGroupConfig() : null;
        TermsGroupConfig terms = randomBoolean() ? TermsGroupConfigTests.randomTermsGroupConfig() : null;
        return new GroupConfig(dateHistogram, histogram, terms);
    }
}
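For orientation, a compact sketch (not part of the diff) of how the three group types compose into a GroupConfig, mirroring the constructors exercised by GroupConfigTests above: the date histogram is mandatory, while the histogram and terms groupings may be null. The class name GroupConfigExample is made up.

import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.GroupConfig;
import org.elasticsearch.client.rollup.job.config.HistogramGroupConfig;
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

final class GroupConfigExample {
    static GroupConfig hourlyGroups() {
        DateHistogramGroupConfig dateHistogram =
            new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC");
        HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load"); // optional, may be null
        TermsGroupConfig terms = new TermsGroupConfig("hostname");             // optional, may be null
        return new GroupConfig(dateHistogram, histogram, terms);               // argument order: date histogram, histogram, terms
    }
}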
@@ -0,0 +1,109 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class HistogramGroupConfigTests extends AbstractXContentTestCase<HistogramGroupConfig> {

    @Override
    protected HistogramGroupConfig createTestInstance() {
        return randomHistogramGroupConfig();
    }

    @Override
    protected HistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return HistogramGroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullFields() {
        final HistogramGroupConfig config = new HistogramGroupConfig(60L);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidateEmptyFields() {
        final HistogramGroupConfig config = new HistogramGroupConfig(60L, Strings.EMPTY_ARRAY);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidateNegativeInterval() {
        final HistogramGroupConfig config = new HistogramGroupConfig(-1L, randomHistogramGroupConfig().getFields());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Interval must be a positive long")));
    }

    public void testValidateZeroInterval() {
        final HistogramGroupConfig config = new HistogramGroupConfig(0L, randomHistogramGroupConfig().getFields());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Interval must be a positive long")));
    }

    public void testValidate() {
        final HistogramGroupConfig config = randomHistogramGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static HistogramGroupConfig randomHistogramGroupConfig() {
        final long interval = randomNonNegativeLong();
        final String[] fields = new String[randomIntBetween(1, 10)];
        for (int i = 0; i < fields.length; i++) {
            fields[i] = randomAlphaOfLength(randomIntBetween(3, 10));
        }
        return new HistogramGroupConfig(interval, fields);
    }
}
@@ -0,0 +1,127 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class MetricConfigTests extends AbstractXContentTestCase<MetricConfig> {

    @Override
    protected MetricConfig createTestInstance() {
        return randomMetricConfig();
    }

    @Override
    protected MetricConfig doParseInstance(final XContentParser parser) throws IOException {
        return MetricConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullField() {
        final MetricConfig config = new MetricConfig(null, randomMetricConfig().getMetrics());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateEmptyField() {
        final MetricConfig config = new MetricConfig("", randomMetricConfig().getMetrics());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Field name is required")));
    }

    public void testValidateNullListOfMetrics() {
        final MetricConfig config = new MetricConfig("field", null);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Metrics must be a non-null, non-empty array of strings")));
    }

    public void testValidateEmptyListOfMetrics() {
        final MetricConfig config = new MetricConfig("field", Collections.emptyList());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Metrics must be a non-null, non-empty array of strings")));
    }

    public void testValidate() {
        final MetricConfig config = randomMetricConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static MetricConfig randomMetricConfig() {
        final List<String> metrics = new ArrayList<>();
        if (randomBoolean()) {
            metrics.add("min");
        }
        if (randomBoolean()) {
            metrics.add("max");
        }
        if (randomBoolean()) {
            metrics.add("sum");
        }
        if (randomBoolean()) {
            metrics.add("avg");
        }
        if (randomBoolean()) {
            metrics.add("value_count");
        }
        if (metrics.size() == 0) {
            metrics.add("min");
        }
        // large name so we don't accidentally collide
        return new MetricConfig(randomAlphaOfLengthBetween(15, 25), Collections.unmodifiableList(metrics));
    }
}
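A short sketch (illustration only) of a MetricConfig as exercised above; the metric names used throughout this change are min, max, sum, avg and value_count, matching SUPPORTED_METRICS in RollupIT. MetricConfigExample is a hypothetical class name.

import org.elasticsearch.client.rollup.job.config.MetricConfig;

import java.util.Arrays;

final class MetricConfigExample {
    static MetricConfig temperatureMetrics() {
        MetricConfig config = new MetricConfig("temperature", Arrays.asList("min", "max", "avg", "value_count"));
        // Both the field name and the metrics list are non-empty, so validate() returns Optional.empty().
        assert config.validate().isPresent() == false;
        return config;
    }
}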
@@ -0,0 +1,308 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import static java.util.Collections.singletonList;
import static java.util.Collections.unmodifiableList;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class RollupJobConfigTests extends AbstractXContentTestCase<RollupJobConfig> {

    private String id;

    @Before
    public void setUpOptionalId() {
        id = randomAlphaOfLengthBetween(1, 10);
    }

    @Override
    protected RollupJobConfig createTestInstance() {
        return randomRollupJobConfig(id);
    }

    @Override
    protected RollupJobConfig doParseInstance(final XContentParser parser) throws IOException {
        return RollupJobConfig.fromXContent(parser, randomBoolean() ? id : null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullId() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(null, sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Id must be a non-null, non-empty string"));
    }

    public void testValidateEmptyId() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig("", sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Id must be a non-null, non-empty string"));
    }

    public void testValidateNullIndexPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), null, sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Index pattern must be a non-null, non-empty string"));
    }

    public void testValidateEmptyIndexPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "", sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Index pattern must be a non-null, non-empty string"));
    }

    public void testValidateMatchAllIndexPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "*", sample.getRollupIndex(), sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(),
            contains("Index pattern must not match all indices (as it would match it's own rollup index"));
    }

    public void testValidateIndexPatternMatchesRollupIndex() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "rollup*", "rollup", sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Index pattern would match rollup index name which is not allowed"));
    }

    public void testValidateSameIndexAndRollupPatterns() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), "test", "test", sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Rollup index may not be the same as the index pattern"));
    }

    public void testValidateNullRollupPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), null, sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Rollup index must be a non-null, non-empty string"));
    }

    public void testValidateEmptyRollupPattern() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), "", sample.getCron(),
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Rollup index must be a non-null, non-empty string"));
    }

    public void testValidateNullCron() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), null,
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Cron schedule must be a non-null, non-empty string"));
    }

    public void testValidateEmptyCron() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), "",
            sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Cron schedule must be a non-null, non-empty string"));
    }

    public void testValidatePageSize() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
            sample.getCron(), 0, sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("Page size is mandatory and must be a positive long"));
    }

    public void testValidateGroupOrMetrics() {
        final RollupJobConfig sample = randomRollupJobConfig(id);

        final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
            sample.getCron(), sample.getPageSize(), null, null, sample.getTimeout());

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains("At least one grouping or metric must be configured"));
    }

    public void testValidateGroupConfigWithErrors() {
        final GroupConfig groupConfig = new GroupConfig(null);
|
||||||
|
|
||||||
|
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||||
|
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
|
||||||
|
sample.getCron(), sample.getPageSize(), groupConfig, sample.getMetricsConfig(), sample.getTimeout());
|
||||||
|
|
||||||
|
Optional<ValidationException> validation = config.validate();
|
||||||
|
assertThat(validation, notNullValue());
|
||||||
|
assertThat(validation.isPresent(), is(true));
|
||||||
|
ValidationException validationException = validation.get();
|
||||||
|
assertThat(validationException.validationErrors().size(), is(1));
|
||||||
|
assertThat(validationException.validationErrors(), contains("Date histogram must not be null"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testValidateListOfMetricsWithErrors() {
|
||||||
|
final List<MetricConfig> metricsConfigs = singletonList(new MetricConfig(null, null));
|
||||||
|
|
||||||
|
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||||
|
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
|
||||||
|
sample.getCron(), sample.getPageSize(), sample.getGroupConfig(), metricsConfigs, sample.getTimeout());
|
||||||
|
|
||||||
|
Optional<ValidationException> validation = config.validate();
|
||||||
|
assertThat(validation, notNullValue());
|
||||||
|
assertThat(validation.isPresent(), is(true));
|
||||||
|
ValidationException validationException = validation.get();
|
||||||
|
assertThat(validationException.validationErrors().size(), is(2));
|
||||||
|
assertThat(validationException.validationErrors(),
|
||||||
|
containsInAnyOrder("Field name is required", "Metrics must be a non-null, non-empty array of strings"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public static RollupJobConfig randomRollupJobConfig(final String id) {
|
||||||
|
final String indexPattern = randomAlphaOfLengthBetween(5, 20);
|
||||||
|
final String rollupIndex = "rollup_" + indexPattern;
|
||||||
|
final String cron = randomCron();
|
||||||
|
final int pageSize = randomIntBetween(1, 100);
|
||||||
|
final TimeValue timeout = randomBoolean() ? null :
|
||||||
|
new TimeValue(randomIntBetween(0, 60), randomFrom(Arrays.asList(TimeUnit.MILLISECONDS, TimeUnit.SECONDS, TimeUnit.MINUTES)));
|
||||||
|
final GroupConfig groups = GroupConfigTests.randomGroupConfig();
|
||||||
|
|
||||||
|
final List<MetricConfig> metrics = new ArrayList<>();
|
||||||
|
if (randomBoolean()) {
|
||||||
|
final int numMetrics = randomIntBetween(1, 10);
|
||||||
|
for (int i = 0; i < numMetrics; i++) {
|
||||||
|
metrics.add(MetricConfigTests.randomMetricConfig());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, unmodifiableList(metrics), timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String randomCron() {
|
||||||
|
return (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //second
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //minute
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 23))) + //hour
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1, 31))) + //day of month
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1, 12))) + //month
|
||||||
|
" ?" + //day of week
|
||||||
|
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1970, 2199))); //year
|
||||||
|
}
|
||||||
|
}

@ -0,0 +1,87 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup.job.config;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.Optional;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class TermsGroupConfigTests extends AbstractXContentTestCase<TermsGroupConfig> {

    @Override
    protected TermsGroupConfig createTestInstance() {
        return randomTermsGroupConfig();
    }

    @Override
    protected TermsGroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return TermsGroupConfig.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public void testValidateNullFields() {
        final TermsGroupConfig config = new TermsGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidateEmptyFields() {
        final TermsGroupConfig config = new TermsGroupConfig(Strings.EMPTY_ARRAY);

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(true));
        ValidationException validationException = validation.get();
        assertThat(validationException.validationErrors().size(), is(1));
        assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
    }

    public void testValidate() {
        final TermsGroupConfig config = randomTermsGroupConfig();

        Optional<ValidationException> validation = config.validate();
        assertThat(validation, notNullValue());
        assertThat(validation.isPresent(), is(false));
    }

    static TermsGroupConfig randomTermsGroupConfig() {
        final String[] fields = new String[randomIntBetween(1, 10)];
        for (int i = 0; i < fields.length; i++) {
            fields[i] = randomAlphaOfLength(randomIntBetween(3, 10));
        }
        return new TermsGroupConfig(fields);
    }
}

@ -0,0 +1,172 @@
[[java-rest-high-x-pack-rollup-put-job]]
=== Put Rollup Job API

The Put Rollup Job API can be used to create a new Rollup job
in the cluster. The API accepts a `PutRollupJobRequest` object
as a request and returns a `PutRollupJobResponse`.

[[java-rest-high-x-pack-rollup-put-rollup-job-request]]
==== Put Rollup Job Request

A `PutRollupJobRequest` requires the following argument:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-request]
--------------------------------------------------
<1> The configuration of the Rollup job to create as a `RollupJobConfig`
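
For readers who do not have the tagged test snippet at hand, a minimal sketch of
building the request could look like the following. It assumes a `RollupJobConfig`
named `config` has already been built (see the next section) and that
`PutRollupJobRequest` only needs that configuration, as the callout above indicates.

[source,java]
--------------------------------------------------
// Minimal sketch: the job configuration is the only required argument.
// `config` is assumed to be a RollupJobConfig built as in the next section.
PutRollupJobRequest request = new PutRollupJobRequest(config);
--------------------------------------------------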

[[java-rest-high-x-pack-rollup-put-rollup-job-config]]
==== Rollup Job Configuration

The `RollupJobConfig` object contains all the details about the rollup job
configuration. See <<rollup-job-config, Rollup configuration>> to learn more
about the various configuration settings.

A `RollupJobConfig` requires the following arguments:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-config]
--------------------------------------------------
<1> The name of the Rollup job
<2> The index (or index pattern) to roll up
<3> The index to store rollup results into
<4> A cron expression which defines when the Rollup job should be executed
<5> The page size to use for the Rollup job
<6> The grouping configuration of the Rollup job as a `GroupConfig`
<7> The metrics configuration of the Rollup job as a list of `MetricConfig`
<8> The timeout value to use for the Rollup job as a `TimeValue`
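
As a rough, non-authoritative sketch, the eight arguments could be supplied as
below. The argument order follows the `RollupJobConfig` constructor used by the
client tests in this commit; `groups` and `metrics` are assumed to be built as in
the next two sections, `TimeValue` is the common unit class, and the job name,
indices, cron expression and page size are made-up values.

[source,java]
--------------------------------------------------
// Sketch only: argument order mirrors the RollupJobConfig constructor used in the tests.
RollupJobConfig config = new RollupJobConfig(
    "sensor_rollup_job",              // id of the Rollup job
    "sensor-*",                       // index pattern to roll up (must not match the rollup index)
    "sensor_rollup",                  // index that stores the rollup results
    "*/30 * * * * ?",                 // cron expression controlling when the job runs
    100,                              // page size used while rolling up documents
    groups,                           // GroupConfig built as shown below
    metrics,                          // List<MetricConfig> built as shown below
    TimeValue.timeValueSeconds(20));  // timeout used by the job
--------------------------------------------------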

[[java-rest-high-x-pack-rollup-put-rollup-job-group-config]]
==== Grouping Configuration

The grouping configuration of the Rollup job is defined in the `RollupJobConfig`
using a `GroupConfig` instance. `GroupConfig` reflects all the configuration
settings that can be defined using the REST API. See <<rollup-groups-config, Grouping Config>>
to learn more about these settings.

Using the REST API, we could define this grouping configuration:

[source,js]
--------------------------------------------------
"groups" : {
  "date_histogram": {
    "field": "timestamp",
    "interval": "1h",
    "delay": "7d",
    "time_zone": "UTC"
  },
  "terms": {
    "fields": ["hostname", "datacenter"]
  },
  "histogram": {
    "fields": ["load", "net_in", "net_out"],
    "interval": 5
  }
}
--------------------------------------------------
// NOTCONSOLE

Using the `GroupConfig` object and the high level REST client, the same
configuration would be:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-group-config]
--------------------------------------------------
<1> The date histogram aggregation to use to roll up documents, as a `DateHistogramGroupConfig`
<2> The terms aggregation to use to roll up documents, as a `TermsGroupConfig`
<3> The histogram aggregation to use to roll up documents, as a `HistogramGroupConfig`
<4> The grouping configuration as a `GroupConfig`
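
A non-authoritative sketch of the equivalent Java objects is shown below. It
assumes the client-side group classes mirror the server-side ones, i.e. a
`DateHistogramGroupConfig(field, interval, delay, timeZone)` constructor, varargs
constructors for `TermsGroupConfig` and `HistogramGroupConfig`, and a `GroupConfig`
constructor accepting the three sub-configurations; `DateHistogramInterval` is the
class used by the date histogram aggregation.

[source,java]
--------------------------------------------------
// Sketch only: constructor shapes are assumed to mirror the REST body above.
DateHistogramGroupConfig dateHistogram = new DateHistogramGroupConfig(
    "timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC");
TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter");
HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out");
GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms);
--------------------------------------------------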

[[java-rest-high-x-pack-rollup-put-rollup-job-metrics-config]]
==== Metrics Configuration

After defining which groups should be generated for the data, you next configure
which metrics should be collected. The list of metrics is defined in the `RollupJobConfig`
using a `List<MetricConfig>` instance. `MetricConfig` reflects all the configuration
settings that can be defined using the REST API. See <<rollup-metrics-config, Metrics Config>>
to learn more about these settings.

Using the REST API, we could define this metrics configuration:

[source,js]
--------------------------------------------------
"metrics": [
  {
    "field": "temperature",
    "metrics": ["min", "max", "sum"]
  },
  {
    "field": "voltage",
    "metrics": ["avg", "value_count"]
  }
]
--------------------------------------------------
// NOTCONSOLE

Using the `MetricConfig` object and the high level REST client, the same
configuration would be:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-metrics-config]
--------------------------------------------------
<1> The list of `MetricConfig` to configure in the `RollupJobConfig`
<2> Adds the metrics to compute on the `temperature` field
<3> Adds the metrics to compute on the `voltage` field
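
A minimal sketch of the same metrics in Java could look like the following; it
assumes `MetricConfig` takes the field name and the list of metric names, which is
consistent with the two-argument constructor used by the tests in this commit, and
that `java.util.Arrays` and `java.util.List` are imported.

[source,java]
--------------------------------------------------
// Sketch only: one MetricConfig per field, listing the metrics to compute for it.
MetricConfig temperature = new MetricConfig("temperature", Arrays.asList("min", "max", "sum"));
MetricConfig voltage = new MetricConfig("voltage", Arrays.asList("avg", "value_count"));
List<MetricConfig> metrics = Arrays.asList(temperature, voltage);
--------------------------------------------------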

[[java-rest-high-x-pack-rollup-put-rollup-job-execution]]
==== Execution

The Put Rollup Job API can be executed through a `RollupClient`
instance. Such an instance can be retrieved from a `RestHighLevelClient`
using the `rollup()` method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-execute]
--------------------------------------------------
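
A blocking call could then look like the sketch below; it assumes `client` is a
`RestHighLevelClient` and that the synchronous method follows the usual high level
REST client naming, i.e. `putRollupJob` taking the request and `RequestOptions`.

[source,java]
--------------------------------------------------
// Sketch only: executes the request synchronously; may throw IOException.
PutRollupJobResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
--------------------------------------------------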

[[java-rest-high-x-pack-rollup-put-rollup-job-response]]
==== Response

The returned `PutRollupJobResponse` indicates whether the new Rollup job
was successfully created:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-response]
--------------------------------------------------
<1> `acknowledged` is a boolean indicating whether the job was successfully created
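
As a sketch, and assuming the flag is exposed through an `isAcknowledged()` getter
as in other acknowledged responses of the client:

[source,java]
--------------------------------------------------
// Sketch only: true when the job was created and acknowledged by the cluster.
boolean acknowledged = response.isAcknowledged();
--------------------------------------------------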

[[java-rest-high-x-pack-rollup-put-rollup-job-async]]
==== Asynchronous Execution

This request can be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-execute-async]
--------------------------------------------------
<1> The `PutRollupJobRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed, the `ActionListener` is called back using the `onResponse` method
if the execution completed successfully or using the `onFailure` method if
it failed.

A typical listener for `PutRollupJobResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-job-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument
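
Putting the two together, a hand-written listener and the asynchronous call could
look like the sketch below; it assumes the asynchronous variant is named
`putRollupJobAsync` and mirrors the blocking method's arguments.

[source,java]
--------------------------------------------------
// Sketch only: the listener receives either the response or the exception.
ActionListener<PutRollupJobResponse> listener = new ActionListener<PutRollupJobResponse>() {
    @Override
    public void onResponse(PutRollupJobResponse response) {
        // handle the acknowledged response here
    }

    @Override
    public void onFailure(Exception e) {
        // handle the failure, e.g. log or retry
    }
};
client.rollup().putRollupJobAsync(request, RequestOptions.DEFAULT, listener);
--------------------------------------------------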

@ -262,6 +262,14 @@ The Java High Level REST Client supports the following Migration APIs:

include::migration/get-assistance.asciidoc[]

== Rollup APIs

The Java High Level REST Client supports the following Rollup APIs:

* <<java-rest-high-x-pack-rollup-put-job>>

include::rollup/put_job.asciidoc[]

== Security APIs

The Java High Level REST Client supports the following Security APIs:

@ -7,9 +7,7 @@ package org.elasticsearch.xpack.rollup.rest;

 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;

@ -20,7 +18,6 @@ import org.elasticsearch.xpack.rollup.Rollup;
 import java.io.IOException;

 public class RestPutRollupJobAction extends BaseRestHandler {
-    public static final ParseField ID = new ParseField("id");

     public RestPutRollupJobAction(Settings settings, RestController controller) {
         super(settings);

@ -28,13 +25,10 @@ public class RestPutRollupJobAction extends BaseRestHandler {
     }

     @Override
-    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
-        String id = restRequest.param(ID.getPreferredName());
-        XContentParser parser = restRequest.contentParser();
-
-        PutRollupJobAction.Request request = PutRollupJobAction.Request.fromXContent(parser, id);
-
-        return channel -> client.execute(PutRollupJobAction.INSTANCE, request, new RestToXContentListener<>(channel));
+    protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
+        final String id = request.param("id");
+        final PutRollupJobAction.Request putRollupJobRequest = PutRollupJobAction.Request.fromXContent(request.contentParser(), id);
+        return channel -> client.execute(PutRollupJobAction.INSTANCE, putRollupJobRequest, new RestToXContentListener<>(channel));
     }

     @Override