[PURIFY] remove all trace of x-pack rollups (#17)
This commit removes all trace of Elastic licensed rollups Signed-off-by: Peter Nied <petern@amazon.com>
This commit is contained in:
parent
fb7ff93c42
commit
3769b2c6a4
|
@ -36,7 +36,6 @@
|
|||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]MigrationDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]MigrationClientDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]MiscellaneousDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]RollupDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]SearchDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]SecurityDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]SnapshotClientDocumentationIT.java" id="SnippetLength" />
|
||||
|
|
|
@ -400,8 +400,7 @@ final class RequestConverters {
|
|||
* Convert a {@linkplain SearchRequest} into a {@linkplain Request}.
|
||||
* @param searchRequest the request to convert
|
||||
* @param searchEndpoint the name of the search endpoint. {@literal _search}
|
||||
* for standard searches and {@literal _rollup_search} for rollup
|
||||
* searches.
|
||||
* for standard searches
|
||||
*/
|
||||
static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException {
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint));
|
||||
|
|
|
@ -263,7 +263,6 @@ public class RestHighLevelClient implements Closeable {
|
|||
private final WatcherClient watcherClient = new WatcherClient(this);
|
||||
private final MigrationClient migrationClient = new MigrationClient(this);
|
||||
private final IndexLifecycleClient ilmClient = new IndexLifecycleClient(this);
|
||||
private final RollupClient rollupClient = new RollupClient(this);
|
||||
private final TransformClient transformClient = new TransformClient(this);
|
||||
|
||||
/**
|
||||
|
@ -346,18 +345,6 @@ public class RestHighLevelClient implements Closeable {
|
|||
return snapshotClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides methods for accessing the Elastic Licensed Rollup APIs that
|
||||
* are shipped with the default distribution of Elasticsearch. All of
|
||||
* these APIs will 404 if run against the OSS distribution of Elasticsearch.
|
||||
* <p>
|
||||
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-apis.html">
|
||||
* Watcher APIs on elastic.co</a> for more information.
|
||||
*/
|
||||
public RollupClient rollup() {
|
||||
return rollupClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a {@link TasksClient} which can be used to access the Tasks API.
|
||||
*
|
||||
|
|
|
@ -1,341 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupIndexCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupIndexCapsResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsResponse;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
/**
|
||||
* A wrapper for the {@link RestHighLevelClient} that provides methods for
|
||||
* accessing the Elastic Rollup-related methods
|
||||
* <p>
|
||||
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-apis.html">
|
||||
* X-Pack Rollup APIs on elastic.co</a> for more information.
|
||||
*/
|
||||
public class RollupClient {
|
||||
|
||||
private final RestHighLevelClient restHighLevelClient;
|
||||
|
||||
RollupClient(final RestHighLevelClient restHighLevelClient) {
|
||||
this.restHighLevelClient = restHighLevelClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Put a rollup job into the cluster
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public AcknowledgedResponse putRollupJob(PutRollupJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::putJob,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously put a rollup job into the cluster
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable putRollupJobAsync(PutRollupJobRequest request, RequestOptions options,
|
||||
ActionListener<AcknowledgedResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::putJob,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
listener, Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a rollup job
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-start-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public StartRollupJobResponse startRollupJob(StartRollupJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::startJob,
|
||||
options,
|
||||
StartRollupJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously start a rollup job
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-start-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable startRollupJobAsync(StartRollupJobRequest request, RequestOptions options,
|
||||
ActionListener<StartRollupJobResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::startJob,
|
||||
options,
|
||||
StartRollupJobResponse::fromXContent,
|
||||
listener, Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a rollup job
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-stop-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public StopRollupJobResponse stopRollupJob(StopRollupJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::stopJob,
|
||||
options,
|
||||
StopRollupJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously stop a rollup job
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-stop-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable stopRollupJobAsync(StopRollupJobRequest request, RequestOptions options,
|
||||
ActionListener<StopRollupJobResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::stopJob,
|
||||
options,
|
||||
StopRollupJobResponse::fromXContent,
|
||||
listener, Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a rollup job from the cluster
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-delete-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public AcknowledgedResponse deleteRollupJob(DeleteRollupJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::deleteJob,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
/**
|
||||
* Asynchronously delete a rollup job from the cluster
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-delete-job.html">
|
||||
* The docs</a> for details.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable deleteRollupJobAsync(DeleteRollupJobRequest request,
|
||||
RequestOptions options,
|
||||
ActionListener<AcknowledgedResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::deleteJob,
|
||||
options,
|
||||
AcknowledgedResponse::fromXContent,
|
||||
listener, Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a rollup job from the cluster.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public GetRollupJobResponse getRollupJob(GetRollupJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::getJob,
|
||||
options,
|
||||
GetRollupJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously get a rollup job from the cluster.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable getRollupJobAsync(GetRollupJobRequest request, RequestOptions options,
|
||||
ActionListener<GetRollupJobResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::getJob,
|
||||
options,
|
||||
GetRollupJobResponse::fromXContent,
|
||||
listener, Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a rollup search.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-search.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public SearchResponse search(SearchRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(
|
||||
request,
|
||||
RollupRequestConverters::search,
|
||||
options,
|
||||
SearchResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a rollup search.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-search.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable searchAsync(SearchRequest request, RequestOptions options, ActionListener<SearchResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(
|
||||
request,
|
||||
RollupRequestConverters::search,
|
||||
options,
|
||||
SearchResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the Rollup Capabilities of a target (non-rollup) index or pattern
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-rollup-caps.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public GetRollupCapsResponse getRollupCapabilities(GetRollupCapsRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::getRollupCaps,
|
||||
options,
|
||||
GetRollupCapsResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously Get the Rollup Capabilities of a target (non-rollup) index or pattern
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable getRollupCapabilitiesAsync(GetRollupCapsRequest request, RequestOptions options,
|
||||
ActionListener<GetRollupCapsResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::getRollupCaps,
|
||||
options,
|
||||
GetRollupCapsResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the Rollup Index Capabilities of a rollup index or pattern
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-rollup-index-caps.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public GetRollupIndexCapsResponse getRollupIndexCapabilities(GetRollupIndexCapsRequest request,
|
||||
RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::getRollupIndexCaps,
|
||||
options,
|
||||
GetRollupIndexCapsResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously Get the Rollup Index Capabilities of a rollup index or pattern
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-rollup-index-caps.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
* @return cancellable that may be used to cancel the request
|
||||
*/
|
||||
public Cancellable getRollupIndexCapabilitiesAsync(GetRollupIndexCapsRequest request, RequestOptions options,
|
||||
ActionListener<GetRollupIndexCapsResponse> listener) {
|
||||
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::getRollupIndexCaps,
|
||||
options,
|
||||
GetRollupIndexCapsResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
}
|
|
@ -1,131 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupIndexCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobRequest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
|
||||
import static org.elasticsearch.client.RequestConverters.createEntity;
|
||||
|
||||
final class RollupRequestConverters {
|
||||
|
||||
private RollupRequestConverters() {
|
||||
}
|
||||
|
||||
static Request putJob(final PutRollupJobRequest putRollupJobRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_rollup", "job")
|
||||
.addPathPart(putRollupJobRequest.getConfig().getId())
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
request.setEntity(createEntity(putRollupJobRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request startJob(final StartRollupJobRequest startRollupJobRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_rollup", "job")
|
||||
.addPathPart(startRollupJobRequest.getJobId())
|
||||
.addPathPartAsIs("_start")
|
||||
.build();
|
||||
return new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
}
|
||||
|
||||
static Request stopJob(final StopRollupJobRequest stopRollupJobRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_rollup", "job")
|
||||
.addPathPart(stopRollupJobRequest.getJobId())
|
||||
.addPathPartAsIs("_stop")
|
||||
.build();
|
||||
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
RequestConverters.Params parameters = new RequestConverters.Params();
|
||||
parameters.withTimeout(stopRollupJobRequest.timeout());
|
||||
if (stopRollupJobRequest.waitForCompletion() != null) {
|
||||
parameters.withWaitForCompletion(stopRollupJobRequest.waitForCompletion());
|
||||
}
|
||||
request.addParameters(parameters.asMap());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getJob(final GetRollupJobRequest getRollupJobRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_rollup", "job")
|
||||
.addPathPart(getRollupJobRequest.getJobId())
|
||||
.build();
|
||||
return new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
}
|
||||
|
||||
static Request deleteJob(final DeleteRollupJobRequest deleteRollupJobRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_rollup", "job")
|
||||
.addPathPart(deleteRollupJobRequest.getId())
|
||||
.build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
request.setEntity(createEntity(deleteRollupJobRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request search(final SearchRequest request) throws IOException {
|
||||
if (request.types().length > 0) {
|
||||
/*
|
||||
* Ideally we'd check this with the standard validation framework
|
||||
* but we don't have a special request for rollup search so that'd
|
||||
* be difficult.
|
||||
*/
|
||||
ValidationException ve = new ValidationException();
|
||||
ve.addValidationError("types are not allowed in rollup search");
|
||||
throw ve;
|
||||
}
|
||||
return RequestConverters.search(request, "_rollup_search");
|
||||
}
|
||||
|
||||
static Request getRollupCaps(final GetRollupCapsRequest getRollupCapsRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_rollup", "data")
|
||||
.addPathPart(getRollupCapsRequest.getIndexPattern())
|
||||
.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
request.setEntity(createEntity(getRollupCapsRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getRollupIndexCaps(final GetRollupIndexCapsRequest getRollupIndexCapsRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addCommaSeparatedPathParts(getRollupIndexCapsRequest.indices())
|
||||
.addPathPartAsIs("_rollup", "data")
|
||||
.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
request.setEntity(createEntity(getRollupIndexCapsRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
}
|
|
@ -1,79 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
|
||||
public class DeleteRollupJobRequest implements Validatable, ToXContentObject {
|
||||
|
||||
private static final ParseField ID_FIELD = new ParseField("id");
|
||||
private final String id;
|
||||
|
||||
|
||||
public DeleteRollupJobRequest(String id) {
|
||||
this.id = Objects.requireNonNull(id, "id parameter must not be null");
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
private static final ConstructingObjectParser<DeleteRollupJobRequest, Void> PARSER =
|
||||
new ConstructingObjectParser<>("request", a -> {
|
||||
return new DeleteRollupJobRequest((String) a[0]);
|
||||
});
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), ID_FIELD);
|
||||
}
|
||||
|
||||
public static DeleteRollupJobRequest fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(ID_FIELD.getPreferredName(), this.id);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
DeleteRollupJobRequest that = (DeleteRollupJobRequest) o;
|
||||
return Objects.equals(id, that.id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(id);
|
||||
}
|
||||
}
|
|
@ -1,70 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.cluster.metadata.Metadata;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class GetRollupCapsRequest implements Validatable, ToXContentObject {
|
||||
private static final String ID = "id";
|
||||
private final String indexPattern;
|
||||
|
||||
public GetRollupCapsRequest(final String indexPattern) {
|
||||
if (Strings.isNullOrEmpty(indexPattern) || indexPattern.equals("*")) {
|
||||
this.indexPattern = Metadata.ALL;
|
||||
} else {
|
||||
this.indexPattern = indexPattern;
|
||||
}
|
||||
}
|
||||
|
||||
public String getIndexPattern() {
|
||||
return indexPattern;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(ID, indexPattern);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(indexPattern);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
GetRollupCapsRequest other = (GetRollupCapsRequest) obj;
|
||||
return Objects.equals(indexPattern, other.indexPattern);
|
||||
}
|
||||
}
|
|
@ -1,73 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class GetRollupCapsResponse {
|
||||
|
||||
private final Map<String, RollableIndexCaps> jobs;
|
||||
|
||||
public GetRollupCapsResponse(final Map<String, RollableIndexCaps> jobs) {
|
||||
this.jobs = Collections.unmodifiableMap(Objects.requireNonNull(jobs));
|
||||
}
|
||||
|
||||
public Map<String, RollableIndexCaps> getJobs() {
|
||||
return jobs;
|
||||
}
|
||||
|
||||
public static GetRollupCapsResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
Map<String, RollableIndexCaps> jobs = new HashMap<>();
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token.equals(XContentParser.Token.START_OBJECT)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token.equals(XContentParser.Token.FIELD_NAME)) {
|
||||
String pattern = parser.currentName();
|
||||
|
||||
RollableIndexCaps cap = RollableIndexCaps.PARSER.parse(parser, pattern);
|
||||
jobs.put(pattern, cap);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new GetRollupCapsResponse(jobs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
GetRollupCapsResponse other = (GetRollupCapsResponse) obj;
|
||||
return Objects.equals(jobs, other.jobs);
|
||||
}
|
||||
}
|
|
@ -1,95 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
public class GetRollupIndexCapsRequest implements Validatable, ToXContentObject {
|
||||
private static final String INDICES = "indices";
|
||||
private static final String INDICES_OPTIONS = "indices_options";
|
||||
|
||||
private String[] indices;
|
||||
private IndicesOptions options;
|
||||
|
||||
public GetRollupIndexCapsRequest(final String... indices) {
|
||||
this(indices, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED);
|
||||
}
|
||||
|
||||
public GetRollupIndexCapsRequest(final String[] indices, final IndicesOptions options) {
|
||||
if (indices == null || indices.length == 0) {
|
||||
throw new IllegalArgumentException("[indices] must not be null or empty");
|
||||
}
|
||||
for (String index : indices) {
|
||||
if (Strings.isNullOrEmpty(index)) {
|
||||
throw new IllegalArgumentException("[index] must not be null or empty");
|
||||
}
|
||||
}
|
||||
this.indices = indices;
|
||||
this.options = Objects.requireNonNull(options);
|
||||
}
|
||||
|
||||
public IndicesOptions indicesOptions() {
|
||||
return options;
|
||||
}
|
||||
|
||||
public String[] indices() {
|
||||
return indices;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.array(INDICES, indices);
|
||||
builder.startObject(INDICES_OPTIONS);
|
||||
{
|
||||
options.toXContent(builder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(Arrays.hashCode(indices), options);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
GetRollupIndexCapsRequest other = (GetRollupIndexCapsRequest) obj;
|
||||
return Arrays.equals(indices, other.indices)
|
||||
&& Objects.equals(options, other.options);
|
||||
}
|
||||
}
|
|
@ -1,73 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class GetRollupIndexCapsResponse {
|
||||
|
||||
private final Map<String, RollableIndexCaps> jobs;
|
||||
|
||||
public GetRollupIndexCapsResponse(final Map<String, RollableIndexCaps> jobs) {
|
||||
this.jobs = Collections.unmodifiableMap(Objects.requireNonNull(jobs));
|
||||
}
|
||||
|
||||
public Map<String, RollableIndexCaps> getJobs() {
|
||||
return jobs;
|
||||
}
|
||||
|
||||
public static GetRollupIndexCapsResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
Map<String, RollableIndexCaps> jobs = new HashMap<>();
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token.equals(XContentParser.Token.START_OBJECT)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token.equals(XContentParser.Token.FIELD_NAME)) {
|
||||
String pattern = parser.currentName();
|
||||
|
||||
RollableIndexCaps cap = RollableIndexCaps.PARSER.apply(parser, pattern);
|
||||
jobs.put(pattern, cap);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new GetRollupIndexCapsResponse(jobs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
GetRollupIndexCapsResponse other = (GetRollupIndexCapsResponse) obj;
|
||||
return Objects.equals(jobs, other.jobs);
|
||||
}
|
||||
}
|
|
@ -1,77 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* Request to fetch rollup jobs.
|
||||
*/
|
||||
public class GetRollupJobRequest implements Validatable {
|
||||
private final String jobId;
|
||||
|
||||
/**
|
||||
* Create a requets .
|
||||
* @param jobId id of the job to return or {@code _all} to return all jobs
|
||||
*/
|
||||
public GetRollupJobRequest(final String jobId) {
|
||||
Objects.requireNonNull(jobId, "jobId is required");
|
||||
if ("_all".equals(jobId)) {
|
||||
throw new IllegalArgumentException("use the default ctor to ask for all jobs");
|
||||
}
|
||||
this.jobId = jobId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a request to load all rollup jobs.
|
||||
*/
|
||||
public GetRollupJobRequest() {
|
||||
this.jobId = "_all";
|
||||
}
|
||||
|
||||
/**
|
||||
* ID of the job to return.
|
||||
*/
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
final GetRollupJobRequest that = (GetRollupJobRequest) o;
|
||||
return jobId.equals(that.jobId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId);
|
||||
}
|
||||
}
|
|
@ -1,290 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.core.IndexerJobStats;
|
||||
import org.elasticsearch.client.core.IndexerState;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static java.util.Collections.unmodifiableList;
|
||||
import static java.util.stream.Collectors.joining;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
/**
|
||||
* Response from rollup's get jobs api.
|
||||
*/
|
||||
public class GetRollupJobResponse {
    // Field names of the wire format produced by the rollup get-jobs API.
    static final ParseField JOBS = new ParseField("jobs");
    static final ParseField CONFIG = new ParseField("config");
    static final ParseField STATS = new ParseField("stats");
    static final ParseField STATUS = new ParseField("status");
    static final ParseField STATE = new ParseField("job_state");
    static final ParseField CURRENT_POSITION = new ParseField("current_position");
    static final ParseField ROLLUPS_INDEXED = new ParseField("rollups_indexed");
    static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id");

    // Effectively final: assigned only in the constructor, never reassigned.
    private List<JobWrapper> jobs;

    GetRollupJobResponse(final List<JobWrapper> jobs) {
        this.jobs = Objects.requireNonNull(jobs, "jobs is required");
    }

    /**
     * Jobs returned by the request.
     */
    public List<JobWrapper> getJobs() {
        return jobs;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final GetRollupJobResponse that = (GetRollupJobResponse) o;
        return jobs.equals(that.jobs);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobs);
    }

    // Lenient (true): unknown fields in the response are ignored for forward compatibility.
    private static final ConstructingObjectParser<GetRollupJobResponse, Void> PARSER = new ConstructingObjectParser<>(
            "get_rollup_job_response",
            true,
            args -> {
                @SuppressWarnings("unchecked") // We're careful about the type in the list
                List<JobWrapper> jobs = (List<JobWrapper>) args[0];
                return new GetRollupJobResponse(unmodifiableList(jobs));
            });
    static {
        PARSER.declareObjectArray(constructorArg(), JobWrapper.PARSER::apply, JOBS);
    }

    /** Parses a whole get-jobs response body. */
    public static GetRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public final String toString() {
        return "{jobs=" + jobs.stream().map(Object::toString).collect(joining("\n")) + "\n}";
    }

    /**
     * Bundles the config, stats, and status reported for a single rollup job.
     */
    public static class JobWrapper {
        private final RollupJobConfig job;
        private final RollupIndexerJobStats stats;
        private final RollupJobStatus status;

        JobWrapper(RollupJobConfig job, RollupIndexerJobStats stats, RollupJobStatus status) {
            this.job = job;
            this.stats = stats;
            this.status = status;
        }

        /**
         * Configuration of the job.
         */
        public RollupJobConfig getJob() {
            return job;
        }

        /**
         * Statistics about the execution of the job.
         */
        public RollupIndexerJobStats getStats() {
            return stats;
        }

        /**
         * Current state of the job.
         */
        public RollupJobStatus getStatus() {
            return status;
        }

        // Lenient parser; the three constructor args map to CONFIG, STATS, STATUS in order.
        private static final ConstructingObjectParser<JobWrapper, Void> PARSER = new ConstructingObjectParser<>(
                "job",
                true,
                a -> new JobWrapper((RollupJobConfig) a[0], (RollupIndexerJobStats) a[1], (RollupJobStatus) a[2]));
        static {
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG);
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupIndexerJobStats.PARSER::apply, STATS);
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStatus.PARSER::apply, STATUS);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            JobWrapper other = (JobWrapper) obj;
            return Objects.equals(job, other.job)
                && Objects.equals(stats, other.stats)
                && Objects.equals(status, other.status);
        }

        @Override
        public int hashCode() {
            return Objects.hash(job, stats, status);
        }

        @Override
        public final String toString() {
            return "{job=" + job
                + ", stats=" + stats
                + ", status=" + status + "}";
        }
    }

    /**
     * The Rollup specialization of stats for the AsyncTwoPhaseIndexer.
     * Note: instead of `documents_indexed`, this XContent show `rollups_indexed`
     */
    public static class RollupIndexerJobStats extends IndexerJobStats {

        // NOTE: the super() call reorders the arguments relative to this constructor's
        // parameter list (indexTotal/searchTotal move after the *Time values).
        RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations,
                              long indexTime, long indexTotal, long searchTime, long searchTotal, long processingTime,
                              long processingTotal, long indexFailures, long searchFailures) {
            super(numPages, numInputDocuments, numOuputDocuments, numInvocations,
                indexTime, searchTime, processingTime, indexTotal, searchTotal, processingTotal, indexFailures, searchFailures);
        }

        // Arg positions here must stay aligned with the declareLong order in the
        // static block below; both feed the constructor's parameter order.
        private static final ConstructingObjectParser<RollupIndexerJobStats, Void> PARSER = new ConstructingObjectParser<>(
                STATS.getPreferredName(),
                true,
                args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3],
                    (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9],
                    (long) args[10], (long) args[11]));
        static {
            PARSER.declareLong(constructorArg(), NUM_PAGES);
            PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS);
            PARSER.declareLong(constructorArg(), ROLLUPS_INDEXED);
            PARSER.declareLong(constructorArg(), NUM_INVOCATIONS);
            PARSER.declareLong(constructorArg(), INDEX_TIME_IN_MS);
            PARSER.declareLong(constructorArg(), INDEX_TOTAL);
            PARSER.declareLong(constructorArg(), SEARCH_TIME_IN_MS);
            PARSER.declareLong(constructorArg(), SEARCH_TOTAL);
            PARSER.declareLong(constructorArg(), PROCESSING_TIME_IN_MS);
            PARSER.declareLong(constructorArg(), PROCESSING_TOTAL);
            PARSER.declareLong(constructorArg(), INDEX_FAILURES);
            PARSER.declareLong(constructorArg(), SEARCH_FAILURES);
        }
    }

    /**
     * Status of the rollup job.
     */
    public static class RollupJobStatus {
        private final IndexerState state;
        private final Map<String, Object> currentPosition;
        private final boolean upgradedDocumentId;

        RollupJobStatus(IndexerState state, Map<String, Object> position, boolean upgradedDocumentId) {
            this.state = state;
            this.currentPosition = position;
            this.upgradedDocumentId = upgradedDocumentId;
        }

        /**
         * The state of the writer.
         */
        public IndexerState getState() {
            return state;
        }
        /**
         * The current position of the writer.
         */
        public Map<String, Object> getCurrentPosition() {
            return currentPosition;
        }
        /**
         * Flag holds the state of the ID scheme, e.g. if it has been upgraded
         * to the concatenation scheme.
         */
        public boolean getUpgradedDocumentId() {
            return upgradedDocumentId;
        }

        // Lenient parser; a missing upgraded_doc_id flag defaults to false below.
        private static final ConstructingObjectParser<RollupJobStatus, Void> PARSER = new ConstructingObjectParser<>(
                STATUS.getPreferredName(),
                true,
                args -> {
                    IndexerState state = (IndexerState) args[0];
                    @SuppressWarnings("unchecked") // We're careful of the contents
                    Map<String, Object> currentPosition = (Map<String, Object>) args[1];
                    Boolean upgradedDocumentId = (Boolean) args[2];
                    return new RollupJobStatus(state, currentPosition, upgradedDocumentId == null ? false : upgradedDocumentId);
                });
        static {
            PARSER.declareField(constructorArg(), p -> IndexerState.fromString(p.text()), STATE, ObjectParser.ValueType.STRING);
            // current_position may be an object (the position map) or an explicit null.
            PARSER.declareField(optionalConstructorArg(), p -> {
                if (p.currentToken() == XContentParser.Token.START_OBJECT) {
                    return p.map();
                }
                if (p.currentToken() == XContentParser.Token.VALUE_NULL) {
                    return null;
                }
                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
            }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY);

            // Optional to accommodate old versions of state
            PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), UPGRADED_DOC_ID);
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            RollupJobStatus that = (RollupJobStatus) other;
            return Objects.equals(state, that.state)
                && Objects.equals(currentPosition, that.currentPosition)
                && upgradedDocumentId == that.upgradedDocumentId;
        }

        @Override
        public int hashCode() {
            return Objects.hash(state, currentPosition, upgradedDocumentId);
        }

        @Override
        public final String toString() {
            // NOTE(review): the label says "stats" but prints the state field —
            // looks like a typo; confirm nothing depends on this exact output.
            return "{stats=" + state
                + ", currentPosition=" + currentPosition
                + ", upgradedDocumentId=" + upgradedDocumentId + "}";
        }
    }
}
|
|
@ -1,65 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class PutRollupJobRequest implements Validatable, ToXContentObject {
|
||||
|
||||
private final RollupJobConfig config;
|
||||
|
||||
public PutRollupJobRequest(final RollupJobConfig config) {
|
||||
this.config = Objects.requireNonNull(config, "rollup job configuration is required");
|
||||
}
|
||||
|
||||
public RollupJobConfig getConfig() {
|
||||
return config;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return config.toXContent(builder, params);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
return config.validate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
final PutRollupJobRequest that = (PutRollupJobRequest) o;
|
||||
return Objects.equals(config, that.config);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(config);
|
||||
}
|
||||
}
|
|
@ -1,103 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
/**
|
||||
* Represents the rollup capabilities of a non-rollup index. E.g. what values/aggregations
|
||||
* were rolled up for this index, in what rollup jobs that data is stored and where those
|
||||
* concrete rollup indices exist
|
||||
*
|
||||
* The index name can either be a single index, or an index pattern (logstash-*)
|
||||
*/
|
||||
public class RollableIndexCaps implements ToXContentFragment {
|
||||
private static final ParseField ROLLUP_JOBS = new ParseField("rollup_jobs");
|
||||
|
||||
public static final ConstructingObjectParser<RollableIndexCaps, String> PARSER = new ConstructingObjectParser<>(
|
||||
ROLLUP_JOBS.getPreferredName(), true, (Object[] args, String indexName) -> {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<RollupJobCaps> caps = (List<RollupJobCaps>) args[0];
|
||||
return new RollableIndexCaps(indexName, caps);
|
||||
});
|
||||
static {
|
||||
PARSER.declareObjectArray(constructorArg(), (p, name) -> RollupJobCaps.PARSER.parse(p, null), ROLLUP_JOBS);
|
||||
}
|
||||
|
||||
private final String indexName;
|
||||
private final List<RollupJobCaps> jobCaps;
|
||||
|
||||
RollableIndexCaps(final String indexName, final List<RollupJobCaps> caps) {
|
||||
this.indexName = indexName;
|
||||
this.jobCaps = Collections.unmodifiableList(Objects.requireNonNull(caps)
|
||||
.stream()
|
||||
.sorted(Comparator.comparing(RollupJobCaps::getJobID))
|
||||
.collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
public String getIndexName() {
|
||||
return indexName;
|
||||
}
|
||||
|
||||
public List<RollupJobCaps> getJobCaps() {
|
||||
return jobCaps;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(indexName);
|
||||
{
|
||||
builder.field(ROLLUP_JOBS.getPreferredName(), jobCaps);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
RollableIndexCaps that = (RollableIndexCaps) other;
|
||||
return Objects.equals(this.jobCaps, that.jobCaps)
|
||||
&& Objects.equals(this.indexName, that.indexName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobCaps, indexName);
|
||||
}
|
||||
}
|
|
@ -1,185 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Represents the Rollup capabilities for a specific job on a single rollup index
|
||||
*/
|
||||
public class RollupJobCaps implements ToXContentObject {
|
||||
private static final ParseField JOB_ID = new ParseField("job_id");
|
||||
private static final ParseField ROLLUP_INDEX = new ParseField("rollup_index");
|
||||
private static final ParseField INDEX_PATTERN = new ParseField("index_pattern");
|
||||
private static final ParseField FIELDS = new ParseField("fields");
|
||||
private static final String NAME = "rollup_job_caps";
|
||||
|
||||
public static final ConstructingObjectParser<RollupJobCaps, Void> PARSER = new ConstructingObjectParser<>(NAME, true,
|
||||
a -> {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Tuple<String, RollupFieldCaps>> caps = (List<Tuple<String, RollupFieldCaps>>) a[3];
|
||||
Map<String, RollupFieldCaps> mapCaps =
|
||||
new HashMap<>(caps.stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)));
|
||||
return new RollupJobCaps((String) a[0], (String) a[1], (String) a[2], mapCaps);
|
||||
});
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), ROLLUP_INDEX);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), INDEX_PATTERN);
|
||||
PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(),
|
||||
(p, c, name) -> new Tuple<>(name, RollupFieldCaps.fromXContent(p)), FIELDS);
|
||||
}
|
||||
|
||||
private final String jobID;
|
||||
private final String rollupIndex;
|
||||
private final String indexPattern;
|
||||
private final Map<String, RollupFieldCaps> fieldCapLookup;
|
||||
|
||||
RollupJobCaps(final String jobID, final String rollupIndex,
|
||||
final String indexPattern, final Map<String, RollupFieldCaps> fieldCapLookup) {
|
||||
this.jobID = jobID;
|
||||
this.rollupIndex = rollupIndex;
|
||||
this.indexPattern = indexPattern;
|
||||
this.fieldCapLookup = Collections.unmodifiableMap(Objects.requireNonNull(fieldCapLookup));
|
||||
}
|
||||
|
||||
public Map<String, RollupFieldCaps> getFieldCaps() {
|
||||
return fieldCapLookup;
|
||||
}
|
||||
|
||||
public String getRollupIndex() {
|
||||
return rollupIndex;
|
||||
}
|
||||
|
||||
public String getIndexPattern() {
|
||||
return indexPattern;
|
||||
}
|
||||
|
||||
public String getJobID() {
|
||||
return jobID;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(JOB_ID.getPreferredName(), jobID);
|
||||
builder.field(ROLLUP_INDEX.getPreferredName(), rollupIndex);
|
||||
builder.field(INDEX_PATTERN.getPreferredName(), indexPattern);
|
||||
builder.startObject(FIELDS.getPreferredName());
|
||||
{
|
||||
for (Map.Entry<String, RollupFieldCaps> fieldCap : fieldCapLookup.entrySet()) {
|
||||
builder.array(fieldCap.getKey(), fieldCap.getValue());
|
||||
}
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
RollupJobCaps that = (RollupJobCaps) other;
|
||||
|
||||
return Objects.equals(this.jobID, that.jobID)
|
||||
&& Objects.equals(this.indexPattern, that.indexPattern)
|
||||
&& Objects.equals(this.rollupIndex, that.rollupIndex)
|
||||
&& Objects.equals(this.fieldCapLookup, that.fieldCapLookup);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobID, rollupIndex, fieldCapLookup, indexPattern);
|
||||
}
|
||||
|
||||
public static class RollupFieldCaps implements ToXContentFragment {
|
||||
private static final String NAME = "rollup_field_caps";
|
||||
private final List<Map<String, Object>> aggs;
|
||||
|
||||
RollupFieldCaps(final List<Map<String, Object>> aggs) {
|
||||
this.aggs = Collections.unmodifiableList(Objects.requireNonNull(aggs));
|
||||
}
|
||||
|
||||
public List<Map<String, Object>> getAggs() {
|
||||
return aggs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
for (Map<String, Object> agg : aggs) {
|
||||
builder.map(agg);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static RollupFieldCaps fromXContent(XContentParser parser) throws IOException {
|
||||
List<Map<String, Object>> aggs = new ArrayList<>();
|
||||
if (parser.nextToken().equals(XContentParser.Token.START_ARRAY)) {
|
||||
while (parser.nextToken().equals(XContentParser.Token.START_OBJECT)) {
|
||||
aggs.add(parser.map());
|
||||
}
|
||||
}
|
||||
return new RollupFieldCaps(aggs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
RollupFieldCaps that = (RollupFieldCaps) other;
|
||||
return Objects.equals(this.aggs, that.aggs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(aggs);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class StartRollupJobRequest implements Validatable {
|
||||
|
||||
private final String jobId;
|
||||
|
||||
public StartRollupJobRequest(final String jobId) {
|
||||
this.jobId = Objects.requireNonNull(jobId, "id parameter must not be null");
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
final StartRollupJobRequest that = (StartRollupJobRequest) o;
|
||||
return Objects.equals(jobId, that.jobId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId);
|
||||
}
|
||||
}
|
|
@ -1,47 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class StartRollupJobResponse extends AcknowledgedResponse {
|
||||
|
||||
private static final String PARSE_FIELD_NAME = "started";
|
||||
|
||||
private static final ConstructingObjectParser<StartRollupJobResponse, Void> PARSER = AcknowledgedResponse
|
||||
.generateParser("start_rollup_job_response", StartRollupJobResponse::new, PARSE_FIELD_NAME);
|
||||
|
||||
public StartRollupJobResponse(boolean acknowledged) {
|
||||
super(acknowledged);
|
||||
}
|
||||
|
||||
public static StartRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getFieldName() {
|
||||
return PARSE_FIELD_NAME;
|
||||
}
|
||||
}
|
|
@ -1,74 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class StopRollupJobRequest implements Validatable {
|
||||
|
||||
private final String jobId;
|
||||
private TimeValue timeout;
|
||||
private Boolean waitForCompletion;
|
||||
|
||||
public StopRollupJobRequest(final String jobId) {
|
||||
this.jobId = Objects.requireNonNull(jobId, "id parameter must not be null");
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
final StopRollupJobRequest that = (StopRollupJobRequest) o;
|
||||
return Objects.equals(jobId, that.jobId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the requests optional "timeout" parameter.
|
||||
*/
|
||||
public void timeout(TimeValue timeout) {
|
||||
this.timeout = timeout;
|
||||
}
|
||||
|
||||
public TimeValue timeout() {
|
||||
return this.timeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the requests optional "wait_for_completion".
|
||||
*/
|
||||
public void waitForCompletion(boolean waitForCompletion) {
|
||||
this.waitForCompletion = waitForCompletion;
|
||||
}
|
||||
|
||||
public Boolean waitForCompletion() {
|
||||
return this.waitForCompletion;
|
||||
}
|
||||
}
|
|
@ -1,47 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class StopRollupJobResponse extends AcknowledgedResponse {
|
||||
|
||||
private static final String PARSE_FIELD_NAME = "stopped";
|
||||
|
||||
private static final ConstructingObjectParser<StopRollupJobResponse, Void> PARSER = AcknowledgedResponse
|
||||
.generateParser("stop_rollup_job_response", StopRollupJobResponse::new, PARSE_FIELD_NAME);
|
||||
|
||||
public StopRollupJobResponse(boolean acknowledged) {
|
||||
super(acknowledged);
|
||||
}
|
||||
|
||||
public static StopRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getFieldName() {
|
||||
return PARSE_FIELD_NAME;
|
||||
}
|
||||
}
|
|
@ -1,302 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||
|
||||
/**
|
||||
* The configuration object for the histograms in the rollup config
|
||||
*
|
||||
* {
|
||||
* "groups": [
|
||||
* "date_histogram": {
|
||||
* "field" : "foo",
|
||||
* "interval" : "1d",
|
||||
* "delay": "30d",
|
||||
* "time_zone" : "EST"
|
||||
* }
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
public class DateHistogramGroupConfig implements Validatable, ToXContentObject {
|
||||
|
||||
static final String NAME = "date_histogram";
|
||||
private static final String INTERVAL = "interval";
|
||||
private static final String FIELD = "field";
|
||||
private static final String TIME_ZONE = "time_zone";
|
||||
private static final String DELAY = "delay";
|
||||
private static final String DEFAULT_TIMEZONE = "UTC";
|
||||
private static final String CALENDAR_INTERVAL = "calendar_interval";
|
||||
private static final String FIXED_INTERVAL = "fixed_interval";
|
||||
|
||||
// From DateHistogramAggregationBuilder in core, transplanted and modified to a set
|
||||
// so we don't need to import a dependency on the class
|
||||
private static final Set<String> DATE_FIELD_UNITS;
|
||||
static {
|
||||
Set<String> dateFieldUnits = new HashSet<>();
|
||||
dateFieldUnits.add("year");
|
||||
dateFieldUnits.add("1y");
|
||||
dateFieldUnits.add("quarter");
|
||||
dateFieldUnits.add("1q");
|
||||
dateFieldUnits.add("month");
|
||||
dateFieldUnits.add("1M");
|
||||
dateFieldUnits.add("week");
|
||||
dateFieldUnits.add("1w");
|
||||
dateFieldUnits.add("day");
|
||||
dateFieldUnits.add("1d");
|
||||
dateFieldUnits.add("hour");
|
||||
dateFieldUnits.add("1h");
|
||||
dateFieldUnits.add("minute");
|
||||
dateFieldUnits.add("1m");
|
||||
dateFieldUnits.add("second");
|
||||
dateFieldUnits.add("1s");
|
||||
DATE_FIELD_UNITS = Collections.unmodifiableSet(dateFieldUnits);
|
||||
}
|
||||
|
||||
private static final ConstructingObjectParser<DateHistogramGroupConfig, Void> PARSER;
|
||||
static {
|
||||
PARSER = new ConstructingObjectParser<>(NAME, true, a -> {
|
||||
DateHistogramInterval oldInterval = (DateHistogramInterval) a[1];
|
||||
DateHistogramInterval calendarInterval = (DateHistogramInterval) a[2];
|
||||
DateHistogramInterval fixedInterval = (DateHistogramInterval) a[3];
|
||||
|
||||
if (oldInterval != null) {
|
||||
if (calendarInterval != null || fixedInterval != null) {
|
||||
throw new IllegalArgumentException("Cannot use [interval] with [fixed_interval] or [calendar_interval] " +
|
||||
"configuration options.");
|
||||
}
|
||||
return new DateHistogramGroupConfig((String) a[0], oldInterval, (DateHistogramInterval) a[4], (String) a[5]);
|
||||
} else if (calendarInterval != null && fixedInterval == null) {
|
||||
return new CalendarInterval((String) a[0], calendarInterval, (DateHistogramInterval) a[4], (String) a[5]);
|
||||
} else if (calendarInterval == null && fixedInterval != null) {
|
||||
return new FixedInterval((String) a[0], fixedInterval, (DateHistogramInterval) a[4], (String) a[5]);
|
||||
} else if (calendarInterval != null && fixedInterval != null) {
|
||||
throw new IllegalArgumentException("Cannot set both [fixed_interval] and [calendar_interval] at the same time");
|
||||
} else {
|
||||
throw new IllegalArgumentException("An interval is required. Use [fixed_interval] or [calendar_interval].");
|
||||
}
|
||||
});
|
||||
PARSER.declareString(constructorArg(), new ParseField(FIELD));
|
||||
PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING);
|
||||
PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()),
|
||||
new ParseField(CALENDAR_INTERVAL), ValueType.STRING);
|
||||
PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()),
|
||||
new ParseField(FIXED_INTERVAL), ValueType.STRING);
|
||||
PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(DELAY), ValueType.STRING);
|
||||
PARSER.declareString(optionalConstructorArg(), new ParseField(TIME_ZONE));
|
||||
}
|
||||
|
||||
private final String field;
|
||||
private final DateHistogramInterval interval;
|
||||
private final DateHistogramInterval delay;
|
||||
private final String timeZone;
|
||||
|
||||
/**
|
||||
* FixedInterval is a {@link DateHistogramGroupConfig} that uses a fixed time interval for rolling up data.
|
||||
* The fixed time interval is one or multiples of SI units and has no calendar-awareness (e.g. doesn't account
|
||||
* for leap corrections, does not have variable length months, etc).
|
||||
*
|
||||
* For calendar-aware rollups, use {@link CalendarInterval}
|
||||
*/
|
||||
public static class FixedInterval extends DateHistogramGroupConfig {
|
||||
public FixedInterval(String field, DateHistogramInterval interval) {
|
||||
this(field, interval, null, null);
|
||||
}
|
||||
|
||||
public FixedInterval(String field, DateHistogramInterval interval, DateHistogramInterval delay, String timeZone) {
|
||||
super(field, interval, delay, timeZone);
|
||||
// validate fixed time
|
||||
TimeValue.parseTimeValue(interval.toString(), NAME + ".FixedInterval");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* CalendarInterval is a {@link DateHistogramGroupConfig} that uses calendar-aware intervals for rolling up data.
|
||||
* Calendar time intervals understand leap corrections and contextual differences in certain calendar units (e.g.
|
||||
* months are variable length depending on the month). Calendar units are only available in singular quantities:
|
||||
* 1s, 1m, 1h, 1d, 1w, 1q, 1M, 1y
|
||||
*
|
||||
* For fixed time rollups, use {@link FixedInterval}
|
||||
*/
|
||||
public static class CalendarInterval extends DateHistogramGroupConfig {
|
||||
public CalendarInterval(String field, DateHistogramInterval interval) {
|
||||
this(field, interval, null, null);
|
||||
|
||||
}
|
||||
|
||||
public CalendarInterval(String field, DateHistogramInterval interval, DateHistogramInterval delay, String timeZone) {
|
||||
super(field, interval, delay, timeZone);
|
||||
if (DATE_FIELD_UNITS.contains(interval.toString()) == false) {
|
||||
throw new IllegalArgumentException("The supplied interval [" + interval +"] could not be parsed " +
|
||||
"as a calendar interval.");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DateHistogramGroupConfig} using the given field and interval parameters.
|
||||
*
|
||||
* @deprecated Build a DateHistoConfig using {@link DateHistogramGroupConfig.CalendarInterval}
|
||||
* or {@link DateHistogramGroupConfig.FixedInterval} instead
|
||||
*
|
||||
* @since 7.2.0
|
||||
*/
|
||||
@Deprecated
|
||||
public DateHistogramGroupConfig(final String field, final DateHistogramInterval interval) {
|
||||
this(field, interval, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DateHistogramGroupConfig} using the given configuration parameters.
|
||||
* <p>
|
||||
* The {@code field} and {@code interval} are required to compute the date histogram for the rolled up documents.
|
||||
* The {@code delay} is optional and can be set to {@code null}. It defines how long to wait before rolling up new documents.
|
||||
* The {@code timeZone} is optional and can be set to {@code null}. When configured, the time zone value is resolved using
|
||||
* ({@link DateTimeZone#forID(String)} and must match a time zone identifier provided by the Joda Time library.
|
||||
* </p>
|
||||
* @param field the name of the date field to use for the date histogram (required)
|
||||
* @param interval the interval to use for the date histogram (required)
|
||||
* @param delay the time delay (optional)
|
||||
* @param timeZone the id of time zone to use to calculate the date histogram (optional). When {@code null}, the UTC timezone is used.
|
||||
*
|
||||
* @deprecated Build a DateHistoConfig using {@link DateHistogramGroupConfig.CalendarInterval}
|
||||
* or {@link DateHistogramGroupConfig.FixedInterval} instead
|
||||
*
|
||||
* @since 7.2.0
|
||||
*/
|
||||
@Deprecated
|
||||
public DateHistogramGroupConfig(final String field,
|
||||
final DateHistogramInterval interval,
|
||||
final @Nullable DateHistogramInterval delay,
|
||||
final @Nullable String timeZone) {
|
||||
this.field = field;
|
||||
this.interval = interval;
|
||||
this.delay = delay;
|
||||
this.timeZone = (timeZone != null && timeZone.isEmpty() == false) ? timeZone : DEFAULT_TIMEZONE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
final ValidationException validationException = new ValidationException();
|
||||
if (field == null || field.isEmpty()) {
|
||||
validationException.addValidationError("Field name is required");
|
||||
}
|
||||
if (interval == null) {
|
||||
validationException.addValidationError("Interval is required");
|
||||
}
|
||||
if (validationException.validationErrors().isEmpty()) {
|
||||
return Optional.empty();
|
||||
}
|
||||
return Optional.of(validationException);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the date field
|
||||
*/
|
||||
public String getField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the date interval
|
||||
*/
|
||||
public DateHistogramInterval getInterval() {
|
||||
return interval;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the time delay for this histogram
|
||||
*/
|
||||
public DateHistogramInterval getDelay() {
|
||||
return delay;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the timezone to apply
|
||||
*/
|
||||
public String getTimeZone() {
|
||||
return timeZone;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
if (this.getClass().equals(CalendarInterval.class)) {
|
||||
builder.field(CALENDAR_INTERVAL, interval.toString());
|
||||
} else if (this.getClass().equals(FixedInterval.class)) {
|
||||
builder.field(FIXED_INTERVAL, interval.toString());
|
||||
} else {
|
||||
builder.field(INTERVAL, interval.toString());
|
||||
}
|
||||
builder.field(FIELD, field);
|
||||
if (delay != null) {
|
||||
builder.field(DELAY, delay.toString());
|
||||
}
|
||||
builder.field(TIME_ZONE, timeZone);
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final DateHistogramGroupConfig that = (DateHistogramGroupConfig) other;
|
||||
return Objects.equals(interval, that.interval)
|
||||
&& Objects.equals(field, that.field)
|
||||
&& Objects.equals(delay, that.delay)
|
||||
&& Objects.equals(timeZone, that.timeZone);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(interval, field, delay, timeZone);
|
||||
}
|
||||
|
||||
public static DateHistogramGroupConfig fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
}
|
|
@ -1,171 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
/**
|
||||
* The configuration object for the groups section in the rollup config.
|
||||
* Basically just a wrapper for histo/date histo/terms objects
|
||||
*
|
||||
* {
|
||||
* "groups": [
|
||||
* "date_histogram": {...},
|
||||
* "histogram" : {...},
|
||||
* "terms" : {...}
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
public class GroupConfig implements Validatable, ToXContentObject {
|
||||
|
||||
static final String NAME = "groups";
|
||||
private static final ConstructingObjectParser<GroupConfig, Void> PARSER;
|
||||
static {
|
||||
PARSER = new ConstructingObjectParser<>(NAME, true, args ->
|
||||
new GroupConfig((DateHistogramGroupConfig) args[0], (HistogramGroupConfig) args[1], (TermsGroupConfig) args[2]));
|
||||
PARSER.declareObject(constructorArg(),
|
||||
(p, c) -> DateHistogramGroupConfig.fromXContent(p), new ParseField(DateHistogramGroupConfig.NAME));
|
||||
PARSER.declareObject(optionalConstructorArg(),
|
||||
(p, c) -> HistogramGroupConfig.fromXContent(p), new ParseField(HistogramGroupConfig.NAME));
|
||||
PARSER.declareObject(optionalConstructorArg(),
|
||||
(p, c) -> TermsGroupConfig.fromXContent(p), new ParseField(TermsGroupConfig.NAME));
|
||||
}
|
||||
|
||||
private final DateHistogramGroupConfig dateHistogram;
|
||||
private final @Nullable
|
||||
HistogramGroupConfig histogram;
|
||||
private final @Nullable
|
||||
TermsGroupConfig terms;
|
||||
|
||||
public GroupConfig(final DateHistogramGroupConfig dateHistogram) {
|
||||
this(dateHistogram, null, null);
|
||||
}
|
||||
|
||||
public GroupConfig(final DateHistogramGroupConfig dateHistogram,
|
||||
final @Nullable HistogramGroupConfig histogram,
|
||||
final @Nullable TermsGroupConfig terms) {
|
||||
this.dateHistogram = dateHistogram;
|
||||
this.histogram = histogram;
|
||||
this.terms = terms;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
final ValidationException validationException = new ValidationException();
|
||||
if (dateHistogram != null) {
|
||||
final Optional<ValidationException> dateHistogramValidationErrors = dateHistogram.validate();
|
||||
if (dateHistogramValidationErrors != null && dateHistogramValidationErrors.isPresent()) {
|
||||
validationException.addValidationErrors(dateHistogramValidationErrors.get());
|
||||
}
|
||||
} else {
|
||||
validationException.addValidationError("Date histogram must not be null");
|
||||
}
|
||||
if (histogram != null) {
|
||||
final Optional<ValidationException> histogramValidationErrors = histogram.validate();
|
||||
if (histogramValidationErrors != null && histogramValidationErrors.isPresent()) {
|
||||
validationException.addValidationErrors(histogramValidationErrors.get());
|
||||
}
|
||||
}
|
||||
if (terms != null) {
|
||||
final Optional<ValidationException> termsValidationErrors = terms.validate();
|
||||
if (termsValidationErrors != null && termsValidationErrors.isPresent()) {
|
||||
validationException.addValidationErrors(termsValidationErrors.get());
|
||||
}
|
||||
}
|
||||
if (validationException.validationErrors().isEmpty()) {
|
||||
return Optional.empty();
|
||||
}
|
||||
return Optional.of(validationException);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the configuration of the date histogram
|
||||
*/
|
||||
public DateHistogramGroupConfig getDateHistogram() {
|
||||
return dateHistogram;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the configuration of the histogram
|
||||
*/
|
||||
@Nullable
|
||||
public HistogramGroupConfig getHistogram() {
|
||||
return histogram;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the configuration of the terms
|
||||
*/
|
||||
@Nullable
|
||||
public TermsGroupConfig getTerms() {
|
||||
return terms;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(DateHistogramGroupConfig.NAME, dateHistogram);
|
||||
if (histogram != null) {
|
||||
builder.field(HistogramGroupConfig.NAME, histogram);
|
||||
}
|
||||
if (terms != null) {
|
||||
builder.field(TermsGroupConfig.NAME, terms);
|
||||
}
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final GroupConfig that = (GroupConfig) other;
|
||||
return Objects.equals(dateHistogram, that.dateHistogram)
|
||||
&& Objects.equals(histogram, that.histogram)
|
||||
&& Objects.equals(terms, that.terms);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(dateHistogram, histogram, terms);
|
||||
}
|
||||
|
||||
public static GroupConfig fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
}
|
|
@ -1,127 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
/**
|
||||
* The configuration object for the histograms in the rollup config
|
||||
*
|
||||
* {
|
||||
* "groups": [
|
||||
* "histogram": {
|
||||
* "fields" : [ "foo", "bar" ],
|
||||
* "interval" : 123
|
||||
* }
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
public class HistogramGroupConfig implements Validatable, ToXContentObject {
|
||||
|
||||
static final String NAME = "histogram";
|
||||
private static final String INTERVAL = "interval";
|
||||
private static final String FIELDS = "fields";
|
||||
|
||||
private static final ConstructingObjectParser<HistogramGroupConfig, Void> PARSER;
|
||||
static {
|
||||
PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
|
||||
@SuppressWarnings("unchecked") List<String> fields = (List<String>) args[1];
|
||||
return new HistogramGroupConfig((long) args[0], fields != null ? fields.toArray(new String[fields.size()]) : null);
|
||||
});
|
||||
PARSER.declareLong(constructorArg(), new ParseField(INTERVAL));
|
||||
PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
|
||||
}
|
||||
|
||||
private final long interval;
|
||||
private final String[] fields;
|
||||
|
||||
public HistogramGroupConfig(final long interval, final String... fields) {
|
||||
this.interval = interval;
|
||||
this.fields = fields;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
final ValidationException validationException = new ValidationException();
|
||||
if (fields == null || fields.length == 0) {
|
||||
validationException.addValidationError("Fields must have at least one value");
|
||||
}
|
||||
if (interval <= 0) {
|
||||
validationException.addValidationError("Interval must be a positive long");
|
||||
}
|
||||
if (validationException.validationErrors().isEmpty()) {
|
||||
return Optional.empty();
|
||||
}
|
||||
return Optional.of(validationException);
|
||||
}
|
||||
|
||||
public long getInterval() {
|
||||
return interval;
|
||||
}
|
||||
|
||||
public String[] getFields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(INTERVAL, interval);
|
||||
builder.field(FIELDS, fields);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final HistogramGroupConfig that = (HistogramGroupConfig) other;
|
||||
return Objects.equals(interval, that.interval) && Arrays.equals(fields, that.fields);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(interval, Arrays.hashCode(fields));
|
||||
}
|
||||
|
||||
public static HistogramGroupConfig fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
}
|
|
@ -1,135 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
/**
|
||||
* The configuration object for the metrics portion of a rollup job config
|
||||
*
|
||||
* {
|
||||
* "metrics": [
|
||||
* {
|
||||
* "field": "foo",
|
||||
* "metrics": [ "min", "max", "sum"]
|
||||
* },
|
||||
* {
|
||||
* "field": "bar",
|
||||
* "metrics": [ "max" ]
|
||||
* }
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
public class MetricConfig implements Validatable, ToXContentObject {
|
||||
|
||||
static final String NAME = "metrics";
|
||||
private static final String FIELD = "field";
|
||||
private static final String METRICS = "metrics";
|
||||
|
||||
private static final ConstructingObjectParser<MetricConfig, Void> PARSER;
|
||||
static {
|
||||
PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
|
||||
@SuppressWarnings("unchecked") List<String> metrics = (List<String>) args[1];
|
||||
return new MetricConfig((String) args[0], metrics);
|
||||
});
|
||||
PARSER.declareString(constructorArg(), new ParseField(FIELD));
|
||||
PARSER.declareStringArray(constructorArg(), new ParseField(METRICS));
|
||||
}
|
||||
|
||||
private final String field;
|
||||
private final List<String> metrics;
|
||||
|
||||
public MetricConfig(final String field, final List<String> metrics) {
|
||||
this.field = field;
|
||||
this.metrics = metrics;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
final ValidationException validationException = new ValidationException();
|
||||
if (field == null || field.isEmpty()) {
|
||||
validationException.addValidationError("Field name is required");
|
||||
}
|
||||
if (metrics == null || metrics.isEmpty()) {
|
||||
validationException.addValidationError("Metrics must be a non-null, non-empty array of strings");
|
||||
}
|
||||
if (validationException.validationErrors().isEmpty()) {
|
||||
return Optional.empty();
|
||||
}
|
||||
return Optional.of(validationException);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the name of the field used in the metric configuration. Never {@code null}.
|
||||
*/
|
||||
public String getField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the names of the metrics used in the metric configuration. Never {@code null}.
|
||||
*/
|
||||
public List<String> getMetrics() {
|
||||
return metrics;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(FIELD, field);
|
||||
builder.field(METRICS, metrics);
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final MetricConfig that = (MetricConfig) other;
|
||||
return Objects.equals(field, that.field) && Objects.equals(metrics, that.metrics);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(field, metrics);
|
||||
}
|
||||
|
||||
public static MetricConfig fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
}
|
|
@ -1,242 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
/**
|
||||
* This class holds the configuration details of a rollup job, such as the groupings, metrics, what
|
||||
* index to rollup and where to roll them to.
|
||||
*/
|
||||
public class RollupJobConfig implements Validatable, ToXContentObject {
|
||||
|
||||
private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(20);
|
||||
private static final String ID = "id";
|
||||
private static final String TIMEOUT = "timeout";
|
||||
private static final String CRON = "cron";
|
||||
private static final String PAGE_SIZE = "page_size";
|
||||
private static final String INDEX_PATTERN = "index_pattern";
|
||||
private static final String ROLLUP_INDEX = "rollup_index";
|
||||
|
||||
private final String id;
|
||||
private final String indexPattern;
|
||||
private final String rollupIndex;
|
||||
private final GroupConfig groupConfig;
|
||||
private final List<MetricConfig> metricsConfig;
|
||||
private final TimeValue timeout;
|
||||
private final String cron;
|
||||
private final int pageSize;
|
||||
|
||||
private static final ConstructingObjectParser<RollupJobConfig, String> PARSER;
|
||||
static {
|
||||
PARSER = new ConstructingObjectParser<>("rollup_job_config", true, (args, optionalId) -> {
|
||||
String id = args[0] != null ? (String) args[0] : optionalId;
|
||||
String indexPattern = (String) args[1];
|
||||
String rollupIndex = (String) args[2];
|
||||
GroupConfig groupConfig = (GroupConfig) args[3];
|
||||
@SuppressWarnings("unchecked")
|
||||
List<MetricConfig> metricsConfig = (List<MetricConfig>) args[4];
|
||||
TimeValue timeout = (TimeValue) args[5];
|
||||
String cron = (String) args[6];
|
||||
int pageSize = (int) args[7];
|
||||
return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groupConfig, metricsConfig, timeout);
|
||||
});
|
||||
PARSER.declareString(optionalConstructorArg(), new ParseField(ID));
|
||||
PARSER.declareString(constructorArg(), new ParseField(INDEX_PATTERN));
|
||||
PARSER.declareString(constructorArg(), new ParseField(ROLLUP_INDEX));
|
||||
PARSER.declareObject(optionalConstructorArg(), (p, c) -> GroupConfig.fromXContent(p), new ParseField(GroupConfig.NAME));
|
||||
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> MetricConfig.fromXContent(p), new ParseField(MetricConfig.NAME));
|
||||
PARSER.declareField(optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), TIMEOUT),
|
||||
new ParseField(TIMEOUT), ObjectParser.ValueType.STRING_OR_NULL);
|
||||
PARSER.declareString(constructorArg(), new ParseField(CRON));
|
||||
PARSER.declareInt(constructorArg(), new ParseField(PAGE_SIZE));
|
||||
}
|
||||
|
||||
public RollupJobConfig(final String id,
|
||||
final String indexPattern,
|
||||
final String rollupIndex,
|
||||
final String cron,
|
||||
final int pageSize,
|
||||
final GroupConfig groupConfig,
|
||||
final List<MetricConfig> metricsConfig,
|
||||
final @Nullable TimeValue timeout) {
|
||||
this.id = id;
|
||||
this.indexPattern = indexPattern;
|
||||
this.rollupIndex = rollupIndex;
|
||||
this.groupConfig = groupConfig;
|
||||
this.metricsConfig = metricsConfig != null ? metricsConfig : Collections.emptyList();
|
||||
this.timeout = timeout != null ? timeout : DEFAULT_TIMEOUT;
|
||||
this.cron = cron;
|
||||
this.pageSize = pageSize;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
final ValidationException validationException = new ValidationException();
|
||||
if (id == null || id.isEmpty()) {
|
||||
validationException.addValidationError("Id must be a non-null, non-empty string");
|
||||
}
|
||||
if (indexPattern == null || indexPattern.isEmpty()) {
|
||||
validationException.addValidationError("Index pattern must be a non-null, non-empty string");
|
||||
} else if (Regex.isMatchAllPattern(indexPattern)) {
|
||||
validationException.addValidationError("Index pattern must not match all indices (as it would match it's own rollup index");
|
||||
} else if (indexPattern != null && indexPattern.equals(rollupIndex)) {
|
||||
validationException.addValidationError("Rollup index may not be the same as the index pattern");
|
||||
} else if (Regex.isSimpleMatchPattern(indexPattern) && Regex.simpleMatch(indexPattern, rollupIndex)) {
|
||||
validationException.addValidationError("Index pattern would match rollup index name which is not allowed");
|
||||
}
|
||||
|
||||
if (rollupIndex == null || rollupIndex.isEmpty()) {
|
||||
validationException.addValidationError("Rollup index must be a non-null, non-empty string");
|
||||
}
|
||||
if (cron == null || cron.isEmpty()) {
|
||||
validationException.addValidationError("Cron schedule must be a non-null, non-empty string");
|
||||
}
|
||||
if (pageSize <= 0) {
|
||||
validationException.addValidationError("Page size is mandatory and must be a positive long");
|
||||
}
|
||||
if (groupConfig == null && (metricsConfig == null || metricsConfig.isEmpty())) {
|
||||
validationException.addValidationError("At least one grouping or metric must be configured");
|
||||
}
|
||||
if (groupConfig != null) {
|
||||
final Optional<ValidationException> groupValidationErrors = groupConfig.validate();
|
||||
if (groupValidationErrors != null && groupValidationErrors.isPresent()) {
|
||||
validationException.addValidationErrors(groupValidationErrors.get());
|
||||
}
|
||||
}
|
||||
if (metricsConfig != null) {
|
||||
for (MetricConfig metricConfig : metricsConfig) {
|
||||
final Optional<ValidationException> metricsValidationErrors = metricConfig.validate();
|
||||
if (metricsValidationErrors != null && metricsValidationErrors.isPresent()) {
|
||||
validationException.addValidationErrors(metricsValidationErrors.get());
|
||||
}
|
||||
}
|
||||
}
|
||||
if (validationException.validationErrors().isEmpty()) {
|
||||
return Optional.empty();
|
||||
}
|
||||
return Optional.of(validationException);
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public GroupConfig getGroupConfig() {
|
||||
return groupConfig;
|
||||
}
|
||||
|
||||
public List<MetricConfig> getMetricsConfig() {
|
||||
return metricsConfig;
|
||||
}
|
||||
|
||||
public TimeValue getTimeout() {
|
||||
return timeout;
|
||||
}
|
||||
|
||||
public String getIndexPattern() {
|
||||
return indexPattern;
|
||||
}
|
||||
|
||||
public String getRollupIndex() {
|
||||
return rollupIndex;
|
||||
}
|
||||
|
||||
public String getCron() {
|
||||
return cron;
|
||||
}
|
||||
|
||||
public int getPageSize() {
|
||||
return pageSize;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(ID, id);
|
||||
builder.field(INDEX_PATTERN, indexPattern);
|
||||
builder.field(ROLLUP_INDEX, rollupIndex);
|
||||
builder.field(CRON, cron);
|
||||
if (groupConfig != null) {
|
||||
builder.field(GroupConfig.NAME, groupConfig);
|
||||
}
|
||||
if (metricsConfig != null) {
|
||||
builder.startArray(MetricConfig.NAME);
|
||||
for (MetricConfig metric : metricsConfig) {
|
||||
metric.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if (timeout != null) {
|
||||
builder.field(TIMEOUT, timeout.getStringRep());
|
||||
}
|
||||
builder.field(PAGE_SIZE, pageSize);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final RollupJobConfig that = (RollupJobConfig) other;
|
||||
return Objects.equals(this.id, that.id)
|
||||
&& Objects.equals(this.indexPattern, that.indexPattern)
|
||||
&& Objects.equals(this.rollupIndex, that.rollupIndex)
|
||||
&& Objects.equals(this.cron, that.cron)
|
||||
&& Objects.equals(this.groupConfig, that.groupConfig)
|
||||
&& Objects.equals(this.metricsConfig, that.metricsConfig)
|
||||
&& Objects.equals(this.timeout, that.timeout)
|
||||
&& Objects.equals(this.pageSize, that.pageSize);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(id, indexPattern, rollupIndex, cron, groupConfig, metricsConfig, timeout, pageSize);
|
||||
}
|
||||
|
||||
public static RollupJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) throws IOException {
|
||||
return PARSER.parse(parser, optionalJobId);
|
||||
}
|
||||
}
|
|
@ -1,115 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
/**
|
||||
* The configuration object for the histograms in the rollup config
|
||||
*
|
||||
* {
|
||||
* "groups": [
|
||||
* "terms": {
|
||||
* "fields" : [ "foo", "bar" ]
|
||||
* }
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
public class TermsGroupConfig implements Validatable, ToXContentObject {
|
||||
|
||||
static final String NAME = "terms";
|
||||
private static final String FIELDS = "fields";
|
||||
|
||||
private static final ConstructingObjectParser<TermsGroupConfig, Void> PARSER;
|
||||
static {
|
||||
PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
|
||||
@SuppressWarnings("unchecked") List<String> fields = (List<String>) args[0];
|
||||
return new TermsGroupConfig(fields != null ? fields.toArray(new String[fields.size()]) : null);
|
||||
});
|
||||
PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
|
||||
}
|
||||
|
||||
private final String[] fields;
|
||||
|
||||
public TermsGroupConfig(final String... fields) {
|
||||
this.fields = fields;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ValidationException> validate() {
|
||||
final ValidationException validationException = new ValidationException();
|
||||
if (fields == null || fields.length == 0) {
|
||||
validationException.addValidationError("Fields must have at least one value");
|
||||
}
|
||||
if (validationException.validationErrors().isEmpty()) {
|
||||
return Optional.empty();
|
||||
}
|
||||
return Optional.of(validationException);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the names of the fields. Never {@code null}.
|
||||
*/
|
||||
public String[] getFields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(FIELDS, fields);
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final TermsGroupConfig that = (TermsGroupConfig) other;
|
||||
return Arrays.equals(fields, that.fields);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Arrays.hashCode(fields);
|
||||
}
|
||||
|
||||
public static TermsGroupConfig fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
}
|
|
@ -910,7 +910,6 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
if (apiName.startsWith("xpack.") == false &&
|
||||
apiName.startsWith("license.") == false &&
|
||||
apiName.startsWith("machine_learning.") == false &&
|
||||
apiName.startsWith("rollup.") == false &&
|
||||
apiName.startsWith("watcher.") == false &&
|
||||
apiName.startsWith("graph.") == false &&
|
||||
apiName.startsWith("migration.") == false &&
|
||||
|
|
|
@ -1,507 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.ElasticsearchStatusException;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
|
||||
import org.elasticsearch.action.bulk.BulkItemResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.support.WriteRequest;
|
||||
import org.elasticsearch.client.core.IndexerState;
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupIndexCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupIndexCapsResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.RollableIndexCaps;
|
||||
import org.elasticsearch.client.rollup.RollupJobCaps;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.GroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.MetricConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
|
||||
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.closeTo;
|
||||
import static org.hamcrest.Matchers.either;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.hasKey;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.lessThan;
|
||||
|
||||
public class RollupIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
double sum = 0.0d;
|
||||
int max = Integer.MIN_VALUE;
|
||||
int min = Integer.MAX_VALUE;
|
||||
private static final List<String> SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
|
||||
SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);
|
||||
|
||||
private String id;
|
||||
private String indexPattern;
|
||||
private String rollupIndex;
|
||||
private String cron;
|
||||
private int pageSize;
|
||||
private int numDocs;
|
||||
|
||||
@Before
|
||||
public void init() throws Exception {
|
||||
id = randomAlphaOfLength(10);
|
||||
indexPattern = randomFrom("docs", "d*", "doc*");
|
||||
rollupIndex = randomFrom("rollup", "test");
|
||||
cron = "*/1 * * * * ?";
|
||||
numDocs = indexDocs();
|
||||
pageSize = randomIntBetween(numDocs, numDocs * 10);
|
||||
}
|
||||
|
||||
public int indexDocs() throws Exception {
|
||||
final BulkRequest bulkRequest = new BulkRequest();
|
||||
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
for (int minute = 0; minute < 60; minute++) {
|
||||
for (int second = 0; second < 60; second = second + 10) {
|
||||
final int value = randomIntBetween(0, 100);
|
||||
|
||||
final IndexRequest indexRequest = new IndexRequest("docs");
|
||||
indexRequest.source(jsonBuilder()
|
||||
.startObject()
|
||||
.field("value", value)
|
||||
.field("date", String.format(Locale.ROOT, "2018-01-01T00:%02d:%02dZ", minute, second))
|
||||
.endObject());
|
||||
bulkRequest.add(indexRequest);
|
||||
|
||||
sum += value;
|
||||
if (value > max) {
|
||||
max = value;
|
||||
}
|
||||
if (value < min) {
|
||||
min = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final int numDocs = bulkRequest.numberOfActions();
|
||||
|
||||
BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
|
||||
assertEquals(RestStatus.OK, bulkResponse.status());
|
||||
if (bulkResponse.hasFailures()) {
|
||||
for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
|
||||
if (itemResponse.isFailed()) {
|
||||
logger.fatal(itemResponse.getFailureMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
assertFalse(bulkResponse.hasFailures());
|
||||
|
||||
RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
|
||||
assertEquals(0, refreshResponse.getFailedShards());
|
||||
return numDocs;
|
||||
}
|
||||
|
||||
|
||||
public void testDeleteRollupJob() throws Exception {
|
||||
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY));
|
||||
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
|
||||
final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
|
||||
PutRollupJobRequest putRollupJobRequest =
|
||||
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
|
||||
final RollupClient rollupClient = highLevelClient().rollup();
|
||||
execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
|
||||
DeleteRollupJobRequest deleteRollupJobRequest = new DeleteRollupJobRequest(id);
|
||||
AcknowledgedResponse deleteRollupJobResponse = highLevelClient().rollup()
|
||||
.deleteRollupJob(deleteRollupJobRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(deleteRollupJobResponse.isAcknowledged());
|
||||
}
|
||||
|
||||
public void testDeleteMissingRollupJob() {
|
||||
DeleteRollupJobRequest deleteRollupJobRequest = new DeleteRollupJobRequest(randomAlphaOfLength(10));
|
||||
ElasticsearchStatusException responseException = expectThrows(ElasticsearchStatusException.class,() -> highLevelClient().rollup()
|
||||
.deleteRollupJob(deleteRollupJobRequest, RequestOptions.DEFAULT));
|
||||
assertThat(responseException.status().getStatus(), is(404));
|
||||
}
|
||||
|
||||
public void testPutStartAndGetRollupJob() throws Exception {
|
||||
// TODO expand this to also test with histogram and terms?
|
||||
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY));
|
||||
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
|
||||
final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
|
||||
|
||||
PutRollupJobRequest putRollupJobRequest =
|
||||
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
|
||||
|
||||
final RollupClient rollupClient = highLevelClient().rollup();
|
||||
AcknowledgedResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
|
||||
assertTrue(response.isAcknowledged());
|
||||
|
||||
StartRollupJobRequest startRequest = new StartRollupJobRequest(id);
|
||||
StartRollupJobResponse startResponse = execute(startRequest, rollupClient::startRollupJob, rollupClient::startRollupJobAsync);
|
||||
assertTrue(startResponse.isAcknowledged());
|
||||
|
||||
assertBusy(() -> {
|
||||
SearchResponse searchResponse = highLevelClient().search(new SearchRequest(rollupIndex), RequestOptions.DEFAULT);
|
||||
assertEquals(0, searchResponse.getFailedShards());
|
||||
assertEquals(1L, searchResponse.getHits().getTotalHits().value);
|
||||
|
||||
SearchHit searchHit = searchResponse.getHits().getAt(0);
|
||||
Map<String, Object> source = searchHit.getSourceAsMap();
|
||||
assertNotNull(source);
|
||||
|
||||
assertEquals(numDocs, source.get("date.date_histogram._count"));
|
||||
assertEquals(groups.getDateHistogram().getInterval().toString(), source.get("date.date_histogram.interval"));
|
||||
assertEquals(groups.getDateHistogram().getTimeZone(), source.get("date.date_histogram.time_zone"));
|
||||
|
||||
for (MetricConfig metric : metrics) {
|
||||
for (String name : metric.getMetrics()) {
|
||||
Number value = (Number) source.get(metric.getField() + "." + name + ".value");
|
||||
if ("min".equals(name)) {
|
||||
assertEquals(min, value.intValue());
|
||||
} else if ("max".equals(name)) {
|
||||
assertEquals(max, value.intValue());
|
||||
} else if ("sum".equals(name)) {
|
||||
assertEquals(sum, value.doubleValue(), 0.0d);
|
||||
} else if ("avg".equals(name)) {
|
||||
assertEquals(sum, value.doubleValue(), 0.0d);
|
||||
Number avgCount = (Number) source.get(metric.getField() + "." + name + "._count");
|
||||
assertEquals(numDocs, avgCount.intValue());
|
||||
} else if ("value_count".equals(name)) {
|
||||
assertEquals(numDocs, value.intValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
GetRollupJobRequest getRollupJobRequest = randomBoolean() ? new GetRollupJobRequest() : new GetRollupJobRequest(id);
|
||||
GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
|
||||
assertThat(getResponse.getJobs(), hasSize(1));
|
||||
JobWrapper job = getResponse.getJobs().get(0);
|
||||
assertEquals(putRollupJobRequest.getConfig(), job.getJob());
|
||||
assertThat(job.getStats().getNumPages(), lessThan(10L));
|
||||
assertEquals(numDocs, job.getStats().getNumDocuments());
|
||||
assertThat(job.getStats().getNumInvocations(), greaterThan(0L));
|
||||
assertEquals(1, job.getStats().getOutputDocuments());
|
||||
assertThat(job.getStatus().getState(), either(equalTo(IndexerState.STARTED)).or(equalTo(IndexerState.INDEXING)));
|
||||
assertThat(job.getStatus().getCurrentPosition(), hasKey("date.date_histogram"));
|
||||
assertEquals(true, job.getStatus().getUpgradedDocumentId());
|
||||
|
||||
// stop the job
|
||||
StopRollupJobRequest stopRequest = new StopRollupJobRequest(id);
|
||||
stopRequest.waitForCompletion(randomBoolean());
|
||||
StopRollupJobResponse stopResponse = execute(stopRequest, rollupClient::stopRollupJob, rollupClient::stopRollupJobAsync);
|
||||
assertTrue(stopResponse.isAcknowledged());
|
||||
if (stopRequest.waitForCompletion()) {
|
||||
getResponse = execute(new GetRollupJobRequest(id), rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
|
||||
assertThat(getResponse.getJobs(), hasSize(1));
|
||||
assertThat(getResponse.getJobs().get(0).getStatus().getState(), equalTo(IndexerState.STOPPED));
|
||||
}
|
||||
}
|
||||
|
||||
public void testSearch() throws Exception {
|
||||
testPutStartAndGetRollupJob();
|
||||
SearchRequest search = new SearchRequest(rollupIndex);
|
||||
search.source(new SearchSourceBuilder()
|
||||
.size(0)
|
||||
.aggregation(new AvgAggregationBuilder("avg").field("value")));
|
||||
SearchResponse response = highLevelClient().rollup().search(search, RequestOptions.DEFAULT);
|
||||
assertEquals(0, response.getFailedShards());
|
||||
assertEquals(0, response.getHits().getTotalHits().value);
|
||||
NumericMetricsAggregation.SingleValue avg = response.getAggregations().get("avg");
|
||||
assertThat(avg.value(), closeTo(sum / numDocs, 0.00000001));
|
||||
}
|
||||
|
||||
public void testSearchWithType() throws Exception {
|
||||
SearchRequest search = new SearchRequest(rollupIndex);
|
||||
search.types("a", "b", "c");
|
||||
search.source(new SearchSourceBuilder()
|
||||
.size(0)
|
||||
.aggregation(new AvgAggregationBuilder("avg").field("value")));
|
||||
try {
|
||||
highLevelClient().rollup().search(search, RequestOptions.DEFAULT);
|
||||
fail("types are not allowed but didn't fail");
|
||||
} catch (ValidationException e) {
|
||||
assertEquals("Validation Failed: 1: types are not allowed in rollup search;", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetMissingRollupJob() throws Exception {
|
||||
GetRollupJobRequest getRollupJobRequest = new GetRollupJobRequest("missing");
|
||||
RollupClient rollupClient = highLevelClient().rollup();
|
||||
GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
|
||||
assertThat(getResponse.getJobs(), empty());
|
||||
}
|
||||
|
||||
public void testGetRollupCaps() throws Exception {
|
||||
final Set<Integer> values = new HashSet<>();
|
||||
double sum = 0.0d;
|
||||
int max = Integer.MIN_VALUE;
|
||||
int min = Integer.MAX_VALUE;
|
||||
|
||||
final BulkRequest bulkRequest = new BulkRequest();
|
||||
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
for (int minute = 0; minute < 60; minute++) {
|
||||
for (int second = 0; second < 60; second = second + 10) {
|
||||
final int value = randomIntBetween(0, 100);
|
||||
|
||||
final IndexRequest indexRequest = new IndexRequest("docs");
|
||||
indexRequest.source(jsonBuilder()
|
||||
.startObject()
|
||||
.field("value", value)
|
||||
.field("date", String.format(Locale.ROOT, "2018-01-01T00:%02d:%02dZ", minute, second))
|
||||
.endObject());
|
||||
bulkRequest.add(indexRequest);
|
||||
|
||||
values.add(value);
|
||||
sum += value;
|
||||
if (value > max) {
|
||||
max = value;
|
||||
}
|
||||
if (value < min) {
|
||||
min = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final int numDocs = bulkRequest.numberOfActions();
|
||||
|
||||
BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
|
||||
assertEquals(RestStatus.OK, bulkResponse.status());
|
||||
if (bulkResponse.hasFailures()) {
|
||||
for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
|
||||
if (itemResponse.isFailed()) {
|
||||
logger.fatal(itemResponse.getFailureMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
assertFalse(bulkResponse.hasFailures());
|
||||
|
||||
RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
|
||||
assertEquals(0, refreshResponse.getFailedShards());
|
||||
|
||||
final String id = randomAlphaOfLength(10);
|
||||
final String indexPattern = randomFrom("docs", "d*", "doc*");
|
||||
final String rollupIndex = randomFrom("rollup", "test");
|
||||
final String cron = "*/1 * * * * ?";
|
||||
final int pageSize = randomIntBetween(numDocs, numDocs * 10);
|
||||
// TODO expand this to also test with histogram and terms?
|
||||
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY));
|
||||
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
|
||||
final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
|
||||
|
||||
PutRollupJobRequest putRollupJobRequest =
|
||||
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
|
||||
|
||||
final RollupClient rollupClient = highLevelClient().rollup();
|
||||
AcknowledgedResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
|
||||
assertTrue(response.isAcknowledged());
|
||||
|
||||
// wait for the PutJob api to create the index w/ metadata
|
||||
highLevelClient().cluster().health(new ClusterHealthRequest(rollupIndex).waitForYellowStatus(), RequestOptions.DEFAULT);
|
||||
|
||||
GetRollupCapsRequest getRollupCapsRequest = new GetRollupCapsRequest(indexPattern);
|
||||
GetRollupCapsResponse capsResponse = highLevelClient().rollup()
|
||||
.getRollupCapabilities(getRollupCapsRequest, RequestOptions.DEFAULT);
|
||||
|
||||
assertNotNull(capsResponse);
|
||||
Map<String, RollableIndexCaps> rolledPatterns = capsResponse.getJobs();
|
||||
assertThat(rolledPatterns.size(), equalTo(1));
|
||||
|
||||
RollableIndexCaps docsPattern = rolledPatterns.get(indexPattern);
|
||||
assertThat(docsPattern.getIndexName(), equalTo(indexPattern));
|
||||
|
||||
List<RollupJobCaps> rollupJobs = docsPattern.getJobCaps();
|
||||
assertThat(rollupJobs.size(), equalTo(1));
|
||||
|
||||
RollupJobCaps jobCaps = rollupJobs.get(0);
|
||||
assertThat(jobCaps.getJobID(), equalTo(id));
|
||||
assertThat(jobCaps.getRollupIndex(), equalTo(rollupIndex));
|
||||
assertThat(jobCaps.getIndexPattern(), equalTo(indexPattern));
|
||||
|
||||
Map<String, RollupJobCaps.RollupFieldCaps> fieldCaps = jobCaps.getFieldCaps();
|
||||
|
||||
List<Map<String, Object>> timestampCaps = fieldCaps.get("date").getAggs();
|
||||
for (Map.Entry<String, Object> entry : timestampCaps.get(0).entrySet()) {
|
||||
switch (entry.getKey()) {
|
||||
case "agg":
|
||||
assertThat(entry.getValue(), equalTo("date_histogram"));
|
||||
break;
|
||||
case "delay":
|
||||
assertThat(entry.getValue(), equalTo("foo"));
|
||||
break;
|
||||
case "calendar_interval":
|
||||
assertThat(entry.getValue(), equalTo("1d"));
|
||||
break;
|
||||
case "time_zone":
|
||||
assertThat(entry.getValue(), equalTo("UTC"));
|
||||
break;
|
||||
default:
|
||||
fail("Unknown field cap: [" + entry.getKey() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
List<Map<String, Object>> valueCaps = fieldCaps.get("value").getAggs();
|
||||
assertThat(valueCaps.size(), equalTo(SUPPORTED_METRICS.size()));
|
||||
}
|
||||
|
||||
public void testGetRollupIndexCaps() throws Exception {
|
||||
final Set<Integer> values = new HashSet<>();
|
||||
double sum = 0.0d;
|
||||
int max = Integer.MIN_VALUE;
|
||||
int min = Integer.MAX_VALUE;
|
||||
|
||||
final BulkRequest bulkRequest = new BulkRequest();
|
||||
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
for (int minute = 0; minute < 60; minute++) {
|
||||
for (int second = 0; second < 60; second = second + 10) {
|
||||
final int value = randomIntBetween(0, 100);
|
||||
|
||||
final IndexRequest indexRequest = new IndexRequest("docs");
|
||||
indexRequest.source(jsonBuilder()
|
||||
.startObject()
|
||||
.field("value", value)
|
||||
.field("date", String.format(Locale.ROOT, "2018-01-01T00:%02d:%02dZ", minute, second))
|
||||
.endObject());
|
||||
bulkRequest.add(indexRequest);
|
||||
|
||||
values.add(value);
|
||||
sum += value;
|
||||
if (value > max) {
|
||||
max = value;
|
||||
}
|
||||
if (value < min) {
|
||||
min = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final int numDocs = bulkRequest.numberOfActions();
|
||||
|
||||
BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
|
||||
assertEquals(RestStatus.OK, bulkResponse.status());
|
||||
if (bulkResponse.hasFailures()) {
|
||||
for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
|
||||
if (itemResponse.isFailed()) {
|
||||
logger.fatal(itemResponse.getFailureMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
assertFalse(bulkResponse.hasFailures());
|
||||
|
||||
RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
|
||||
assertEquals(0, refreshResponse.getFailedShards());
|
||||
|
||||
final String id = randomAlphaOfLength(10);
|
||||
final String indexPattern = randomFrom("docs", "d*", "doc*");
|
||||
final String rollupIndex = randomFrom("rollup", "test");
|
||||
final String cron = "*/1 * * * * ?";
|
||||
final int pageSize = randomIntBetween(numDocs, numDocs * 10);
|
||||
// TODO expand this to also test with histogram and terms?
|
||||
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY));
|
||||
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
|
||||
final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
|
||||
|
||||
PutRollupJobRequest putRollupJobRequest =
|
||||
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
|
||||
|
||||
final RollupClient rollupClient = highLevelClient().rollup();
|
||||
AcknowledgedResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
|
||||
assertTrue(response.isAcknowledged());
|
||||
|
||||
// wait for the PutJob api to create the index w/ metadata
|
||||
highLevelClient().cluster().health(new ClusterHealthRequest(rollupIndex).waitForYellowStatus(), RequestOptions.DEFAULT);
|
||||
|
||||
GetRollupIndexCapsRequest getRollupIndexCapsRequest = new GetRollupIndexCapsRequest(rollupIndex);
|
||||
GetRollupIndexCapsResponse capsResponse = highLevelClient().rollup()
|
||||
.getRollupIndexCapabilities(getRollupIndexCapsRequest, RequestOptions.DEFAULT);
|
||||
|
||||
assertNotNull(capsResponse);
|
||||
Map<String, RollableIndexCaps> rolledPatterns = capsResponse.getJobs();
|
||||
assertThat(rolledPatterns.size(), equalTo(1));
|
||||
|
||||
RollableIndexCaps docsPattern = rolledPatterns.get(rollupIndex);
|
||||
assertThat(docsPattern.getIndexName(), equalTo(rollupIndex));
|
||||
|
||||
List<RollupJobCaps> rollupJobs = docsPattern.getJobCaps();
|
||||
assertThat(rollupJobs.size(), equalTo(1));
|
||||
|
||||
RollupJobCaps jobCaps = rollupJobs.get(0);
|
||||
assertThat(jobCaps.getJobID(), equalTo(id));
|
||||
assertThat(jobCaps.getRollupIndex(), equalTo(rollupIndex));
|
||||
assertThat(jobCaps.getIndexPattern(), equalTo(indexPattern));
|
||||
|
||||
Map<String, RollupJobCaps.RollupFieldCaps> fieldCaps = jobCaps.getFieldCaps();
|
||||
|
||||
List<Map<String, Object>> timestampCaps = fieldCaps.get("date").getAggs();
|
||||
for (Map.Entry<String, Object> entry : timestampCaps.get(0).entrySet()) {
|
||||
switch (entry.getKey()) {
|
||||
case "agg":
|
||||
assertThat(entry.getValue(), equalTo("date_histogram"));
|
||||
break;
|
||||
case "delay":
|
||||
assertThat(entry.getValue(), equalTo("foo"));
|
||||
break;
|
||||
case "calendar_interval":
|
||||
assertThat(entry.getValue(), equalTo("1d"));
|
||||
break;
|
||||
case "time_zone":
|
||||
assertThat(entry.getValue(), equalTo("UTC"));
|
||||
break;
|
||||
default:
|
||||
fail("Unknown field cap: [" + entry.getKey() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
List<Map<String, Object>> valueCaps = fieldCaps.get("value").getAggs();
|
||||
assertThat(valueCaps.size(), equalTo(SUPPORTED_METRICS.size()));
|
||||
}
|
||||
}
|
|
@ -1,103 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class RollupRequestConvertersTests extends ESTestCase {
|
||||
public void testPutJob() throws IOException {
|
||||
String job = randomAlphaOfLength(5);
|
||||
|
||||
RollupJobConfig config = RollupJobConfigTests.randomRollupJobConfig(job);
|
||||
PutRollupJobRequest put = new PutRollupJobRequest(config);
|
||||
|
||||
Request request = RollupRequestConverters.putJob(put);
|
||||
assertThat(request.getEndpoint(), equalTo("/_rollup/job/" + job));
|
||||
assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(request.getParameters().keySet(), empty());
|
||||
RequestConvertersTests.assertToXContentBody(put, request.getEntity());
|
||||
}
|
||||
|
||||
public void testStartJob() throws IOException {
|
||||
String jobId = randomAlphaOfLength(5);
|
||||
|
||||
StartRollupJobRequest startJob = new StartRollupJobRequest(jobId);
|
||||
|
||||
Request request = RollupRequestConverters.startJob(startJob);
|
||||
assertThat(request.getEndpoint(), equalTo("/_rollup/job/" + jobId + "/_start"));
|
||||
assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(request.getParameters().keySet(), empty());
|
||||
assertThat(request.getEntity(), nullValue());
|
||||
}
|
||||
|
||||
public void testStopJob() throws IOException {
|
||||
String jobId = randomAlphaOfLength(5);
|
||||
StopRollupJobRequest stopJob = new StopRollupJobRequest(jobId);
|
||||
String expectedTimeOutString = null;
|
||||
String expectedWaitForCompletion = null;
|
||||
int expectedParameters = 0;
|
||||
if (randomBoolean()) {
|
||||
stopJob.timeout(TimeValue.parseTimeValue(randomPositiveTimeValue(), "timeout"));
|
||||
expectedTimeOutString = stopJob.timeout().getStringRep();
|
||||
expectedParameters++;
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
stopJob.waitForCompletion(randomBoolean());
|
||||
expectedWaitForCompletion = stopJob.waitForCompletion().toString();
|
||||
expectedParameters++;
|
||||
}
|
||||
|
||||
Request request = RollupRequestConverters.stopJob(stopJob);
|
||||
assertThat(request.getEndpoint(), equalTo("/_rollup/job/" + jobId + "/_stop"));
|
||||
assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(request.getParameters().keySet().size(), equalTo(expectedParameters));
|
||||
assertThat(request.getParameters().get("timeout"), equalTo(expectedTimeOutString));
|
||||
assertThat(request.getParameters().get("wait_for_completion"), equalTo(expectedWaitForCompletion));
|
||||
assertNull(request.getEntity());
|
||||
}
|
||||
|
||||
public void testGetJob() {
|
||||
boolean getAll = randomBoolean();
|
||||
String job = getAll ? "_all" : RequestConvertersTests.randomIndicesNames(1, 1)[0];
|
||||
GetRollupJobRequest get = getAll ? new GetRollupJobRequest() : new GetRollupJobRequest(job);
|
||||
|
||||
Request request = RollupRequestConverters.getJob(get);
|
||||
assertThat(request.getEndpoint(), equalTo("/_rollup/job/" + job));
|
||||
assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(request.getParameters().keySet(), empty());
|
||||
assertThat(request.getEntity(), nullValue());
|
||||
}
|
||||
}
|
|
@ -46,14 +46,4 @@ public class AcknowledgedResponseTests extends AbstractResponseTestCase<org.elas
|
|||
assertThat(clientInstance.isAcknowledged(), is(serverTestInstance.isAcknowledged()));
|
||||
}
|
||||
|
||||
// Still needed for StopRollupJobResponseTests and StartRollupJobResponseTests test classes
|
||||
// This method can't be moved to these classes because getFieldName() method isn't accessible from these test classes.
|
||||
public static void toXContent(AcknowledgedResponse response, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(response.getFieldName(), response.isAcknowledged());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,675 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.documentation;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.support.WriteRequest;
|
||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.client.RollupClient;
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupIndexCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupIndexCapsResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.RollableIndexCaps;
|
||||
import org.elasticsearch.client.rollup.RollupJobCaps;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StopRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.GroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.HistogramGroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.MetricConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
|
||||
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.closeTo;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.oneOf;
|
||||
|
||||
public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
@Before
|
||||
public void setUpDocs() throws IOException {
|
||||
final BulkRequest bulkRequest = new BulkRequest();
|
||||
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
for (int i = 0; i < 50; i++) {
|
||||
final IndexRequest indexRequest = new IndexRequest("docs");
|
||||
indexRequest.source(jsonBuilder()
|
||||
.startObject()
|
||||
.field("timestamp", String.format(Locale.ROOT, "2018-01-01T00:%02d:00Z", i))
|
||||
.field("hostname", 0)
|
||||
.field("datacenter", 0)
|
||||
.field("temperature", i)
|
||||
.field("voltage", 0)
|
||||
.field("load", 0)
|
||||
.field("net_in", 0)
|
||||
.field("net_out", 0)
|
||||
.endObject());
|
||||
bulkRequest.add(indexRequest);
|
||||
}
|
||||
BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
|
||||
assertEquals(RestStatus.OK, bulkResponse.status());
|
||||
assertFalse(bulkResponse.hasFailures());
|
||||
|
||||
RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest("docs"), RequestOptions.DEFAULT);
|
||||
assertEquals(0, refreshResponse.getFailedShards());
|
||||
}
|
||||
|
||||
public void testCreateRollupJob() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
final String indexPattern = "docs";
|
||||
final String rollupIndex = "rollup";
|
||||
final String cron = "*/1 * * * * ?";
|
||||
final int pageSize = 100;
|
||||
final TimeValue timeout = null;
|
||||
|
||||
//tag::x-pack-rollup-put-rollup-job-group-config
|
||||
DateHistogramGroupConfig dateHistogram =
|
||||
new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1>
|
||||
TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter"); // <2>
|
||||
HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out"); // <3>
|
||||
|
||||
GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms); // <4>
|
||||
//end::x-pack-rollup-put-rollup-job-group-config
|
||||
|
||||
//tag::x-pack-rollup-put-rollup-job-metrics-config
|
||||
List<MetricConfig> metrics = new ArrayList<>(); // <1>
|
||||
metrics.add(new MetricConfig("temperature", Arrays.asList("min", "max", "sum"))); // <2>
|
||||
metrics.add(new MetricConfig("voltage", Arrays.asList("avg", "value_count"))); // <3>
|
||||
//end::x-pack-rollup-put-rollup-job-metrics-config
|
||||
{
|
||||
String id = "job_1";
|
||||
|
||||
//tag::x-pack-rollup-put-rollup-job-config
|
||||
RollupJobConfig config = new RollupJobConfig(id, // <1>
|
||||
indexPattern, // <2>
|
||||
rollupIndex, // <3>
|
||||
cron, // <4>
|
||||
pageSize, // <5>
|
||||
groups, // <6>
|
||||
metrics, // <7>
|
||||
timeout); // <8>
|
||||
//end::x-pack-rollup-put-rollup-job-config
|
||||
|
||||
//tag::x-pack-rollup-put-rollup-job-request
|
||||
PutRollupJobRequest request = new PutRollupJobRequest(config); // <1>
|
||||
//end::x-pack-rollup-put-rollup-job-request
|
||||
|
||||
//tag::x-pack-rollup-put-rollup-job-execute
|
||||
AcknowledgedResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
|
||||
//end::x-pack-rollup-put-rollup-job-execute
|
||||
|
||||
//tag::x-pack-rollup-put-rollup-job-response
|
||||
boolean acknowledged = response.isAcknowledged(); // <1>
|
||||
//end::x-pack-rollup-put-rollup-job-response
|
||||
assertTrue(acknowledged);
|
||||
}
|
||||
{
|
||||
String id = "job_2";
|
||||
RollupJobConfig config = new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout);
|
||||
PutRollupJobRequest request = new PutRollupJobRequest(config);
|
||||
// tag::x-pack-rollup-put-rollup-job-execute-listener
|
||||
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(AcknowledgedResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-rollup-put-rollup-job-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-rollup-put-rollup-job-execute-async
|
||||
client.rollup().putRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::x-pack-rollup-put-rollup-job-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testGetRollupJob() throws Exception {
|
||||
testCreateRollupJob();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-job-request
|
||||
GetRollupJobRequest getAll = new GetRollupJobRequest(); // <1>
|
||||
GetRollupJobRequest getJob = new GetRollupJobRequest("job_1"); // <2>
|
||||
// end::x-pack-rollup-get-rollup-job-request
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-job-execute
|
||||
GetRollupJobResponse response = client.rollup().getRollupJob(getJob, RequestOptions.DEFAULT);
|
||||
// end::x-pack-rollup-get-rollup-job-execute
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-job-response
|
||||
assertThat(response.getJobs(), hasSize(1));
|
||||
JobWrapper job = response.getJobs().get(0); // <1>
|
||||
RollupJobConfig config = job.getJob();
|
||||
RollupJobStatus status = job.getStatus();
|
||||
RollupIndexerJobStats stats = job.getStats();
|
||||
// end::x-pack-rollup-get-rollup-job-response
|
||||
assertNotNull(config);
|
||||
assertNotNull(status);
|
||||
assertNotNull(status);
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-job-execute-listener
|
||||
ActionListener<GetRollupJobResponse> listener = new ActionListener<GetRollupJobResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetRollupJobResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-rollup-get-rollup-job-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-job-execute-async
|
||||
client.rollup().getRollupJobAsync(getJob, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::x-pack-rollup-get-rollup-job-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testStartRollupJob() throws Exception {
|
||||
testCreateRollupJob();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
String id = "job_1";
|
||||
// tag::rollup-start-job-request
|
||||
StartRollupJobRequest request = new StartRollupJobRequest(id); // <1>
|
||||
// end::rollup-start-job-request
|
||||
try {
|
||||
// tag::rollup-start-job-execute
|
||||
RollupClient rc = client.rollup();
|
||||
StartRollupJobResponse response = rc.startRollupJob(request, RequestOptions.DEFAULT);
|
||||
// end::rollup-start-job-execute
|
||||
// tag::rollup-start-job-response
|
||||
response.isAcknowledged(); // <1>
|
||||
// end::rollup-start-job-response
|
||||
} catch (Exception e) {
|
||||
// Swallow any exception, this test does not test actually cancelling.
|
||||
}
|
||||
// stop job to prevent spamming exceptions on next start request
|
||||
StopRollupJobRequest stopRequest = new StopRollupJobRequest(id);
|
||||
stopRequest.waitForCompletion();
|
||||
stopRequest.timeout(TimeValue.timeValueSeconds(10));
|
||||
|
||||
StopRollupJobResponse response = client.rollup().stopRollupJob(stopRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(response.isAcknowledged());
|
||||
|
||||
// tag::rollup-start-job-execute-listener
|
||||
ActionListener<StartRollupJobResponse> listener = new ActionListener<StartRollupJobResponse>() {
|
||||
@Override
|
||||
public void onResponse(StartRollupJobResponse response) {
|
||||
// <1>
|
||||
}
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::rollup-start-job-execute-listener
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
// tag::rollup-start-job-execute-async
|
||||
RollupClient rc = client.rollup();
|
||||
rc.startRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::rollup-start-job-execute-async
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
|
||||
// stop job so it can correctly be deleted by the test teardown
|
||||
response = rc.stopRollupJob(stopRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(response.isAcknowledged());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testStopRollupJob() throws Exception {
|
||||
testCreateRollupJob();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
String id = "job_1";
|
||||
// tag::rollup-stop-job-request
|
||||
StopRollupJobRequest request = new StopRollupJobRequest(id); // <1>
|
||||
request.waitForCompletion(true); // <2>
|
||||
request.timeout(TimeValue.timeValueSeconds(10)); // <3>
|
||||
// end::rollup-stop-job-request
|
||||
|
||||
|
||||
try {
|
||||
// tag::rollup-stop-job-execute
|
||||
RollupClient rc = client.rollup();
|
||||
StopRollupJobResponse response = rc.stopRollupJob(request, RequestOptions.DEFAULT);
|
||||
// end::rollup-stop-job-execute
|
||||
|
||||
// tag::rollup-stop-job-response
|
||||
response.isAcknowledged(); // <1>
|
||||
// end::rollup-stop-job-response
|
||||
} catch (Exception e) {
|
||||
// Swallow any exception, this test does not test actually cancelling.
|
||||
}
|
||||
|
||||
// tag::rollup-stop-job-execute-listener
|
||||
ActionListener<StopRollupJobResponse> listener = new ActionListener<StopRollupJobResponse>() {
|
||||
@Override
|
||||
public void onResponse(StopRollupJobResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::rollup-stop-job-execute-listener
|
||||
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::rollup-stop-job-execute-async
|
||||
RollupClient rc = client.rollup();
|
||||
rc.stopRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::rollup-stop-job-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testSearch() throws Exception {
|
||||
// Setup a rollup index to query
|
||||
testCreateRollupJob();
|
||||
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
// tag::search-request
|
||||
SearchRequest request = new SearchRequest();
|
||||
request.source(new SearchSourceBuilder()
|
||||
.size(0)
|
||||
.aggregation(new MaxAggregationBuilder("max_temperature")
|
||||
.field("temperature")));
|
||||
// end::search-request
|
||||
|
||||
// tag::search-execute
|
||||
SearchResponse response =
|
||||
client.rollup().search(request, RequestOptions.DEFAULT);
|
||||
// end::search-execute
|
||||
|
||||
// tag::search-response
|
||||
NumericMetricsAggregation.SingleValue maxTemperature =
|
||||
response.getAggregations().get("max_temperature");
|
||||
assertThat(maxTemperature.value(), closeTo(49.0, .00001));
|
||||
// end::search-response
|
||||
|
||||
ActionListener<SearchResponse> listener;
|
||||
// tag::search-execute-listener
|
||||
listener = new ActionListener<SearchResponse>() {
|
||||
@Override
|
||||
public void onResponse(SearchResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::search-execute-listener
|
||||
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::search-execute-async
|
||||
client.rollup().searchAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::search-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testGetRollupCaps() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
DateHistogramGroupConfig dateHistogram = new DateHistogramGroupConfig.FixedInterval(
|
||||
"timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1>
|
||||
TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter");
|
||||
HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out");
|
||||
GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms);
|
||||
List<MetricConfig> metrics = new ArrayList<>(); // <1>
|
||||
metrics.add(new MetricConfig("temperature", Arrays.asList("min", "max", "sum")));
|
||||
metrics.add(new MetricConfig("voltage", Arrays.asList("avg", "value_count")));
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-caps-setup
|
||||
final String indexPattern = "docs";
|
||||
final String rollupIndexName = "rollup";
|
||||
final String cron = "*/1 * * * * ?";
|
||||
final int pageSize = 100;
|
||||
final TimeValue timeout = null;
|
||||
|
||||
String id = "job_1";
|
||||
RollupJobConfig config = new RollupJobConfig(id, indexPattern, rollupIndexName, cron,
|
||||
pageSize, groups, metrics, timeout);
|
||||
|
||||
PutRollupJobRequest request = new PutRollupJobRequest(config);
|
||||
AcknowledgedResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
|
||||
|
||||
boolean acknowledged = response.isAcknowledged();
|
||||
//end::x-pack-rollup-get-rollup-caps-setup
|
||||
assertTrue(acknowledged);
|
||||
|
||||
ClusterHealthRequest healthRequest = new ClusterHealthRequest(config.getRollupIndex()).waitForYellowStatus();
|
||||
ClusterHealthResponse healthResponse = client.cluster().health(healthRequest, RequestOptions.DEFAULT);
|
||||
assertFalse(healthResponse.isTimedOut());
|
||||
assertThat(healthResponse.getStatus(), oneOf(ClusterHealthStatus.YELLOW, ClusterHealthStatus.GREEN));
|
||||
|
||||
// Now that the job is created, we should have a rollup index with metadata.
|
||||
// We can test out the caps API now.
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-caps-request
|
||||
GetRollupCapsRequest getRollupCapsRequest = new GetRollupCapsRequest("docs");
|
||||
//end::x-pack-rollup-get-rollup-caps-request
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-caps-execute
|
||||
GetRollupCapsResponse capsResponse = client.rollup().getRollupCapabilities(getRollupCapsRequest, RequestOptions.DEFAULT);
|
||||
//end::x-pack-rollup-get-rollup-caps-execute
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-caps-response
|
||||
Map<String, RollableIndexCaps> rolledPatterns = capsResponse.getJobs();
|
||||
|
||||
RollableIndexCaps docsPattern = rolledPatterns.get("docs");
|
||||
|
||||
// indexName will be "docs" in this case... the index pattern that we rolled up
|
||||
String indexName = docsPattern.getIndexName();
|
||||
|
||||
// Each index pattern can have multiple jobs that rolled it up, so `getJobCaps()`
|
||||
// returns a list of jobs that rolled up the pattern
|
||||
List<RollupJobCaps> rollupJobs = docsPattern.getJobCaps();
|
||||
RollupJobCaps jobCaps = rollupJobs.get(0);
|
||||
|
||||
// jobID is the identifier we used when we created the job (e.g. `job1`)
|
||||
String jobID = jobCaps.getJobID();
|
||||
|
||||
// rollupIndex is the location that the job stored it's rollup docs (e.g. `rollup`)
|
||||
String rollupIndex = jobCaps.getRollupIndex();
|
||||
|
||||
// indexPattern is the same as the indexName that we retrieved earlier, redundant info
|
||||
assert jobCaps.getIndexPattern().equals(indexName);
|
||||
|
||||
// Finally, fieldCaps are the capabilities of individual fields in the config
|
||||
// The key is the field name, and the value is a RollupFieldCaps object which
|
||||
// provides more info.
|
||||
Map<String, RollupJobCaps.RollupFieldCaps> fieldCaps = jobCaps.getFieldCaps();
|
||||
|
||||
// If we retrieve the "timestamp" field, it returns a list of maps. Each list
|
||||
// item represents a different aggregation that can be run against the "timestamp"
|
||||
// field, and any additional details specific to that agg (interval, etc)
|
||||
List<Map<String, Object>> timestampCaps = fieldCaps.get("timestamp").getAggs();
|
||||
logger.error(timestampCaps.get(0).toString());
|
||||
assert timestampCaps.get(0).toString().equals("{agg=date_histogram, fixed_interval=1h, delay=7d, time_zone=UTC}");
|
||||
|
||||
// In contrast to the timestamp field, the temperature field has multiple aggs configured
|
||||
List<Map<String, Object>> temperatureCaps = fieldCaps.get("temperature").getAggs();
|
||||
assert temperatureCaps.toString().equals("[{agg=min}, {agg=max}, {agg=sum}]");
|
||||
//end::x-pack-rollup-get-rollup-caps-response
|
||||
|
||||
assertThat(indexName, equalTo("docs"));
|
||||
assertThat(jobID, equalTo("job_1"));
|
||||
assertThat(rollupIndex, equalTo("rollup"));
|
||||
assertThat(fieldCaps.size(), equalTo(8));
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-caps-execute-listener
|
||||
ActionListener<GetRollupCapsResponse> listener = new ActionListener<GetRollupCapsResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetRollupCapsResponse response) {
|
||||
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-rollup-get-rollup-caps-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-caps-execute-async
|
||||
client.rollup().getRollupCapabilitiesAsync(getRollupCapsRequest, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::x-pack-rollup-get-rollup-caps-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testGetRollupIndexCaps() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
DateHistogramGroupConfig dateHistogram = new DateHistogramGroupConfig.FixedInterval(
|
||||
"timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1>
|
||||
TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter");
|
||||
HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out");
|
||||
GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms);
|
||||
List<MetricConfig> metrics = new ArrayList<>(); // <1>
|
||||
metrics.add(new MetricConfig("temperature", Arrays.asList("min", "max", "sum")));
|
||||
metrics.add(new MetricConfig("voltage", Arrays.asList("avg", "value_count")));
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-index-caps-setup
|
||||
final String indexPattern = "docs";
|
||||
final String rollupIndexName = "rollup";
|
||||
final String cron = "*/1 * * * * ?";
|
||||
final int pageSize = 100;
|
||||
final TimeValue timeout = null;
|
||||
|
||||
String id = "job_1";
|
||||
RollupJobConfig config = new RollupJobConfig(id, indexPattern, rollupIndexName, cron,
|
||||
pageSize, groups, metrics, timeout);
|
||||
|
||||
PutRollupJobRequest request = new PutRollupJobRequest(config);
|
||||
AcknowledgedResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
|
||||
|
||||
boolean acknowledged = response.isAcknowledged();
|
||||
//end::x-pack-rollup-get-rollup-index-caps-setup
|
||||
assertTrue(acknowledged);
|
||||
|
||||
ClusterHealthRequest healthRequest = new ClusterHealthRequest(config.getRollupIndex()).waitForYellowStatus();
|
||||
ClusterHealthResponse healthResponse = client.cluster().health(healthRequest, RequestOptions.DEFAULT);
|
||||
assertFalse(healthResponse.isTimedOut());
|
||||
assertThat(healthResponse.getStatus(), oneOf(ClusterHealthStatus.YELLOW, ClusterHealthStatus.GREEN));
|
||||
|
||||
// Now that the job is created, we should have a rollup index with metadata.
|
||||
// We can test out the caps API now.
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-index-caps-request
|
||||
GetRollupIndexCapsRequest getRollupIndexCapsRequest = new GetRollupIndexCapsRequest("rollup");
|
||||
//end::x-pack-rollup-get-rollup-index-caps-request
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-index-caps-execute
|
||||
GetRollupIndexCapsResponse capsResponse = client.rollup()
|
||||
.getRollupIndexCapabilities(getRollupIndexCapsRequest, RequestOptions.DEFAULT);
|
||||
//end::x-pack-rollup-get-rollup-index-caps-execute
|
||||
|
||||
//tag::x-pack-rollup-get-rollup-index-caps-response
|
||||
Map<String, RollableIndexCaps> rolledPatterns = capsResponse.getJobs();
|
||||
|
||||
RollableIndexCaps docsPattern = rolledPatterns.get("rollup");
|
||||
|
||||
// indexName will be "rollup", the target index we requested
|
||||
String indexName = docsPattern.getIndexName();
|
||||
|
||||
// Each index pattern can have multiple jobs that rolled it up, so `getJobCaps()`
|
||||
// returns a list of jobs that rolled up the pattern
|
||||
List<RollupJobCaps> rollupJobs = docsPattern.getJobCaps();
|
||||
RollupJobCaps jobCaps = rollupJobs.get(0);
|
||||
|
||||
// jobID is the identifier we used when we created the job (e.g. `job1`)
|
||||
String jobID = jobCaps.getJobID();
|
||||
|
||||
// rollupIndex is the location that the job stored it's rollup docs (e.g. `rollup`)
|
||||
String rollupIndex = jobCaps.getRollupIndex();
|
||||
|
||||
// Finally, fieldCaps are the capabilities of individual fields in the config
|
||||
// The key is the field name, and the value is a RollupFieldCaps object which
|
||||
// provides more info.
|
||||
Map<String, RollupJobCaps.RollupFieldCaps> fieldCaps = jobCaps.getFieldCaps();
|
||||
|
||||
// If we retrieve the "timestamp" field, it returns a list of maps. Each list
|
||||
// item represents a different aggregation that can be run against the "timestamp"
|
||||
// field, and any additional details specific to that agg (interval, etc)
|
||||
List<Map<String, Object>> timestampCaps = fieldCaps.get("timestamp").getAggs();
|
||||
logger.error(timestampCaps.get(0).toString());
|
||||
assert timestampCaps.get(0).toString().equals("{agg=date_histogram, fixed_interval=1h, delay=7d, time_zone=UTC}");
|
||||
|
||||
// In contrast to the timestamp field, the temperature field has multiple aggs configured
|
||||
List<Map<String, Object>> temperatureCaps = fieldCaps.get("temperature").getAggs();
|
||||
assert temperatureCaps.toString().equals("[{agg=min}, {agg=max}, {agg=sum}]");
|
||||
//end::x-pack-rollup-get-rollup-index-caps-response
|
||||
|
||||
assertThat(indexName, equalTo("rollup"));
|
||||
assertThat(jobID, equalTo("job_1"));
|
||||
assertThat(rollupIndex, equalTo("rollup"));
|
||||
assertThat(fieldCaps.size(), equalTo(8));
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-index-caps-execute-listener
|
||||
ActionListener<GetRollupIndexCapsResponse> listener = new ActionListener<GetRollupIndexCapsResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetRollupIndexCapsResponse response) {
|
||||
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-rollup-get-rollup-index-caps-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-rollup-get-rollup-index-caps-execute-async
|
||||
client.rollup().getRollupIndexCapabilitiesAsync(getRollupIndexCapsRequest, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::x-pack-rollup-get-rollup-index-caps-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testDeleteRollupJob() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
String id = "job_2";
|
||||
|
||||
// tag::rollup-delete-job-request
|
||||
DeleteRollupJobRequest request = new DeleteRollupJobRequest(id); // <1>
|
||||
// end::rollup-delete-job-request
|
||||
try {
|
||||
// tag::rollup-delete-job-execute
|
||||
AcknowledgedResponse response = client.rollup().deleteRollupJob(request, RequestOptions.DEFAULT);
|
||||
// end::rollup-delete-job-execute
|
||||
|
||||
// tag::rollup-delete-job-response
|
||||
response.isAcknowledged(); // <1>
|
||||
// end::rollup-delete-job-response
|
||||
} catch (Exception e) {
|
||||
// Swallow any exception, this test does not test actually cancelling.
|
||||
}
|
||||
|
||||
// tag::rollup-delete-job-execute-listener
|
||||
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(AcknowledgedResponse response) {
|
||||
boolean acknowledged = response.isAcknowledged(); // <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::rollup-delete-job-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::rollup-delete-job-execute-async
|
||||
client.rollup().deleteRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::rollup-delete-job-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
|
@ -1,56 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class DeleteRollupJobRequestTests extends AbstractXContentTestCase<DeleteRollupJobRequest> {
|
||||
|
||||
private String jobId;
|
||||
|
||||
@Before
|
||||
public void setUpOptionalId() {
|
||||
jobId = randomAlphaOfLengthBetween(1, 10);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DeleteRollupJobRequest createTestInstance() {
|
||||
return new DeleteRollupJobRequest(jobId);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DeleteRollupJobRequest doParseInstance(final XContentParser parser) throws IOException {
|
||||
return DeleteRollupJobRequest.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testRequireConfiguration() {
|
||||
final NullPointerException e = expectThrows(NullPointerException.class, ()-> new DeleteRollupJobRequest(null));
|
||||
assertEquals("id parameter must not be null", e.getMessage());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.cluster.metadata.Metadata;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class GetRollupCapsRequestTests extends ESTestCase {
|
||||
|
||||
public void testImplicitIndexPattern() {
|
||||
String pattern = randomFrom("", "*", Metadata.ALL, null);
|
||||
GetRollupCapsRequest request = new GetRollupCapsRequest(pattern);
|
||||
assertThat(request.getIndexPattern(), equalTo(Metadata.ALL));
|
||||
}
|
||||
}
|
|
@ -1,58 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class GetRollupCapsResponseTests extends RollupCapsResponseTestCase<GetRollupCapsResponse> {
|
||||
|
||||
@Override
|
||||
protected GetRollupCapsResponse createTestInstance() {
|
||||
return new GetRollupCapsResponse(indices);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void toXContent(GetRollupCapsResponse response, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
for (Map.Entry<String, RollableIndexCaps> entry : response.getJobs().entrySet()) {
|
||||
entry.getValue().toXContent(builder, null);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> randomFieldsExcludeFilter() {
|
||||
return (field) -> {
|
||||
// base cannot have extra things in it
|
||||
return "".equals(field)
|
||||
// the field list expects to be a nested object of a certain type
|
||||
|| field.contains("fields");
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GetRollupCapsResponse fromXContent(XContentParser parser) throws IOException {
|
||||
return GetRollupCapsResponse.fromXContent(parser);
|
||||
}
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class GetRollupIndexCapsRequestTests extends ESTestCase {
|
||||
|
||||
public void testNullOrEmptyIndices() {
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GetRollupIndexCapsRequest((String[]) null));
|
||||
assertThat(e.getMessage(), equalTo("[indices] must not be null or empty"));
|
||||
|
||||
String[] indices = new String[]{};
|
||||
e = expectThrows(IllegalArgumentException.class, () -> new GetRollupIndexCapsRequest(indices));
|
||||
assertThat(e.getMessage(), equalTo("[indices] must not be null or empty"));
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class, () -> new GetRollupIndexCapsRequest(new String[]{"foo", null}));
|
||||
assertThat(e.getMessage(), equalTo("[index] must not be null or empty"));
|
||||
}
|
||||
}
|
|
@ -1,59 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class GetRollupIndexCapsResponseTests extends RollupCapsResponseTestCase<GetRollupIndexCapsResponse> {
|
||||
|
||||
@Override
|
||||
protected GetRollupIndexCapsResponse createTestInstance() {
|
||||
return new GetRollupIndexCapsResponse(indices);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void toXContent(GetRollupIndexCapsResponse response, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
for (Map.Entry<String, RollableIndexCaps> entry : response.getJobs().entrySet()) {
|
||||
entry.getValue().toXContent(builder, null);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> randomFieldsExcludeFilter() {
|
||||
return (field) ->
|
||||
{
|
||||
// base cannot have extra things in it
|
||||
return "".equals(field)
|
||||
// the field list expects to be a nested object of a certain type
|
||||
|| field.contains("fields");
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GetRollupIndexCapsResponse fromXContent(XContentParser parser) throws IOException {
|
||||
return GetRollupIndexCapsResponse.fromXContent(parser);
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
public class GetRollupJobRequestTests extends ESTestCase {
|
||||
public void testRequiresJob() {
|
||||
final NullPointerException e = expectThrows(NullPointerException.class, () -> new GetRollupJobRequest(null));
|
||||
assertEquals("jobId is required", e.getMessage());
|
||||
}
|
||||
|
||||
public void testDoNotUseAll() {
|
||||
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GetRollupJobRequest("_all"));
|
||||
assertEquals("use the default ctor to ask for all jobs", e.getMessage());
|
||||
}
|
||||
}
|
|
@ -1,133 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.core.IndexerJobStats;
|
||||
import org.elasticsearch.client.core.IndexerState;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class GetRollupJobResponseTests extends ESTestCase {
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(
|
||||
this::createParser,
|
||||
this::createTestInstance,
|
||||
this::toXContent,
|
||||
GetRollupJobResponse::fromXContent)
|
||||
.supportsUnknownFields(false)
|
||||
.randomFieldsExcludeFilter(field ->
|
||||
field.endsWith("status.current_position"))
|
||||
.test();
|
||||
}
|
||||
|
||||
private GetRollupJobResponse createTestInstance() {
|
||||
int jobCount = between(1, 5);
|
||||
List<JobWrapper> jobs = new ArrayList<>();
|
||||
for (int j = 0; j < jobCount; j++) {
|
||||
jobs.add(new JobWrapper(
|
||||
RollupJobConfigTests.randomRollupJobConfig(randomAlphaOfLength(5)),
|
||||
randomStats(),
|
||||
randomStatus()));
|
||||
}
|
||||
return new GetRollupJobResponse(jobs);
|
||||
}
|
||||
|
||||
private RollupIndexerJobStats randomStats() {
|
||||
return new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
|
||||
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
|
||||
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
|
||||
randomNonNegativeLong());
|
||||
}
|
||||
|
||||
private RollupJobStatus randomStatus() {
|
||||
Map<String, Object> currentPosition = new HashMap<>();
|
||||
int positions = between(0, 10);
|
||||
while (currentPosition.size() < positions) {
|
||||
currentPosition.put(randomAlphaOfLength(2), randomAlphaOfLength(2));
|
||||
}
|
||||
return new RollupJobStatus(
|
||||
randomFrom(IndexerState.values()),
|
||||
currentPosition,
|
||||
randomBoolean());
|
||||
}
|
||||
|
||||
private void toXContent(GetRollupJobResponse response, XContentBuilder builder) throws IOException {
|
||||
ToXContent.Params params = ToXContent.EMPTY_PARAMS;
|
||||
builder.startObject();
|
||||
builder.startArray(GetRollupJobResponse.JOBS.getPreferredName());
|
||||
for (JobWrapper job : response.getJobs()) {
|
||||
toXContent(job, builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
private void toXContent(JobWrapper jobWrapper, XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(GetRollupJobResponse.CONFIG.getPreferredName());
|
||||
jobWrapper.getJob().toXContent(builder, params);
|
||||
builder.field(GetRollupJobResponse.STATUS.getPreferredName());
|
||||
toXContent(jobWrapper.getStatus(), builder, params);
|
||||
builder.field(GetRollupJobResponse.STATS.getPreferredName());
|
||||
toXContent(jobWrapper.getStats(), builder, params);
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
public void toXContent(RollupJobStatus status, XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(GetRollupJobResponse.STATE.getPreferredName(), status.getState().value());
|
||||
if (status.getCurrentPosition() != null) {
|
||||
builder.field(GetRollupJobResponse.CURRENT_POSITION.getPreferredName(), status.getCurrentPosition());
|
||||
}
|
||||
builder.field(GetRollupJobResponse.UPGRADED_DOC_ID.getPreferredName(), status.getUpgradedDocumentId());
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
public void toXContent(RollupIndexerJobStats stats, XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(IndexerJobStats.NUM_PAGES.getPreferredName(), stats.getNumPages());
|
||||
builder.field(IndexerJobStats.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments());
|
||||
builder.field(GetRollupJobResponse.ROLLUPS_INDEXED.getPreferredName(), stats.getOutputDocuments());
|
||||
builder.field(IndexerJobStats.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations());
|
||||
builder.field(IndexerJobStats.INDEX_TIME_IN_MS.getPreferredName(), stats.getIndexTime());
|
||||
builder.field(IndexerJobStats.INDEX_TOTAL.getPreferredName(), stats.getIndexTotal());
|
||||
builder.field(IndexerJobStats.INDEX_FAILURES.getPreferredName(), stats.getIndexFailures());
|
||||
builder.field(IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
|
||||
builder.field(IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
|
||||
builder.field(IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
|
||||
builder.field(IndexerJobStats.PROCESSING_TIME_IN_MS.getPreferredName(), stats.getProcessingTime());
|
||||
builder.field(IndexerJobStats.PROCESSING_TOTAL.getPreferredName(), stats.getProcessingTotal());
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,59 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
||||
public class PutRollupJobRequestTests extends AbstractXContentTestCase<PutRollupJobRequest> {
|
||||
|
||||
private String jobId;
|
||||
|
||||
@Before
|
||||
public void setUpOptionalId() {
|
||||
jobId = randomAlphaOfLengthBetween(1, 10);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PutRollupJobRequest createTestInstance() {
|
||||
return new PutRollupJobRequest(RollupJobConfigTests.randomRollupJobConfig(jobId));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PutRollupJobRequest doParseInstance(final XContentParser parser) throws IOException {
|
||||
final String optionalId = randomBoolean() ? jobId : null;
|
||||
return new PutRollupJobRequest(RollupJobConfig.fromXContent(parser, optionalId));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testRequireConfiguration() {
|
||||
final NullPointerException e = expectThrows(NullPointerException.class, ()-> new PutRollupJobRequest(null));
|
||||
assertEquals("rollup job configuration is required", e.getMessage());
|
||||
}
|
||||
}
|
|
@ -1,166 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.GroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.HistogramGroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.MetricConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
|
||||
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.util.Collections.singletonMap;
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
abstract class RollupCapsResponseTestCase<T> extends ESTestCase {
|
||||
|
||||
protected Map<String, RollableIndexCaps> indices;
|
||||
|
||||
protected abstract T createTestInstance();
|
||||
|
||||
protected abstract void toXContent(T response, XContentBuilder builder) throws IOException;
|
||||
|
||||
protected abstract T fromXContent(XContentParser parser) throws IOException;
|
||||
|
||||
protected Predicate<String> randomFieldsExcludeFilter() {
|
||||
return field -> false;
|
||||
}
|
||||
|
||||
protected String[] shuffleFieldsExceptions() {
|
||||
return Strings.EMPTY_ARRAY;
|
||||
}
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(
|
||||
this::createParser,
|
||||
this::createTestInstance,
|
||||
this::toXContent,
|
||||
this::fromXContent)
|
||||
.supportsUnknownFields(true)
|
||||
.randomFieldsExcludeFilter(randomFieldsExcludeFilter())
|
||||
.shuffleFieldsExceptions(shuffleFieldsExceptions())
|
||||
.test();
|
||||
}
|
||||
|
||||
@Before
|
||||
private void setupIndices() throws IOException {
|
||||
int numIndices = randomIntBetween(1,5);
|
||||
indices = new HashMap<>(numIndices);
|
||||
for (int i = 0; i < numIndices; i++) {
|
||||
String indexName = "index_" + randomAlphaOfLength(10);
|
||||
int numJobs = randomIntBetween(1,5);
|
||||
List<RollupJobCaps> jobs = new ArrayList<>(numJobs);
|
||||
for (int j = 0; j < numJobs; j++) {
|
||||
RollupJobConfig config = RollupJobConfigTests.randomRollupJobConfig(randomAlphaOfLength(10));
|
||||
jobs.add(new RollupJobCaps(config.getId(), config.getIndexPattern(),
|
||||
config.getRollupIndex(), createRollupFieldCaps(config)));
|
||||
}
|
||||
RollableIndexCaps cap = new RollableIndexCaps(indexName, jobs);
|
||||
indices.put(indexName, cap);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Lifted from core's RollupJobCaps, so that we can test without having to include this actual logic in the request
|
||||
*/
|
||||
private static Map<String, RollupJobCaps.RollupFieldCaps> createRollupFieldCaps(final RollupJobConfig rollupJobConfig) {
|
||||
final Map<String, List<Map<String, Object>>> tempFieldCaps = new HashMap<>();
|
||||
|
||||
final GroupConfig groupConfig = rollupJobConfig.getGroupConfig();
|
||||
if (groupConfig != null) {
|
||||
// Create RollupFieldCaps for the date histogram
|
||||
final DateHistogramGroupConfig dateHistogram = groupConfig.getDateHistogram();
|
||||
final Map<String, Object> dateHistogramAggCap = new HashMap<>();
|
||||
dateHistogramAggCap.put("agg", DateHistogramAggregationBuilder.NAME);
|
||||
dateHistogramAggCap.put("interval", dateHistogram.getInterval().toString());
|
||||
if (dateHistogram.getDelay() != null) {
|
||||
dateHistogramAggCap.put("delay", dateHistogram.getDelay().toString());
|
||||
}
|
||||
dateHistogramAggCap.put("time_zone", dateHistogram.getTimeZone());
|
||||
|
||||
List<Map<String, Object>> dateAggCaps = tempFieldCaps.getOrDefault(dateHistogram.getField(), new ArrayList<>());
|
||||
dateAggCaps.add(dateHistogramAggCap);
|
||||
tempFieldCaps.put(dateHistogram.getField(), dateAggCaps);
|
||||
|
||||
// Create RollupFieldCaps for the histogram
|
||||
final HistogramGroupConfig histogram = groupConfig.getHistogram();
|
||||
if (histogram != null) {
|
||||
final Map<String, Object> histogramAggCap = new HashMap<>();
|
||||
histogramAggCap.put("agg", HistogramAggregationBuilder.NAME);
|
||||
histogramAggCap.put("interval", histogram.getInterval());
|
||||
Arrays.stream(rollupJobConfig.getGroupConfig().getHistogram().getFields()).forEach(field -> {
|
||||
List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(field, new ArrayList<>());
|
||||
caps.add(histogramAggCap);
|
||||
tempFieldCaps.put(field, caps);
|
||||
});
|
||||
}
|
||||
|
||||
// Create RollupFieldCaps for the term
|
||||
final TermsGroupConfig terms = groupConfig.getTerms();
|
||||
if (terms != null) {
|
||||
final Map<String, Object> termsAggCap = singletonMap("agg", TermsAggregationBuilder.NAME);
|
||||
Arrays.stream(rollupJobConfig.getGroupConfig().getTerms().getFields()).forEach(field -> {
|
||||
List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(field, new ArrayList<>());
|
||||
caps.add(termsAggCap);
|
||||
tempFieldCaps.put(field, caps);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Create RollupFieldCaps for the metrics
|
||||
final List<MetricConfig> metricsConfig = rollupJobConfig.getMetricsConfig();
|
||||
if (metricsConfig.size() > 0) {
|
||||
rollupJobConfig.getMetricsConfig().forEach(metricConfig -> {
|
||||
final List<Map<String, Object>> metrics = metricConfig.getMetrics().stream()
|
||||
.map(metric -> singletonMap("agg", (Object) metric))
|
||||
.collect(Collectors.toList());
|
||||
metrics.forEach(m -> {
|
||||
List<Map<String, Object>> caps = tempFieldCaps
|
||||
.getOrDefault(metricConfig.getField(), new ArrayList<>());
|
||||
caps.add(m);
|
||||
tempFieldCaps.put(metricConfig.getField(), caps);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return Collections.unmodifiableMap(tempFieldCaps.entrySet()
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey,
|
||||
e -> new RollupJobCaps.RollupFieldCaps(e.getValue()))));
|
||||
}
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
|
||||
public class StartRollupJobRequestTests extends ESTestCase {
|
||||
|
||||
public void testConstructor() {
|
||||
String jobId = randomAlphaOfLength(5);
|
||||
assertEquals(jobId, new StartRollupJobRequest(jobId).getJobId());
|
||||
}
|
||||
|
||||
public void testEqualsAndHash() {
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(new StartRollupJobRequest(randomAlphaOfLength(5)),
|
||||
orig -> new StartRollupJobRequest(orig.getJobId()),
|
||||
orig -> new StartRollupJobRequest(orig.getJobId() + "_suffix"));
|
||||
}
|
||||
|
||||
public void testRequireJobId() {
|
||||
final NullPointerException e = expectThrows(NullPointerException.class, ()-> new StartRollupJobRequest(null));
|
||||
assertEquals("id parameter must not be null", e.getMessage());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.core.AcknowledgedResponseTests;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class StartRollupJobResponseTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
this::createTestInstance,
|
||||
AcknowledgedResponseTests::toXContent,
|
||||
StartRollupJobResponse::fromXContent)
|
||||
.supportsUnknownFields(false)
|
||||
.test();
|
||||
}
|
||||
private StartRollupJobResponse createTestInstance() {
|
||||
return new StartRollupJobResponse(randomBoolean());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
|
||||
public class StopRollupJobRequestTests extends ESTestCase {
|
||||
|
||||
public void testConstructor() {
|
||||
String jobId = randomAlphaOfLength(5);
|
||||
assertEquals(jobId, new StopRollupJobRequest(jobId).getJobId());
|
||||
}
|
||||
|
||||
public void testEqualsAndHash() {
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(new StopRollupJobRequest(randomAlphaOfLength(5)),
|
||||
orig -> new StopRollupJobRequest(orig.getJobId()),
|
||||
orig -> new StopRollupJobRequest(orig.getJobId() + "_suffix"));
|
||||
}
|
||||
|
||||
public void testRequireJobId() {
|
||||
final NullPointerException e = expectThrows(NullPointerException.class, ()-> new StopRollupJobRequest(null));
|
||||
assertEquals("id parameter must not be null", e.getMessage());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.core.AcknowledgedResponseTests;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class StopRollupJobResponseTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
this::createTestInstance,
|
||||
AcknowledgedResponseTests::toXContent,
|
||||
StopRollupJobResponse::fromXContent)
|
||||
.supportsUnknownFields(false)
|
||||
.test();
|
||||
}
|
||||
private StopRollupJobResponse createTestInstance() {
|
||||
return new StopRollupJobResponse(randomBoolean());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,110 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class DateHistogramGroupConfigTests extends AbstractXContentTestCase<DateHistogramGroupConfig> {
|
||||
|
||||
@Override
|
||||
protected DateHistogramGroupConfig createTestInstance() {
|
||||
return randomDateHistogramGroupConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DateHistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException {
|
||||
return DateHistogramGroupConfig.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testValidateNullField() {
|
||||
final DateHistogramGroupConfig config = new DateHistogramGroupConfig(null, DateHistogramInterval.DAY, null, null);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Field name is required")));
|
||||
}
|
||||
|
||||
public void testValidateEmptyField() {
|
||||
final DateHistogramGroupConfig config = new DateHistogramGroupConfig("", DateHistogramInterval.DAY, null, null);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Field name is required")));
|
||||
}
|
||||
|
||||
public void testValidateNullInterval() {
|
||||
final DateHistogramGroupConfig config = new DateHistogramGroupConfig("field", null, null, null);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Interval is required")));
|
||||
}
|
||||
|
||||
public void testValidate() {
|
||||
final DateHistogramGroupConfig config = randomDateHistogramGroupConfig();
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(false));
|
||||
}
|
||||
|
||||
static DateHistogramGroupConfig randomDateHistogramGroupConfig() {
|
||||
final String field = randomAlphaOfLength(randomIntBetween(3, 10));
|
||||
final DateHistogramInterval delay = randomBoolean() ? new DateHistogramInterval(randomPositiveTimeValue()) : null;
|
||||
final String timezone = randomBoolean() ? randomDateTimeZone().toString() : null;
|
||||
int i = randomIntBetween(0,2);
|
||||
final DateHistogramInterval interval;
|
||||
switch (i) {
|
||||
case 0:
|
||||
interval = new DateHistogramInterval(randomPositiveTimeValue());
|
||||
return new DateHistogramGroupConfig.FixedInterval(field, interval, delay, timezone);
|
||||
case 1:
|
||||
interval = new DateHistogramInterval(randomTimeValue(1,1, "m", "h", "d", "w"));
|
||||
return new DateHistogramGroupConfig.CalendarInterval(field, interval, delay, timezone);
|
||||
default:
|
||||
interval = new DateHistogramInterval(randomPositiveTimeValue());
|
||||
return new DateHistogramGroupConfig(field, interval, delay, timezone);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
|
@ -1,116 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {
|
||||
|
||||
@Override
|
||||
protected GroupConfig createTestInstance() {
|
||||
return randomGroupConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GroupConfig doParseInstance(final XContentParser parser) throws IOException {
|
||||
return GroupConfig.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testValidateNullDateHistogramGroupConfig() {
|
||||
final GroupConfig config = new GroupConfig(null);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Date histogram must not be null")));
|
||||
}
|
||||
|
||||
public void testValidateDateHistogramGroupConfigWithErrors() {
|
||||
final DateHistogramGroupConfig dateHistogramGroupConfig = new DateHistogramGroupConfig(null, null, null, null);
|
||||
|
||||
final GroupConfig config = new GroupConfig(dateHistogramGroupConfig);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(2));
|
||||
assertThat(validationException.validationErrors(),
|
||||
containsInAnyOrder("Field name is required", "Interval is required"));
|
||||
}
|
||||
|
||||
public void testValidateHistogramGroupConfigWithErrors() {
|
||||
final HistogramGroupConfig histogramGroupConfig = new HistogramGroupConfig(0L);
|
||||
|
||||
final GroupConfig config = new GroupConfig(randomGroupConfig().getDateHistogram(), histogramGroupConfig, null);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(2));
|
||||
assertThat(validationException.validationErrors(),
|
||||
containsInAnyOrder("Fields must have at least one value", "Interval must be a positive long"));
|
||||
}
|
||||
|
||||
public void testValidateTermsGroupConfigWithErrors() {
|
||||
final TermsGroupConfig termsGroupConfig = new TermsGroupConfig();
|
||||
|
||||
final GroupConfig config = new GroupConfig(randomGroupConfig().getDateHistogram(), null, termsGroupConfig);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Fields must have at least one value"));
|
||||
}
|
||||
|
||||
public void testValidate() {
|
||||
final GroupConfig config = randomGroupConfig();
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(false));
|
||||
}
|
||||
|
||||
static GroupConfig randomGroupConfig() {
|
||||
DateHistogramGroupConfig dateHistogram = DateHistogramGroupConfigTests.randomDateHistogramGroupConfig();
|
||||
HistogramGroupConfig histogram = randomBoolean() ? HistogramGroupConfigTests.randomHistogramGroupConfig() : null;
|
||||
TermsGroupConfig terms = randomBoolean() ? TermsGroupConfigTests.randomTermsGroupConfig() : null;
|
||||
return new GroupConfig(dateHistogram, histogram, terms);
|
||||
}
|
||||
}
|
|
@ -1,109 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class HistogramGroupConfigTests extends AbstractXContentTestCase<HistogramGroupConfig> {
|
||||
|
||||
@Override
|
||||
protected HistogramGroupConfig createTestInstance() {
|
||||
return randomHistogramGroupConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected HistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException {
|
||||
return HistogramGroupConfig.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testValidateNullFields() {
|
||||
final HistogramGroupConfig config = new HistogramGroupConfig(60L);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
|
||||
}
|
||||
|
||||
public void testValidatEmptyFields() {
|
||||
final HistogramGroupConfig config = new HistogramGroupConfig(60L, Strings.EMPTY_ARRAY);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
|
||||
}
|
||||
|
||||
public void testValidateNegativeInterval() {
|
||||
final HistogramGroupConfig config = new HistogramGroupConfig(-1L, randomHistogramGroupConfig().getFields());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Interval must be a positive long")));
|
||||
}
|
||||
|
||||
public void testValidateZeroInterval() {
|
||||
final HistogramGroupConfig config = new HistogramGroupConfig(0L, randomHistogramGroupConfig().getFields());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Interval must be a positive long")));
|
||||
}
|
||||
|
||||
public void testValidate() {
|
||||
final HistogramGroupConfig config = randomHistogramGroupConfig();
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(false));
|
||||
}
|
||||
static HistogramGroupConfig randomHistogramGroupConfig() {
|
||||
final long interval = randomNonNegativeLong();
|
||||
final String[] fields = new String[randomIntBetween(1, 10)];
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
fields[i] = randomAlphaOfLength(randomIntBetween(3, 10));
|
||||
}
|
||||
return new HistogramGroupConfig(interval, fields);
|
||||
}
|
||||
}
|
|
@ -1,127 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class MetricConfigTests extends AbstractXContentTestCase<MetricConfig> {
|
||||
|
||||
@Override
|
||||
protected MetricConfig createTestInstance() {
|
||||
return randomMetricConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MetricConfig doParseInstance(final XContentParser parser) throws IOException {
|
||||
return MetricConfig.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testValidateNullField() {
|
||||
final MetricConfig config = new MetricConfig(null, randomMetricConfig().getMetrics());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Field name is required")));
|
||||
}
|
||||
|
||||
public void testValidateEmptyField() {
|
||||
final MetricConfig config = new MetricConfig("", randomMetricConfig().getMetrics());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Field name is required")));
|
||||
}
|
||||
|
||||
public void testValidateNullListOfMetrics() {
|
||||
final MetricConfig config = new MetricConfig("field", null);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Metrics must be a non-null, non-empty array of strings")));
|
||||
}
|
||||
|
||||
public void testValidateEmptyListOfMetrics() {
|
||||
final MetricConfig config = new MetricConfig("field", Collections.emptyList());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Metrics must be a non-null, non-empty array of strings")));
|
||||
}
|
||||
|
||||
public void testValidate() {
|
||||
final MetricConfig config = randomMetricConfig();
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(false));
|
||||
}
|
||||
|
||||
static MetricConfig randomMetricConfig() {
|
||||
final List<String> metrics = new ArrayList<>();
|
||||
if (randomBoolean()) {
|
||||
metrics.add("min");
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
metrics.add("max");
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
metrics.add("sum");
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
metrics.add("avg");
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
metrics.add("value_count");
|
||||
}
|
||||
if (metrics.size() == 0) {
|
||||
metrics.add("min");
|
||||
}
|
||||
// large name so we don't accidentally collide
|
||||
return new MetricConfig(randomAlphaOfLengthBetween(15, 25), Collections.unmodifiableList(metrics));
|
||||
}
|
||||
}
|
|
@ -1,308 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static java.util.Collections.unmodifiableList;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class RollupJobConfigTests extends AbstractXContentTestCase<RollupJobConfig> {
|
||||
|
||||
private String id;
|
||||
|
||||
@Before
|
||||
public void setUpOptionalId() {
|
||||
id = randomAlphaOfLengthBetween(1, 10);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RollupJobConfig createTestInstance() {
|
||||
return randomRollupJobConfig(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RollupJobConfig doParseInstance(final XContentParser parser) throws IOException {
|
||||
return RollupJobConfig.fromXContent(parser, randomBoolean() ? id : null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testValidateNullId() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(null, sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Id must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidateEmptyId() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig("", sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Id must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidateNullIndexPattern() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), null, sample.getRollupIndex(), sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Index pattern must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidateEmptyIndexPattern() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), "", sample.getRollupIndex(), sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Index pattern must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidateMatchAllIndexPattern() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), "*", sample.getRollupIndex(), sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(),
|
||||
contains("Index pattern must not match all indices (as it would match it's own rollup index"));
|
||||
}
|
||||
|
||||
public void testValidateIndexPatternMatchesRollupIndex() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), "rollup*", "rollup", sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Index pattern would match rollup index name which is not allowed"));
|
||||
}
|
||||
|
||||
public void testValidateSameIndexAndRollupPatterns() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), "test", "test", sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Rollup index may not be the same as the index pattern"));
|
||||
}
|
||||
|
||||
public void testValidateNullRollupPattern() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), null, sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Rollup index must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidateEmptyRollupPattern() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), "", sample.getCron(),
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Rollup index must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidateNullCron() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), null,
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Cron schedule must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidateEmptyCron() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), "",
|
||||
sample.getPageSize(), sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Cron schedule must be a non-null, non-empty string"));
|
||||
}
|
||||
|
||||
public void testValidatePageSize() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
|
||||
sample.getCron(), 0, sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Page size is mandatory and must be a positive long"));
|
||||
}
|
||||
|
||||
public void testValidateGroupOrMetrics() {
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
|
||||
sample.getCron(), sample.getPageSize(), null, null, sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("At least one grouping or metric must be configured"));
|
||||
}
|
||||
|
||||
public void testValidateGroupConfigWithErrors() {
|
||||
final GroupConfig groupConfig = new GroupConfig(null);
|
||||
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
|
||||
sample.getCron(), sample.getPageSize(), groupConfig, sample.getMetricsConfig(), sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains("Date histogram must not be null"));
|
||||
}
|
||||
|
||||
public void testValidateListOfMetricsWithErrors() {
|
||||
final List<MetricConfig> metricsConfigs = singletonList(new MetricConfig(null, null));
|
||||
|
||||
final RollupJobConfig sample = randomRollupJobConfig(id);
|
||||
final RollupJobConfig config = new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(),
|
||||
sample.getCron(), sample.getPageSize(), sample.getGroupConfig(), metricsConfigs, sample.getTimeout());
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(2));
|
||||
assertThat(validationException.validationErrors(),
|
||||
containsInAnyOrder("Field name is required", "Metrics must be a non-null, non-empty array of strings"));
|
||||
}
|
||||
|
||||
public static RollupJobConfig randomRollupJobConfig(final String id) {
|
||||
final String indexPattern = randomAlphaOfLengthBetween(5, 20);
|
||||
final String rollupIndex = "rollup_" + indexPattern;
|
||||
final String cron = randomCron();
|
||||
final int pageSize = randomIntBetween(1, 100);
|
||||
final TimeValue timeout = randomBoolean() ? null :
|
||||
new TimeValue(randomIntBetween(0, 60), randomFrom(Arrays.asList(TimeUnit.MILLISECONDS, TimeUnit.SECONDS, TimeUnit.MINUTES)));
|
||||
final GroupConfig groups = GroupConfigTests.randomGroupConfig();
|
||||
|
||||
final List<MetricConfig> metrics = new ArrayList<>();
|
||||
if (randomBoolean()) {
|
||||
final int numMetrics = randomIntBetween(1, 10);
|
||||
for (int i = 0; i < numMetrics; i++) {
|
||||
metrics.add(MetricConfigTests.randomMetricConfig());
|
||||
}
|
||||
}
|
||||
return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, unmodifiableList(metrics), timeout);
|
||||
}
|
||||
|
||||
private static String randomCron() {
|
||||
return (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //second
|
||||
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //minute
|
||||
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 23))) + //hour
|
||||
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1, 31))) + //day of month
|
||||
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1, 12))) + //month
|
||||
" ?" + //day of week
|
||||
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1970, 2199))); //year
|
||||
}
|
||||
}
|
|
@ -1,87 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup.job.config;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class TermsGroupConfigTests extends AbstractXContentTestCase<TermsGroupConfig> {
|
||||
|
||||
@Override
|
||||
protected TermsGroupConfig createTestInstance() {
|
||||
return randomTermsGroupConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TermsGroupConfig doParseInstance(final XContentParser parser) throws IOException {
|
||||
return TermsGroupConfig.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testValidateNullFields() {
|
||||
final TermsGroupConfig config = new TermsGroupConfig();
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
|
||||
}
|
||||
|
||||
public void testValidatEmptyFields() {
|
||||
final TermsGroupConfig config = new TermsGroupConfig(Strings.EMPTY_ARRAY);
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(true));
|
||||
ValidationException validationException = validation.get();
|
||||
assertThat(validationException.validationErrors().size(), is(1));
|
||||
assertThat(validationException.validationErrors(), contains(is("Fields must have at least one value")));
|
||||
}
|
||||
|
||||
public void testValidate() {
|
||||
final TermsGroupConfig config = randomTermsGroupConfig();
|
||||
|
||||
Optional<ValidationException> validation = config.validate();
|
||||
assertThat(validation, notNullValue());
|
||||
assertThat(validation.isPresent(), is(false));
|
||||
}
|
||||
|
||||
static TermsGroupConfig randomTermsGroupConfig() {
|
||||
final String[] fields = new String[randomIntBetween(1, 10)];
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
fields[i] = randomAlphaOfLength(randomIntBetween(3, 10));
|
||||
}
|
||||
return new TermsGroupConfig(fields);
|
||||
}
|
||||
}
|
|
@ -131,7 +131,6 @@ def projectPathsToExclude = [
|
|||
':x-pack:plugin:mapper-flattened',
|
||||
':x-pack:plugin:monitoring',
|
||||
':x-pack:plugin:ql',
|
||||
':x-pack:plugin:rollup',
|
||||
':x-pack:plugin:search-business-rules',
|
||||
':x-pack:plugin:spatial',
|
||||
':x-pack:plugin:sql',
|
||||
|
|
|
@ -282,7 +282,7 @@ public abstract class ValuesSourceAggregationBuilder<AB extends ValuesSourceAggr
|
|||
public AB userValueTypeHint(ValueType valueType) {
|
||||
if (valueType == null) {
|
||||
// TODO: This is nonsense. We allow the value to be null (via constructor), but don't allow it to be set to null. This means
|
||||
// thing looking to copy settings (like RollupRequestTranslator) need to check if userValueTypeHint is not null, and then
|
||||
// thing looking to copy settings need to check if userValueTypeHint is not null, and then
|
||||
// set it if and only if it is non-null.
|
||||
throw new IllegalArgumentException("[userValueTypeHint] must not be null: [" + name + "]");
|
||||
}
|
||||
|
|
|
@ -427,8 +427,7 @@ public abstract class ESRestTestCase extends ESTestCase {
|
|||
/**
|
||||
* Returns whether to preserve the state of the cluster upon completion of this test. Defaults to false. If true, overrides the value of
|
||||
* {@link #preserveIndicesUponCompletion()}, {@link #preserveTemplatesUponCompletion()}, {@link #preserveReposUponCompletion()},
|
||||
* {@link #preserveSnapshotsUponCompletion()},{@link #preserveRollupJobsUponCompletion()},
|
||||
* and {@link #preserveILMPoliciesUponCompletion()}.
|
||||
* {@link #preserveSnapshotsUponCompletion()}, and {@link #preserveILMPoliciesUponCompletion()}.
|
||||
*
|
||||
* @return true if the state of the cluster should be preserved
|
||||
*/
|
||||
|
@ -493,15 +492,6 @@ public abstract class ESRestTestCase extends ESTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether to preserve the rollup jobs of this test. Defaults to
|
||||
* not preserving them. Only runs at all if xpack is installed on the
|
||||
* cluster being tested.
|
||||
*/
|
||||
protected boolean preserveRollupJobsUponCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether to preserve ILM Policies of this test. Defaults to not
|
||||
* preserving them. Only runs at all if xpack is installed on the cluster
|
||||
|
@ -536,14 +526,6 @@ public abstract class ESRestTestCase extends ESTestCase {
|
|||
|
||||
private void wipeCluster() throws Exception {
|
||||
|
||||
// Cleanup rollup before deleting indices. A rollup job might have bulks in-flight,
|
||||
// so we need to fully shut them down first otherwise a job might stall waiting
|
||||
// for a bulk to finish against a non-existing index (and then fail tests)
|
||||
if (hasXPack && false == preserveRollupJobsUponCompletion()) {
|
||||
wipeRollupJobs();
|
||||
waitForPendingRollupTasks();
|
||||
}
|
||||
|
||||
// Clean up SLM policies before trying to wipe snapshots so that no new ones get started by SLM after wiping
|
||||
if (nodeVersions.first().onOrAfter(Version.V_7_4_0)) { // SLM was introduced in version 7.4
|
||||
if (preserveSLMPoliciesUponCompletion() == false) {
|
||||
|
@ -775,47 +757,6 @@ public abstract class ESRestTestCase extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
private void wipeRollupJobs() throws IOException {
|
||||
final Response response;
|
||||
try {
|
||||
response = adminClient().performRequest(new Request("GET", "/_rollup/job/_all"));
|
||||
} catch (ResponseException e) {
|
||||
// If we don't see the rollup endpoint (possibly because of running against an older ES version) we just bail
|
||||
if (e.getResponse().getStatusLine().getStatusCode() == RestStatus.NOT_FOUND.getStatus()) {
|
||||
return;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
Map<String, Object> jobs = entityAsMap(response);
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Map<String, Object>> jobConfigs =
|
||||
(List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobs);
|
||||
|
||||
if (jobConfigs == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (Map<String, Object> jobConfig : jobConfigs) {
|
||||
@SuppressWarnings("unchecked")
|
||||
String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
|
||||
Request request = new Request("POST", "/_rollup/job/" + jobId + "/_stop");
|
||||
request.addParameter("ignore", "404");
|
||||
request.addParameter("wait_for_completion", "true");
|
||||
request.addParameter("timeout", "10s");
|
||||
logger.debug("stopping rollup job [{}]", jobId);
|
||||
adminClient().performRequest(request);
|
||||
}
|
||||
|
||||
for (Map<String, Object> jobConfig : jobConfigs) {
|
||||
@SuppressWarnings("unchecked")
|
||||
String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
|
||||
Request request = new Request("DELETE", "/_rollup/job/" + jobId);
|
||||
request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this
|
||||
logger.debug("deleting rollup job [{}]", jobId);
|
||||
adminClient().performRequest(request);
|
||||
}
|
||||
}
|
||||
|
||||
protected void refreshAllIndices() throws IOException {
|
||||
boolean includeHidden = minimumNodeVersion().onOrAfter(Version.V_7_7_0);
|
||||
Request refreshRequest = new Request("POST", "/_refresh");
|
||||
|
@ -835,10 +776,6 @@ public abstract class ESRestTestCase extends ESTestCase {
|
|||
client().performRequest(refreshRequest);
|
||||
}
|
||||
|
||||
private void waitForPendingRollupTasks() throws Exception {
|
||||
waitForPendingTasks(adminClient(), taskName -> taskName.startsWith("xpack/rollup/job") == false);
|
||||
}
|
||||
|
||||
private static void deleteAllILMPolicies(Set<String> exclusions) throws IOException {
|
||||
Map<String, Object> policies;
|
||||
|
||||
|
|
|
@ -102,11 +102,6 @@ public abstract class AbstractFullClusterRestartTestCase extends ESRestTestCase
|
|||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean preserveRollupJobsUponCompletion() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean preserveILMPoliciesUponCompletion() {
|
||||
return true;
|
||||
|
|
Loading…
Reference in New Issue