HLRC: Add get rollup job (#33921)
Adds support for the get rollup job API to the High Level REST Client. I had to do three interesting and unexpected things:
1. I ported the rollup state wiping code into the high level client tests. I'll move this into the test framework in a followup and remove the x-pack version.
2. The `timeout` in the rollup config was serialized using the `toString` representation of `TimeValue`, which produces fractional time values that are more human readable but aren't supported by parsing. So I switched it to `getStringRep`.
3. Refactor the xcontent round trip testing utilities so we can test parsing of classes that don't implement `ToXContent`.
This commit is contained in:
parent 7dbc403226
commit f904c41506
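To illustrate point 2 above, here is a minimal, hypothetical sketch of the TimeValue behavior that motivated the switch (the class names and the "timeout" setting name are only for illustration; the relevant API is org.elasticsearch.common.unit.TimeValue as it existed at the time):

import org.elasticsearch.common.unit.TimeValue;

public class TimeValueRoundTrip {
    public static void main(String[] args) {
        TimeValue timeout = TimeValue.timeValueSeconds(90);

        // toString() favors readability and can emit fractional units such as "1.5m",
        // which TimeValue parsing does not accept.
        String pretty = timeout.toString();

        // getStringRep() keeps the original unit ("90s"), so the value survives
        // a serialize-then-parse round trip.
        String parseable = timeout.getStringRep();

        System.out.println(pretty + " vs " + parseable);
        TimeValue roundTripped = TimeValue.parseTimeValue(parseable, "timeout");
        System.out.println(roundTripped);
    }
}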
@@ -20,6 +20,8 @@
package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;

@@ -73,4 +75,37 @@ public class RollupClient {
                PutRollupJobResponse::fromXContent,
                listener, Collections.emptySet());
    }

    /**
     * Get a rollup job from the cluster.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
     * the docs</a> for more.
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetRollupJobResponse getRollupJob(GetRollupJobRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request,
                RollupRequestConverters::getJob,
                options,
                GetRollupJobResponse::fromXContent,
                Collections.emptySet());
    }

    /**
     * Asynchronously get a rollup job from the cluster.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
     * the docs</a> for more.
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void getRollupJobAsync(GetRollupJobRequest request, RequestOptions options, ActionListener<GetRollupJobResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request,
                RollupRequestConverters::getJob,
                options,
                GetRollupJobResponse::fromXContent,
                listener, Collections.emptySet());
    }
}
@@ -18,7 +18,9 @@
 */
package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobRequest;

import java.io.IOException;

@@ -42,4 +44,14 @@ final class RollupRequestConverters {
        request.setEntity(createEntity(putRollupJobRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getJob(final GetRollupJobRequest getRollupJobRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("rollup")
            .addPathPartAsIs("job")
            .addPathPart(getRollupJobRequest.getJobId())
            .build();
        return new Request(HttpGet.METHOD_NAME, endpoint);
    }
}
@@ -0,0 +1,77 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.rollup;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;

import java.util.Objects;
import java.util.Optional;

/**
 * Request to fetch rollup jobs.
 */
public class GetRollupJobRequest implements Validatable {
    private final String jobId;

    /**
     * Create a request.
     * @param jobId id of the job to return or {@code _all} to return all jobs
     */
    public GetRollupJobRequest(final String jobId) {
        Objects.requireNonNull(jobId, "jobId is required");
        if ("_all".equals(jobId)) {
            throw new IllegalArgumentException("use the default ctor to ask for all jobs");
        }
        this.jobId = jobId;
    }

    /**
     * Create a request to load all rollup jobs.
     */
    public GetRollupJobRequest() {
        this.jobId = "_all";
    }

    /**
     * ID of the job to return.
     */
    public String getJobId() {
        return jobId;
    }

    @Override
    public Optional<ValidationException> validate() {
        return Optional.empty();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final GetRollupJobRequest that = (GetRollupJobRequest) o;
        return jobId.equals(that.jobId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId);
    }
}
@@ -0,0 +1,374 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.rollup;

import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static java.util.Collections.unmodifiableList;
import static java.util.stream.Collectors.joining;

/**
 * Response from rollup's get jobs api.
 */
public class GetRollupJobResponse {
    static final ParseField JOBS = new ParseField("jobs");
    static final ParseField CONFIG = new ParseField("config");
    static final ParseField STATS = new ParseField("stats");
    static final ParseField STATUS = new ParseField("status");
    static final ParseField NUM_PAGES = new ParseField("pages_processed");
    static final ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed");
    static final ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("rollups_indexed");
    static final ParseField NUM_INVOCATIONS = new ParseField("trigger_count");
    static final ParseField STATE = new ParseField("job_state");
    static final ParseField CURRENT_POSITION = new ParseField("current_position");
    static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id");

    private List<JobWrapper> jobs;

    GetRollupJobResponse(final List<JobWrapper> jobs) {
        this.jobs = Objects.requireNonNull(jobs, "jobs is required");
    }

    /**
     * Jobs returned by the request.
     */
    public List<JobWrapper> getJobs() {
        return jobs;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final GetRollupJobResponse that = (GetRollupJobResponse) o;
        return jobs.equals(that.jobs);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobs);
    }

    private static final ConstructingObjectParser<GetRollupJobResponse, Void> PARSER = new ConstructingObjectParser<>(
            "get_rollup_job_response",
            true,
            args -> {
                @SuppressWarnings("unchecked") // We're careful about the type in the list
                List<JobWrapper> jobs = (List<JobWrapper>) args[0];
                return new GetRollupJobResponse(unmodifiableList(jobs));
            });
    static {
        PARSER.declareObjectArray(constructorArg(), JobWrapper.PARSER::apply, JOBS);
    }

    public static GetRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public final String toString() {
        return "{jobs=" + jobs.stream().map(Object::toString).collect(joining("\n")) + "\n}";
    }

    public static class JobWrapper {
        private final RollupJobConfig job;
        private final RollupIndexerJobStats stats;
        private final RollupJobStatus status;

        JobWrapper(RollupJobConfig job, RollupIndexerJobStats stats, RollupJobStatus status) {
            this.job = job;
            this.stats = stats;
            this.status = status;
        }

        /**
         * Configuration of the job.
         */
        public RollupJobConfig getJob() {
            return job;
        }

        /**
         * Statistics about the execution of the job.
         */
        public RollupIndexerJobStats getStats() {
            return stats;
        }

        /**
         * Current state of the job.
         */
        public RollupJobStatus getStatus() {
            return status;
        }

        private static final ConstructingObjectParser<JobWrapper, Void> PARSER = new ConstructingObjectParser<>(
                "job",
                true,
                a -> new JobWrapper((RollupJobConfig) a[0], (RollupIndexerJobStats) a[1], (RollupJobStatus) a[2]));
        static {
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG);
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupIndexerJobStats.PARSER::apply, STATS);
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStatus.PARSER::apply, STATUS);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            JobWrapper other = (JobWrapper) obj;
            return Objects.equals(job, other.job)
                && Objects.equals(stats, other.stats)
                && Objects.equals(status, other.status);
        }

        @Override
        public int hashCode() {
            return Objects.hash(job, stats, status);
        }

        @Override
        public final String toString() {
            return "{job=" + job
                + ", stats=" + stats
                + ", status=" + status + "}";
        }
    }

    /**
     * The Rollup specialization of stats for the AsyncTwoPhaseIndexer.
     * Note: instead of `documents_indexed`, this XContent shows `rollups_indexed`.
     */
    public static class RollupIndexerJobStats {
        private final long numPages;
        private final long numInputDocuments;
        private final long numOuputDocuments;
        private final long numInvocations;

        RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations) {
            this.numPages = numPages;
            this.numInputDocuments = numInputDocuments;
            this.numOuputDocuments = numOuputDocuments;
            this.numInvocations = numInvocations;
        }

        /**
         * The number of pages read from the input indices.
         */
        public long getNumPages() {
            return numPages;
        }

        /**
         * The number of documents read from the input indices.
         */
        public long getNumDocuments() {
            return numInputDocuments;
        }

        /**
         * Number of times that the job woke up to write documents.
         */
        public long getNumInvocations() {
            return numInvocations;
        }

        /**
         * Number of documents written to the result indices.
         */
        public long getOutputDocuments() {
            return numOuputDocuments;
        }

        private static final ConstructingObjectParser<RollupIndexerJobStats, Void> PARSER = new ConstructingObjectParser<>(
                STATS.getPreferredName(),
                true,
                args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3]));
        static {
            PARSER.declareLong(constructorArg(), NUM_PAGES);
            PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS);
            PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS);
            PARSER.declareLong(constructorArg(), NUM_INVOCATIONS);
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            RollupIndexerJobStats that = (RollupIndexerJobStats) other;
            return Objects.equals(this.numPages, that.numPages)
                && Objects.equals(this.numInputDocuments, that.numInputDocuments)
                && Objects.equals(this.numOuputDocuments, that.numOuputDocuments)
                && Objects.equals(this.numInvocations, that.numInvocations);
        }

        @Override
        public int hashCode() {
            return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations);
        }

        @Override
        public final String toString() {
            return "{pages=" + numPages
                + ", input_docs=" + numInputDocuments
                + ", output_docs=" + numOuputDocuments
                + ", invocations=" + numInvocations + "}";
        }
    }

    /**
     * Status of the rollup job.
     */
    public static class RollupJobStatus {
        private final IndexerState state;
        private final Map<String, Object> currentPosition;
        private final boolean upgradedDocumentId;

        RollupJobStatus(IndexerState state, Map<String, Object> position, boolean upgradedDocumentId) {
            this.state = state;
            this.currentPosition = position;
            this.upgradedDocumentId = upgradedDocumentId;
        }

        /**
         * The state of the writer.
         */
        public IndexerState getState() {
            return state;
        }

        /**
         * The current position of the writer.
         */
        public Map<String, Object> getCurrentPosition() {
            return currentPosition;
        }

        /**
         * Flag holds the state of the ID scheme, e.g. if it has been upgraded
         * to the concatenation scheme.
         */
        public boolean getUpgradedDocumentId() {
            return upgradedDocumentId;
        }

        private static final ConstructingObjectParser<RollupJobStatus, Void> PARSER = new ConstructingObjectParser<>(
                STATUS.getPreferredName(),
                true,
                args -> {
                    IndexerState state = (IndexerState) args[0];
                    @SuppressWarnings("unchecked") // We're careful of the contents
                    Map<String, Object> currentPosition = (Map<String, Object>) args[1];
                    Boolean upgradedDocumentId = (Boolean) args[2];
                    return new RollupJobStatus(state, currentPosition, upgradedDocumentId == null ? false : upgradedDocumentId);
                });
        static {
            PARSER.declareField(constructorArg(), p -> IndexerState.fromString(p.text()), STATE, ObjectParser.ValueType.STRING);
            PARSER.declareField(optionalConstructorArg(), p -> {
                if (p.currentToken() == XContentParser.Token.START_OBJECT) {
                    return p.map();
                }
                if (p.currentToken() == XContentParser.Token.VALUE_NULL) {
                    return null;
                }
                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
            }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY);

            // Optional to accommodate old versions of state
            PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), UPGRADED_DOC_ID);
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            RollupJobStatus that = (RollupJobStatus) other;
            return Objects.equals(state, that.state)
                && Objects.equals(currentPosition, that.currentPosition)
                && upgradedDocumentId == that.upgradedDocumentId;
        }

        @Override
        public int hashCode() {
            return Objects.hash(state, currentPosition, upgradedDocumentId);
        }

        @Override
        public final String toString() {
            return "{stats=" + state
                + ", currentPosition=" + currentPosition
                + ", upgradedDocumentId=" + upgradedDocumentId + "}";
        }
    }

    /**
     * IndexerState represents the internal state of the indexer. It
     * is also persistent when changing from started/stopped in case the allocated
     * task is restarted elsewhere.
     */
    public enum IndexerState {
        /** Indexer is running, but not actively indexing data (e.g. it's idle). */
        STARTED,

        /** Indexer is actively indexing data. */
        INDEXING,

        /**
         * Transition state to where an indexer has acknowledged the stop
         * but is still in process of halting.
         */
        STOPPING,

        /** Indexer is "paused" and ignoring scheduled triggers. */
        STOPPED,

        /**
         * Something (internal or external) has requested the indexer abort
         * and shutdown.
         */
        ABORTING;

        static IndexerState fromString(String name) {
            return valueOf(name.trim().toUpperCase(Locale.ROOT));
        }

        String value() {
            return name().toLowerCase(Locale.ROOT);
        }
    }
}
@@ -27,6 +27,10 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState;
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;

@@ -50,6 +54,13 @@ import java.util.Locale;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.lessThan;

public class RollupIT extends ESRestHighLevelClientTestCase {

@@ -57,7 +68,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
        SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);

    @SuppressWarnings("unchecked")
    public void testPutRollupJob() throws Exception {
    public void testPutAndGetRollupJob() throws Exception {
        double sum = 0.0d;
        int max = Integer.MIN_VALUE;
        int min = Integer.MAX_VALUE;

@@ -90,7 +101,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {

        BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
        assertEquals(RestStatus.OK, bulkResponse.status());
        if (bulkResponse.hasFailures()) {
            for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
                if (itemResponse.isFailed()) {
                    logger.fatal(itemResponse.getFailureMessage());

@@ -158,5 +169,26 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
                }
            }
        });

        // TODO when we move cleaning rollup into ESTestCase we can randomly choose the _all version of this request
        GetRollupJobRequest getRollupJobRequest = new GetRollupJobRequest(id);
        GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
        assertThat(getResponse.getJobs(), hasSize(1));
        JobWrapper job = getResponse.getJobs().get(0);
        assertEquals(putRollupJobRequest.getConfig(), job.getJob());
        assertThat(job.getStats().getNumPages(), lessThan(10L));
        assertEquals(numDocs, job.getStats().getNumDocuments());
        assertThat(job.getStats().getNumInvocations(), greaterThan(0L));
        assertEquals(1, job.getStats().getOutputDocuments());
        assertThat(job.getStatus().getState(), either(equalTo(IndexerState.STARTED)).or(equalTo(IndexerState.INDEXING)));
        assertThat(job.getStatus().getCurrentPosition(), hasKey("date.date_histogram"));
        assertEquals(true, job.getStatus().getUpgradedDocumentId());
    }

    public void testGetMissingRollupJob() throws Exception {
        GetRollupJobRequest getRollupJobRequest = new GetRollupJobRequest("missing");
        RollupClient rollupClient = highLevelClient().rollup();
        GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
        assertThat(getResponse.getJobs(), empty());
    }
}
@@ -0,0 +1,61 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.nullValue;

public class RollupRequestConvertersTests extends ESTestCase {
    public void testPutJob() throws IOException {
        String job = randomAlphaOfLength(5);

        RollupJobConfig config = RollupJobConfigTests.randomRollupJobConfig(job);
        PutRollupJobRequest put = new PutRollupJobRequest(config);

        Request request = RollupRequestConverters.putJob(put);
        assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + job));
        assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(request.getParameters().keySet(), empty());
        RequestConvertersTests.assertToXContentBody(put, request.getEntity());
    }

    public void testGetJob() {
        boolean getAll = randomBoolean();
        String job = getAll ? "_all" : RequestConvertersTests.randomIndicesNames(1, 1)[0];
        GetRollupJobRequest get = getAll ? new GetRollupJobRequest() : new GetRollupJobRequest(job);

        Request request = RollupRequestConverters.getJob(get);
        assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + job));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(request.getParameters().keySet(), empty());
        assertThat(request.getEntity(), nullValue());
    }
}
@@ -27,8 +27,15 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;

@@ -38,19 +45,26 @@ import org.elasticsearch.client.rollup.job.config.MetricConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.junit.After;
import org.junit.Before;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.hasSize;

public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {

@@ -160,4 +174,110 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    public void testGetRollupJob() throws Exception {
        testCreateRollupJob();
        RestHighLevelClient client = highLevelClient();

        // tag::x-pack-rollup-get-rollup-job-request
        GetRollupJobRequest getAll = new GetRollupJobRequest(); // <1>
        GetRollupJobRequest getJob = new GetRollupJobRequest("job_1"); // <2>
        // end::x-pack-rollup-get-rollup-job-request

        // tag::x-pack-rollup-get-rollup-job-execute
        GetRollupJobResponse response = client.rollup().getRollupJob(getJob, RequestOptions.DEFAULT);
        // end::x-pack-rollup-get-rollup-job-execute

        // tag::x-pack-rollup-get-rollup-job-response
        assertThat(response.getJobs(), hasSize(1));
        JobWrapper job = response.getJobs().get(0); // <1>
        RollupJobConfig config = job.getJob();
        RollupJobStatus status = job.getStatus();
        RollupIndexerJobStats stats = job.getStats();
        // end::x-pack-rollup-get-rollup-job-response
        assertNotNull(config);
        assertNotNull(status);
        assertNotNull(stats);

        // tag::x-pack-rollup-get-rollup-job-execute-listener
        ActionListener<GetRollupJobResponse> listener = new ActionListener<GetRollupJobResponse>() {
            @Override
            public void onResponse(GetRollupJobResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::x-pack-rollup-get-rollup-job-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::x-pack-rollup-get-rollup-job-execute-async
        client.rollup().getRollupJobAsync(getJob, RequestOptions.DEFAULT, listener); // <1>
        // end::x-pack-rollup-get-rollup-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    @After
    public void wipeRollup() throws Exception {
        // TODO move this to ESRestTestCase
        deleteRollupJobs();
        waitForPendingRollupTasks();
    }

    private void deleteRollupJobs() throws Exception {
        Response response = adminClient().performRequest(new Request("GET", "/_xpack/rollup/job/_all"));
        Map<String, Object> jobs = entityAsMap(response);
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> jobConfigs =
                (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobs);

        if (jobConfigs == null) {
            return;
        }

        for (Map<String, Object> jobConfig : jobConfigs) {
            @SuppressWarnings("unchecked")
            String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
            Request request = new Request("DELETE", "/_xpack/rollup/job/" + jobId);
            request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this
            adminClient().performRequest(request);
        }
    }

    private void waitForPendingRollupTasks() throws Exception {
        assertBusy(() -> {
            try {
                Request request = new Request("GET", "/_cat/tasks");
                request.addParameter("detailed", "true");
                Response response = adminClient().performRequest(request);

                try (BufferedReader responseReader = new BufferedReader(
                        new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
                    int activeTasks = 0;
                    String line;
                    StringBuilder tasksListString = new StringBuilder();
                    while ((line = responseReader.readLine()) != null) {

                        // We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks
                        if (line.startsWith("xpack/rollup/job") == true) {
                            activeTasks++;
                            tasksListString.append(line).append('\n');
                        }
                    }
                    assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks);
                }
            } catch (IOException e) {
                // Throw an assertion error so we retry
                throw new AssertionError("Error getting active tasks list", e);
            }
        });
    }
}
@@ -0,0 +1,33 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.rollup;

import org.elasticsearch.test.ESTestCase;

public class GetRollupJobRequestTests extends ESTestCase {
    public void testRequiresJob() {
        final NullPointerException e = expectThrows(NullPointerException.class, () -> new GetRollupJobRequest(null));
        assertEquals("jobId is required", e.getMessage());
    }

    public void testDoNotUseAll() {
        final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GetRollupJobRequest("_all"));
        assertEquals("use the default ctor to ask for all jobs", e.getMessage());
    }
}
@@ -0,0 +1,120 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.rollup;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState;
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;

public class GetRollupJobResponseTests extends ESTestCase {
    public void testFromXContent() throws IOException {
        xContentTester(
                this::createParser,
                this::createTestInstance,
                this::toXContent,
                GetRollupJobResponse::fromXContent)
            .supportsUnknownFields(true)
            .randomFieldsExcludeFilter(field ->
                    field.endsWith("status.current_position"))
            .test();
    }

    private GetRollupJobResponse createTestInstance() {
        int jobCount = between(1, 5);
        List<JobWrapper> jobs = new ArrayList<>();
        for (int j = 0; j < jobCount; j++) {
            jobs.add(new JobWrapper(
                    RollupJobConfigTests.randomRollupJobConfig(randomAlphaOfLength(5)),
                    randomStats(),
                    randomStatus()));
        }
        return new GetRollupJobResponse(jobs);
    }

    private RollupIndexerJobStats randomStats() {
        return new RollupIndexerJobStats(randomLong(), randomLong(), randomLong(), randomLong());
    }

    private RollupJobStatus randomStatus() {
        Map<String, Object> currentPosition = new HashMap<>();
        int positions = between(0, 10);
        while (currentPosition.size() < positions) {
            currentPosition.put(randomAlphaOfLength(2), randomAlphaOfLength(2));
        }
        return new RollupJobStatus(
                randomFrom(IndexerState.values()),
                currentPosition,
                randomBoolean());
    }

    private void toXContent(GetRollupJobResponse response, XContentBuilder builder) throws IOException {
        ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        builder.startObject();
        builder.startArray(GetRollupJobResponse.JOBS.getPreferredName());
        for (JobWrapper job : response.getJobs()) {
            toXContent(job, builder, params);
        }
        builder.endArray();
        builder.endObject();
    }

    private void toXContent(JobWrapper jobWrapper, XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(GetRollupJobResponse.CONFIG.getPreferredName());
        jobWrapper.getJob().toXContent(builder, params);
        builder.field(GetRollupJobResponse.STATUS.getPreferredName());
        toXContent(jobWrapper.getStatus(), builder, params);
        builder.field(GetRollupJobResponse.STATS.getPreferredName());
        toXContent(jobWrapper.getStats(), builder, params);
        builder.endObject();
    }

    public void toXContent(RollupJobStatus status, XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(GetRollupJobResponse.STATE.getPreferredName(), status.getState().value());
        if (status.getCurrentPosition() != null) {
            builder.field(GetRollupJobResponse.CURRENT_POSITION.getPreferredName(), status.getCurrentPosition());
        }
        builder.field(GetRollupJobResponse.UPGRADED_DOC_ID.getPreferredName(), status.getUpgradedDocumentId());
        builder.endObject();
    }

    public void toXContent(RollupIndexerJobStats stats, XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(GetRollupJobResponse.NUM_PAGES.getPreferredName(), stats.getNumPages());
        builder.field(GetRollupJobResponse.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments());
        builder.field(GetRollupJobResponse.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments());
        builder.field(GetRollupJobResponse.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations());
        builder.endObject();
    }
}
docs/java-rest/high-level/rollup/get_job.asciidoc (new file)
@@ -0,0 +1,71 @@
[[java-rest-high-x-pack-rollup-get-job]]
=== Get Rollup Job API

The Get Rollup Job API can be used to get one or all rollup jobs from the
cluster. It accepts a `GetRollupJobRequest` object as a request and returns
a `GetRollupJobResponse`.

[[java-rest-high-x-pack-rollup-get-rollup-job-request]]
==== Get Rollup Job Request

A `GetRollupJobRequest` can be built without any parameters to get all of the
rollup jobs or with a job name to get a single job:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-request]
--------------------------------------------------
<1> Gets all jobs.
<2> Gets `job_1`.

[[java-rest-high-x-pack-rollup-get-rollup-job-execution]]
==== Execution

The Get Rollup Job API can be executed through a `RollupClient`
instance. Such an instance can be retrieved from a `RestHighLevelClient`
using the `rollup()` method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute]
--------------------------------------------------

[[java-rest-high-x-pack-rollup-get-rollup-job-response]]
==== Response

The returned `GetRollupJobResponse` includes a `JobWrapper` per returned job
which contains the configuration of the job, the job's current status, and
statistics about the job's past execution.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-response]
--------------------------------------------------
<1> We only asked for a single job.

[[java-rest-high-x-pack-rollup-get-rollup-job-async]]
==== Asynchronous Execution

This request can be executed asynchronously:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute-async]
--------------------------------------------------
<1> The `GetRollupJobRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `GetRollupJobResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument
@@ -285,8 +285,10 @@ include::migration/get-assistance.asciidoc[]
The Java High Level REST Client supports the following Rollup APIs:

* <<java-rest-high-x-pack-rollup-put-job>>
* <<java-rest-high-x-pack-rollup-get-job>>

include::rollup/put_job.asciidoc[]
include::rollup/get_job.asciidoc[]

== Security APIs

@@ -20,6 +20,7 @@
package org.elasticsearch.test;

import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;

@@ -38,34 +39,147 @@ import java.util.function.Supplier;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;

public abstract class AbstractXContentTestCase<T extends ToXContent> extends ESTestCase {
    protected static final int NUMBER_OF_TEST_RUNS = 20;

    public static <T extends ToXContent> void testFromXContent(int numberOfTestRuns, Supplier<T> instanceSupplier,
            boolean supportsUnknownFields, String[] shuffleFieldsExceptions,
            Predicate<String> randomFieldsExcludeFilter,
            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException>
                    createParserFunction,
            CheckedFunction<XContentParser, T, IOException> parseFunction,
            BiConsumer<T, T> assertEqualsConsumer,
            boolean assertToXContentEquivalence,
            ToXContent.Params toXContentParams) throws IOException {
        for (int runs = 0; runs < numberOfTestRuns; runs++) {
            T testInstance = instanceSupplier.get();
            XContentType xContentType = randomFrom(XContentType.values());
            BytesReference shuffledContent = insertRandomFieldsAndShuffle(testInstance, xContentType, supportsUnknownFields,
                    shuffleFieldsExceptions, randomFieldsExcludeFilter, createParserFunction, toXContentParams);
            XContentParser parser = createParserFunction.apply(XContentFactory.xContent(xContentType), shuffledContent);
            T parsed = parseFunction.apply(parser);
            assertEqualsConsumer.accept(testInstance, parsed);
            if (assertToXContentEquivalence) {
                assertToXContentEquivalent(
    public static <T> XContentTester<T> xContentTester(
            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
            Supplier<T> instanceSupplier,
            CheckedBiConsumer<T, XContentBuilder, IOException> toXContent,
            CheckedFunction<XContentParser, T, IOException> fromXContent) {
        return new XContentTester<T>(
                createParser,
                instanceSupplier,
                (testInstance, xContentType) -> {
                    try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
                        toXContent.accept(testInstance, builder);
                        return BytesReference.bytes(builder);
                    }
                },
                fromXContent);
    }

    public static <T extends ToXContent> XContentTester<T> xContentTester(
            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
            Supplier<T> instanceSupplier,
            CheckedFunction<XContentParser, T, IOException> fromXContent) {
        return xContentTester(createParser, instanceSupplier, ToXContent.EMPTY_PARAMS, fromXContent);
    }

    public static <T extends ToXContent> XContentTester<T> xContentTester(
            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
            Supplier<T> instanceSupplier,
            ToXContent.Params toXContentParams,
            CheckedFunction<XContentParser, T, IOException> fromXContent) {
        return new XContentTester<T>(
                createParser,
                instanceSupplier,
                (testInstance, xContentType) ->
                        XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false),
                    XContentHelper.toXContent(parsed, xContentType, toXContentParams, false),
                    xContentType);
                fromXContent);
    }

    /**
     * Tests converting to and from xcontent.
     */
    public static class XContentTester<T> {
        private final CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser;
        private final Supplier<T> instanceSupplier;
        private final CheckedBiFunction<T, XContentType, BytesReference, IOException> toXContent;
        private final CheckedFunction<XContentParser, T, IOException> fromXContent;

        private int numberOfTestRuns = NUMBER_OF_TEST_RUNS;
        private boolean supportsUnknownFields = false;
        private String[] shuffleFieldsExceptions = Strings.EMPTY_ARRAY;
        private Predicate<String> randomFieldsExcludeFilter = field -> false;
        private BiConsumer<T, T> assertEqualsConsumer = (expectedInstance, newInstance) -> {
            assertNotSame(newInstance, expectedInstance);
            assertEquals(expectedInstance, newInstance);
            assertEquals(expectedInstance.hashCode(), newInstance.hashCode());
        };
        private boolean assertToXContentEquivalence = true;

        private XContentTester(
                CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
                Supplier<T> instanceSupplier,
                CheckedBiFunction<T, XContentType, BytesReference, IOException> toXContent,
                CheckedFunction<XContentParser, T, IOException> fromXContent) {
            this.createParser = createParser;
            this.instanceSupplier = instanceSupplier;
            this.toXContent = toXContent;
            this.fromXContent = fromXContent;
        }

        public void test() throws IOException {
            for (int runs = 0; runs < numberOfTestRuns; runs++) {
                T testInstance = instanceSupplier.get();
                XContentType xContentType = randomFrom(XContentType.values());
                BytesReference originalXContent = toXContent.apply(testInstance, xContentType);
                BytesReference shuffledContent = insertRandomFieldsAndShuffle(originalXContent, xContentType, supportsUnknownFields,
                        shuffleFieldsExceptions, randomFieldsExcludeFilter, createParser);
                XContentParser parser = createParser.apply(XContentFactory.xContent(xContentType), shuffledContent);
                T parsed = fromXContent.apply(parser);
                assertEqualsConsumer.accept(testInstance, parsed);
                if (assertToXContentEquivalence) {
                    assertToXContentEquivalent(
                            toXContent.apply(testInstance, xContentType),
                            toXContent.apply(parsed, xContentType),
                            xContentType);
                }
            }
        }

        public XContentTester<T> numberOfTestRuns(int numberOfTestRuns) {
            this.numberOfTestRuns = numberOfTestRuns;
            return this;
        }

        public XContentTester<T> supportsUnknownFields(boolean supportsUnknownFields) {
            this.supportsUnknownFields = supportsUnknownFields;
            return this;
        }

        public XContentTester<T> shuffleFieldsExceptions(String[] shuffleFieldsExceptions) {
            this.shuffleFieldsExceptions = shuffleFieldsExceptions;
            return this;
        }

        public XContentTester<T> randomFieldsExcludeFilter(Predicate<String> randomFieldsExcludeFilter) {
            this.randomFieldsExcludeFilter = randomFieldsExcludeFilter;
            return this;
        }

        public XContentTester<T> assertEqualsConsumer(BiConsumer<T, T> assertEqualsConsumer) {
            this.assertEqualsConsumer = assertEqualsConsumer;
            return this;
        }

        public XContentTester<T> assertToXContentEquivalence(boolean assertToXContentEquivalence) {
            this.assertToXContentEquivalence = assertToXContentEquivalence;
            return this;
        }
    }

    public static <T extends ToXContent> void testFromXContent(
            int numberOfTestRuns,
            Supplier<T> instanceSupplier,
            boolean supportsUnknownFields,
            String[] shuffleFieldsExceptions,
            Predicate<String> randomFieldsExcludeFilter,
            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParserFunction,
            CheckedFunction<XContentParser, T, IOException> fromXContent,
            BiConsumer<T, T> assertEqualsConsumer,
            boolean assertToXContentEquivalence,
            ToXContent.Params toXContentParams) throws IOException {
        xContentTester(createParserFunction, instanceSupplier, toXContentParams, fromXContent)
            .numberOfTestRuns(numberOfTestRuns)
            .supportsUnknownFields(supportsUnknownFields)
            .shuffleFieldsExceptions(shuffleFieldsExceptions)
            .randomFieldsExcludeFilter(randomFieldsExcludeFilter)
            .assertEqualsConsumer(assertEqualsConsumer)
            .assertToXContentEquivalence(assertToXContentEquivalence)
            .test();
    }

    /**
@@ -133,11 +247,9 @@ public abstract class AbstractXContentTestCase<T extends ToXContent> extends EST
        return ToXContent.EMPTY_PARAMS;
    }

    static BytesReference insertRandomFieldsAndShuffle(ToXContent testInstance, XContentType xContentType,
    static BytesReference insertRandomFieldsAndShuffle(BytesReference xContent, XContentType xContentType,
            boolean supportsUnknownFields, String[] shuffleFieldsExceptions, Predicate<String> randomFieldsExcludeFilter,
            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParserFunction,
            ToXContent.Params toXContentParams) throws IOException {
        BytesReference xContent = XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false);
            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParserFunction) throws IOException {
        BytesReference withRandomFields;
        if (supportsUnknownFields) {
            // add a few random fields to check that the parser is lenient on new fields

@@ -22,13 +22,11 @@ package org.elasticsearch.test;
import com.carrotsearch.randomizedtesting.RandomizedContext;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;

@@ -37,29 +35,24 @@ import static org.hamcrest.Matchers.not;
public class AbstractXContentTestCaseTests extends ESTestCase {

    public void testInsertRandomFieldsAndShuffle() throws Exception {
        TestInstance t = new TestInstance();
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        {
            builder.field("field", 1);
        }
        builder.endObject();
        BytesReference insertRandomFieldsAndShuffle = RandomizedContext.current().runWithPrivateRandomness(1,
            () -> AbstractXContentTestCase.insertRandomFieldsAndShuffle(t, XContentType.JSON, true, new String[] {}, null,
                this::createParser, ToXContent.EMPTY_PARAMS));
            () -> AbstractXContentTestCase.insertRandomFieldsAndShuffle(
                BytesReference.bytes(builder),
                XContentType.JSON,
                true,
                new String[] {},
                null,
                this::createParser));
        try (XContentParser parser = createParser(XContentType.JSON.xContent(), insertRandomFieldsAndShuffle)) {
            Map<String, Object> mapOrdered = parser.mapOrdered();
            assertThat(mapOrdered.size(), equalTo(2));
            assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field")));
        }
    }

    private class TestInstance implements ToXContentObject {

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            {
                builder.field("field", 1);
            }
            builder.endObject();
            return builder;
        }

    }

}
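The practical effect of the refactor above (point 3 of the commit message) is that a class which never implements ToXContent can still be round-trip tested by passing an explicit serializer to xContentTester. A condensed sketch, modeled on the new GetRollupJobResponseTests in this commit (it assumes a test class extending ESTestCase that provides createTestInstance and toXContent helpers):

public void testFromXContent() throws IOException {
    xContentTester(
        this::createParser,                  // builds an XContentParser over the serialized bytes
        this::createTestInstance,            // supplies a random GetRollupJobResponse
        this::toXContent,                    // hand-written serializer, since the class has no ToXContent
        GetRollupJobResponse::fromXContent)  // the parser under test
        .supportsUnknownFields(true)
        .test();
}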
@@ -222,7 +222,7 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject {
                builder.endArray();
            }
            if (timeout != null) {
                builder.field(TIMEOUT, timeout);
                builder.field(TIMEOUT, timeout.getStringRep());
            }
            builder.field(PAGE_SIZE, pageSize);
        }
@@ -163,6 +163,6 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {

    @Override
    public int hashCode() {
        return Objects.hash(state, currentPosition, upgradedDocumentID);
    }
}