Add HLRC docs for ILM Start/Stop/Status (#35672)

Adds HLRC documentation for the Start ILM, Stop ILM, and ILM Status APIs, as
these APIs are tightly linked to each other.
Gordon Brown 2018-11-19 17:17:01 -07:00 committed by GitHub
parent bf02897173
commit e012ac4649
5 changed files with 271 additions and 1 deletion


@@ -32,11 +32,16 @@ import org.elasticsearch.client.indexlifecycle.DeleteLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.GetLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.GetLifecyclePolicyResponse;
import org.elasticsearch.client.indexlifecycle.LifecycleAction;
import org.elasticsearch.client.indexlifecycle.LifecycleManagementStatusRequest;
import org.elasticsearch.client.indexlifecycle.LifecycleManagementStatusResponse;
import org.elasticsearch.client.indexlifecycle.LifecyclePolicy;
import org.elasticsearch.client.indexlifecycle.OperationMode;
import org.elasticsearch.client.indexlifecycle.LifecyclePolicyMetadata;
import org.elasticsearch.client.indexlifecycle.Phase;
import org.elasticsearch.client.indexlifecycle.PutLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.RolloverAction;
import org.elasticsearch.client.indexlifecycle.StartILMRequest;
import org.elasticsearch.client.indexlifecycle.StopILMRequest;
import org.elasticsearch.client.indexlifecycle.ShrinkAction;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.unit.ByteSizeUnit;
@@ -44,6 +49,7 @@ import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Collections;
@@ -52,6 +58,8 @@ import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
public void testPutLifecyclePolicy() throws Exception {
@@ -226,6 +234,152 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
public void testStartStopStatus() throws Exception {
RestHighLevelClient client = highLevelClient();
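// Stop ILM first so the synchronous status call below can observe STOPPING or STOPPED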
stopILM(client);
// tag::ilm-status-request
LifecycleManagementStatusRequest request =
new LifecycleManagementStatusRequest();
// end::ilm-status-request
// Check that ILM has stopped
{
// tag::ilm-status-execute
LifecycleManagementStatusResponse response =
client.indexLifecycle()
.lifecycleManagementStatus(request, RequestOptions.DEFAULT);
// end::ilm-status-execute
// tag::ilm-status-response
OperationMode operationMode = response.getOperationMode(); // <1>
// end::ilm-status-response
assertThat(operationMode, Matchers.either(equalTo(OperationMode.STOPPING)).or(equalTo(OperationMode.STOPPED)));
}
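// Start ILM again; the status check at the end of the test expects RUNNING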
startILM(client);
// tag::ilm-status-execute-listener
ActionListener<LifecycleManagementStatusResponse> listener =
new ActionListener<LifecycleManagementStatusResponse>() {
@Override
public void onResponse(
LifecycleManagementStatusResponse response) {
OperationMode operationMode = response
.getOperationMode(); // <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::ilm-status-execute-listener
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::ilm-status-execute-async
client.indexLifecycle().lifecycleManagementStatusAsync(request,
RequestOptions.DEFAULT, listener); // <1>
// end::ilm-status-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
// Check that ILM is running again
LifecycleManagementStatusResponse response =
client.indexLifecycle()
.lifecycleManagementStatus(request, RequestOptions.DEFAULT);
OperationMode operationMode = response.getOperationMode();
assertEquals(OperationMode.RUNNING, operationMode);
}
private void stopILM(RestHighLevelClient client) throws IOException, InterruptedException {
// tag::ilm-stop-ilm-request
StopILMRequest request = new StopILMRequest();
// end::ilm-stop-ilm-request
// tag::ilm-stop-ilm-execute
AcknowledgedResponse response = client.indexLifecycle()
.stopILM(request, RequestOptions.DEFAULT);
// end::ilm-stop-ilm-execute
// tag::ilm-stop-ilm-response
boolean acknowledged = response.isAcknowledged(); // <1>
// end::ilm-stop-ilm-response
assertTrue(acknowledged);
// tag::ilm-stop-ilm-execute-listener
ActionListener<AcknowledgedResponse> listener =
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse response) {
boolean acknowledged = response.isAcknowledged(); // <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::ilm-stop-ilm-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::ilm-stop-ilm-execute-async
client.indexLifecycle().stopILMAsync(request,
RequestOptions.DEFAULT, listener); // <1>
// end::ilm-stop-ilm-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
private void startILM(RestHighLevelClient client) throws IOException, InterruptedException {
// tag::ilm-start-ilm-request
StartILMRequest request1 = new StartILMRequest();
// end::ilm-start-ilm-request
// tag::ilm-start-ilm-execute
AcknowledgedResponse response = client.indexLifecycle()
.startILM(request1, RequestOptions.DEFAULT);
// end::ilm-start-ilm-execute
// tag::ilm-start-ilm-response
boolean acknowledged = response.isAcknowledged(); // <1>
// end::ilm-start-ilm-response
assertTrue(acknowledged);
// tag::ilm-start-ilm-execute-listener
ActionListener<AcknowledgedResponse> listener =
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse response) {
boolean acknowledged = response.isAcknowledged(); // <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::ilm-start-ilm-execute-listener
// Replace the empty listener with a blocking listener for the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::ilm-start-ilm-execute-async
client.indexLifecycle().startILMAsync(request1,
RequestOptions.DEFAULT, listener); // <1>
// end::ilm-start-ilm-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
static Map<String, Object> toMap(Response response) throws IOException {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
}


@@ -0,0 +1,36 @@
--
:api: ilm-status
:request: LifecycleManagementStatusRequest
:response: LifecycleManagementStatusResponse
--
[id="{upid}-{api}"]
=== Index Lifecycle Management Status API
[id="{upid}-{api}-request"]
==== Request
The Index Lifecycle Management Status API allows you to retrieve the current
status (operation mode) of Index Lifecycle Management.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ indicates the status of Index Lifecycle Management.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> The returned status can be `RUNNING`, `STOPPING`, or `STOPPED`.
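For reference, the tagged snippets above come from the `ILMDocumentationIT` test
added in this commit. Condensed into a single synchronous sketch, and assuming
`client` is an already-configured `RestHighLevelClient`, the call looks roughly
like this:
["source","java"]
--------------------------------------------------
LifecycleManagementStatusRequest request =
    new LifecycleManagementStatusRequest();
LifecycleManagementStatusResponse response = client.indexLifecycle()
    .lifecycleManagementStatus(request, RequestOptions.DEFAULT);
// RUNNING, STOPPING, or STOPPED
OperationMode operationMode = response.getOperationMode();
--------------------------------------------------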
include::../execution.asciidoc[]


@@ -0,0 +1,36 @@
--
:api: ilm-start-ilm
:request: StartILMRequest
:response: AcknowledgedResponse
--
[id="{upid}-{api}"]
=== Start Index Lifecycle Management API
[id="{upid}-{api}-request"]
==== Request
The Start Index Lifecycle Management API allows you to start Index Lifecycle
Management if it has previously been stopped.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ indicates whether the request to start Index
Lifecycle Management was acknowledged.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Whether or not the request to start Index Lifecycle Management was
acknowledged.
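For reference, these tagged snippets are drawn from the `ILMDocumentationIT`
test in this commit. A condensed synchronous sketch, assuming `client` is an
already-configured `RestHighLevelClient`, looks roughly like this:
["source","java"]
--------------------------------------------------
StartILMRequest request = new StartILMRequest();
AcknowledgedResponse response = client.indexLifecycle()
    .startILM(request, RequestOptions.DEFAULT);
// true if the request was acknowledged
boolean acknowledged = response.isAcknowledged();
--------------------------------------------------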
include::../execution.asciidoc[]


@@ -0,0 +1,38 @@
--
:api: ilm-stop-ilm
:request: StopILMRequest
:response: AcknowledgedResponse
--
[id="{upid}-{api}"]
=== Stop Index Lifecycle Management API
[id="{upid}-{api}-request"]
==== Request
The Stop Index Lifecycle Management API allows you to stop Index Lifecycle
Management temporarily.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ indicates whether the request to stop Index
Lifecycle Management was acknowledged.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Whether or not the request to stop Index Lifecycle Management was
acknowledged.
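For reference, the tagged snippets above also come from `ILMDocumentationIT`.
A condensed synchronous sketch, again assuming `client` is an
already-configured `RestHighLevelClient`, looks roughly like this:
["source","java"]
--------------------------------------------------
StopILMRequest request = new StopILMRequest();
AcknowledgedResponse response = client.indexLifecycle()
    .stopILM(request, RequestOptions.DEFAULT);
// true if the request was acknowledged
boolean acknowledged = response.isAcknowledged();
--------------------------------------------------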
include::../execution.asciidoc[]


@@ -452,7 +452,13 @@ Management APIs:
* <<{upid}-ilm-put-lifecycle-policy>>
* <<{upid}-ilm-get-lifecycle-policy>>
* <<{upid}-ilm-start-ilm>>
* <<{upid}-ilm-stop-ilm>>
* <<{upid}-ilm-status>>
include::ilm/put_lifecycle_policy.asciidoc[]
include::ilm/get_lifecycle_policy.asciidoc[]
include::ilm/start_lifecycle_management.asciidoc[]
include::ilm/stop_lifecycle_management.asciidoc[]
include::ilm/lifecycle_management_status.asciidoc[]