[Ingest] Add REST _ingest/pipeline to get all pipelines

This adds an extra REST handler for "_ingest/pipeline" so that users do not need to supply "_ingest/pipeline/*" to get all pipelines.

- Also adds a teardown section to the related REST tests for ingest.
Chris Earle 2016-07-26 13:14:27 -04:00
parent 3c0288ee98
commit 0553ba9151
16 changed files with 150 additions and 9 deletions
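
As a quick illustration of what the change enables, here is a hypothetical usage sketch (not part of this commit): it assumes a connected transport Client named "client" and the standard ClusterAdminClient ingest helpers; the REST equivalent is simply GET /_ingest/pipeline with no trailing ID.

    // Hypothetical sketch, not part of this commit: assumes a connected Client named "client".
    // After this change, asking for no IDs at all returns every stored pipeline,
    // exactly as if the caller had asked for "*".
    GetPipelineResponse everything = client.admin().cluster().prepareGetPipeline().get();
    GetPipelineResponse viaWildcard = client.admin().cluster().prepareGetPipeline("*").get();
    assert everything.pipelines().size() == viaWildcard.pipelines().size();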


@@ -21,26 +21,25 @@ package org.elasticsearch.action.ingest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;

public class GetPipelineRequest extends MasterNodeReadRequest<GetPipelineRequest> {

    private String[] ids;

    public GetPipelineRequest(String... ids) {
-        if (ids == null || ids.length == 0) {
-            throw new IllegalArgumentException("No ids specified");
+        if (ids == null) {
+            throw new IllegalArgumentException("ids cannot be null");
        }
        this.ids = ids;
    }

    GetPipelineRequest() {
        this.ids = Strings.EMPTY_ARRAY;
    }

    public String[] getIds() {


@@ -207,6 +207,11 @@ public class PipelineStore extends AbstractComponent implements ClusterStateListener {
            return Collections.emptyList();
        }

+        // if we didn't ask for _any_ ID, then we get them all (this is the same as if they ask for '*')
+        if (ids.length == 0) {
+            return new ArrayList<>(ingestMetadata.getPipelines().values());
+        }
+
        List<PipelineConfiguration> result = new ArrayList<>(ids.length);
        for (String id : ids) {
            if (Regex.isSimpleMatchPattern(id)) {
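
The hunk above shows only the new early-return; as a readability aid, here is a condensed sketch of how the whole lookup behaves after the change. The empty-IDs branch comes from the diff; the wildcard and exact-ID branches are reconstructed assumptions based on the visible Regex.isSimpleMatchPattern context line, not code included in this commit.

    // Condensed sketch of PipelineStore#innerGetPipelines after this change (assumed shape, simplified).
    List<PipelineConfiguration> innerGetPipelines(IngestMetadata ingestMetadata, String... ids) {
        if (ingestMetadata == null) {
            return Collections.emptyList();
        }
        if (ids.length == 0) {
            // no IDs requested -> every pipeline, the same result as asking for '*'
            return new ArrayList<>(ingestMetadata.getPipelines().values());
        }
        List<PipelineConfiguration> result = new ArrayList<>(ids.length);
        for (String id : ids) {
            if (Regex.isSimpleMatchPattern(id)) {
                // wildcard: add every pipeline whose ID matches the pattern (assumed branch)
                for (Map.Entry<String, PipelineConfiguration> entry : ingestMetadata.getPipelines().entrySet()) {
                    if (Regex.simpleMatch(id, entry.getKey())) {
                        result.add(entry.getValue());
                    }
                }
            } else {
                // exact ID: add it only if such a pipeline exists (assumed branch)
                PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(id);
                if (pipeline != null) {
                    result.add(pipeline);
                }
            }
        }
        return result;
    }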


@@ -35,6 +35,7 @@ public class RestGetPipelineAction extends BaseRestHandler {

    @Inject
    public RestGetPipelineAction(Settings settings, RestController controller) {
        super(settings);
+        controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline", this);
        controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this);
    }
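
Registering the bare /_ingest/pipeline path is enough on its own because the handler body does not need to change: with no {id} in the URL the request is built with zero IDs, which now means "all pipelines". Roughly, as a sketch that assumes the handler keeps using the common Strings.splitStringByCommaToArray helper (which returns an empty array for a missing parameter):

    // Sketch only: how "no {id} in the URL" maps to "all pipelines" without touching the handler body.
    // splitStringByCommaToArray yields an empty array for a null/empty value, and an empty-ID
    // GetPipelineRequest now means "return every pipeline" instead of throwing.
    GetPipelineRequest request = new GetPipelineRequest(Strings.splitStringByCommaToArray(restRequest.param("id")));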


@@ -28,11 +28,9 @@ import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@@ -48,7 +46,6 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
public class PipelineStoreTests extends ESTestCase {
@@ -216,6 +213,19 @@ public class PipelineStoreTests extends ESTestCase {
        assertThat(pipelines.size(), equalTo(2));
        assertThat(pipelines.get(0).getId(), equalTo("_id1"));
        assertThat(pipelines.get(1).getId(), equalTo("_id2"));
+
+        // get all variants: (no IDs or '*')
+        pipelines = store.innerGetPipelines(ingestMetadata);
+        pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId()));
+        assertThat(pipelines.size(), equalTo(2));
+        assertThat(pipelines.get(0).getId(), equalTo("_id1"));
+        assertThat(pipelines.get(1).getId(), equalTo("_id2"));
+
+        pipelines = store.innerGetPipelines(ingestMetadata, "*");
+        pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId()));
+        assertThat(pipelines.size(), equalTo(2));
+        assertThat(pipelines.get(0).getId(), equalTo("_id1"));
+        assertThat(pipelines.get(1).getId(), equalTo("_id2"));
    }

    public void testCrud() throws Exception {
public void testCrud() throws Exception {


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "1"
+        ignore: 404
+
+---
"Test date index name processor with defaults":
  - do:


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test sort Processor":
  - do:


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test Grok Pipeline":
  - do:


@@ -1,3 +1,18 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+  - do:
+      ingest.delete_pipeline:
+        id: "first_pipeline"
+        ignore: 404
+  - do:
+      ingest.delete_pipeline:
+        id: "second_pipeline"
+        ignore: 404
+
+---
"Test basic pipeline crud":
  - do:
@@ -23,6 +38,11 @@
  - match: { pipelines.0.id: "my_pipeline" }
  - match: { pipelines.0.config.description: "_description" }

+  - do:
+      ingest.get_pipeline: {}
+  - match: { pipelines.0.id: "my_pipeline" }
+  - match: { pipelines.0.config.description: "_description" }
+
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
@@ -33,6 +53,38 @@
      ingest.get_pipeline:
        id: "my_pipeline"

---
+"Test Get All Pipelines (unordered)":
+  - do:
+      ingest.put_pipeline:
+        id: "first_pipeline"
+        body: >
+          {
+            "description": "first",
+            "processors": [
+              {
+                "set" : {
+                  "field" : "field1",
+                  "value": "_value"
+                }
+              }
+            ]
+          }
+  - do:
+      ingest.put_pipeline:
+        id: "second_pipeline"
+        body: >
+          {
+            "description": "second",
+            "processors": []
+          }
+
+  # Order is not guaranteed by the response, so we check for length instead; above tests that we get appropriate values
+  - do:
+      ingest.get_pipeline: {}
+  - length: { pipelines: 2 }
+
+---
"Test invalid config":
  - do:


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test date processor":
  - do:


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test mutate processors":
  - do:


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test Pipeline With On Failure Block":
  - do:


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test Fail Processor":
  - do:


@@ -31,6 +31,17 @@ setup:
            ]
          }

---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "pipeline1"
+        ignore: 404
+  - do:
+      ingest.delete_pipeline:
+        id: "pipeline2"
+        ignore: 404
+
+---
"Test bulk request without default pipeline":


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test foreach Processor":
  - do:


@@ -1,3 +1,10 @@
---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
"Test simulate with stored ingest pipeline":
  - do:


@@ -4,7 +4,7 @@
    "methods": [ "GET" ],
    "url": {
      "path": "/_ingest/pipeline/{id}",
-      "paths": [ "/_ingest/pipeline/{id}" ],
+      "paths": [ "/_ingest/pipeline", "/_ingest/pipeline/{id}" ],
      "parts": {
        "id": {
          "type" : "string",