From 18c1cc678a28c13827574a607b8a7c32d96c7b00 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 24 Sep 2015 22:18:27 +0200 Subject: [PATCH 001/347] wip --- plugins/ingest/licenses/no_deps.txt | 1 + plugins/ingest/pom.xml | 33 +++++ .../java/org/elasticsearch/ingest/Data.java | 64 +++++++++ .../org/elasticsearch/ingest/Pipeline.java | 103 +++++++++++++ .../org/elasticsearch/ingest/Processor.java | 35 +++++ .../elasticsearch/ingest/SimpleProcessor.java | 90 ++++++++++++ .../plugin/ingest/IngestModule.java | 34 +++++ .../plugin/ingest/IngestPlugin.java | 65 +++++++++ .../plugin/ingest/PipelineStore.java | 136 ++++++++++++++++++ .../plugin/ingest/rest/IngestRestFilter.java | 40 ++++++ .../ingest/transport/IngestActionFilter.java | 92 ++++++++++++ .../ingest/src/test/java/IngestRunner.java | 52 +++++++ .../org/elasticsearch/ingest/BasicTests.java | 83 +++++++++++ .../elasticsearch/ingest/IngestRestIT.java | 49 +++++++ .../rest-api-spec/test/ingest/10_basic.yaml | 6 + plugins/pom.xml | 2 + 16 files changed, 885 insertions(+) create mode 100644 plugins/ingest/licenses/no_deps.txt create mode 100644 plugins/ingest/pom.xml create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java create mode 100644 
plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java create mode 100644 plugins/ingest/src/test/java/IngestRunner.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml diff --git a/plugins/ingest/licenses/no_deps.txt b/plugins/ingest/licenses/no_deps.txt new file mode 100644 index 00000000000..8cce254d037 --- /dev/null +++ b/plugins/ingest/licenses/no_deps.txt @@ -0,0 +1 @@ +This plugin has no third party dependencies diff --git a/plugins/ingest/pom.xml b/plugins/ingest/pom.xml new file mode 100644 index 00000000000..3371dde3290 --- /dev/null +++ b/plugins/ingest/pom.xml @@ -0,0 +1,33 @@ + + + 4.0.0 + + + org.elasticsearch.plugin + plugins + 3.0.0-SNAPSHOT + + + ingest + Plugin: Node ingest + Plugin that allows to configure pipelines to preprocess documents before indexing + + + org.elasticsearch.plugin.ingest.IngestPlugin + ingest + false + -Xlint:-rawtypes + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + + + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java new file mode 100644 index 00000000000..a3644c5c316 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.xcontent.support.XContentMapValues; + +import java.util.Map; + +public final class Data { + + private final String index; + private final String type; + private final String id; + private final Map document; + + public Data(String index, String type, String id, Map document) { + this.index = index; + this.type = type; + this.id = id; + this.document = document; + } + + @SuppressWarnings("unchecked") + public T getProperty(String path) { + return (T) XContentMapValues.extractValue(path, document); + } + + public void addField(String field, String value) { + document.put(field, value); + } + + public String getIndex() { + return index; + } + + public String getType() { + return type; + } + + public String getId() { + return id; + } + + public Map getDocument() { + return document; + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java new file mode 100644 index 00000000000..0228f345640 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +package org.elasticsearch.ingest; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public final class Pipeline { + + private final String id; + private final String description; + private final List processors; + + private Pipeline(String id, String description, List processors) { + this.id = id; + this.description = description; + this.processors = processors; + } + + public void execute(Data data) { + for (Processor processor : processors) { + processor.execute(data); + } + } + + public String getId() { + return id; + } + + public String getDescription() { + return description; + } + + public List getProcessors() { + return processors; + } + + public final static class Builder { + + private final String name; + private String description; + private List processors = new ArrayList<>(); + + public Builder(String name) { + this.name = name; + } + + public Builder(Map config) { + name = (String) config.get("name"); + description = (String) config.get("description"); + @SuppressWarnings("unchecked") + List>> processors = (List>>) config.get("processors"); + if (processors != null ) { + for (Map> processor : processors) { + for (Map.Entry> entry : processor.entrySet()) { + // TODO: add lookup service... 
+ if ("simple".equals(entry.getKey())) { + SimpleProcessor.Builder builder = new SimpleProcessor.Builder(); + builder.fromMap(entry.getValue()); + this.processors.add(builder.build()); + } else { + throw new UnsupportedOperationException(); + } + } + } + } + } + + public void setDescription(String description) { + this.description = description; + } + + public void addProcessors(Processor.Builder... processors) { + for (Processor.Builder processor : processors) { + this.processors.add(processor.build()); + } + } + + public Pipeline build() { + return new Pipeline(name, description, Collections.unmodifiableList(processors)); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java new file mode 100644 index 00000000000..01739ec5def --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + + +package org.elasticsearch.ingest; + +public interface Processor { + + void execute(Data data); + + String type(); + + interface Builder { + + Processor build(); + + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java new file mode 100644 index 00000000000..d8a1a0c4c46 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import java.util.Map; + +public final class SimpleProcessor implements Processor { + + private final String path; + private final String expectedValue; + + private final String addField; + private final String addFieldValue; + + public SimpleProcessor(String path, String expectedValue, String addField, String addFieldValue) { + this.path = path; + this.expectedValue = expectedValue; + this.addField = addField; + this.addFieldValue = addFieldValue; + } + + @Override + public void execute(Data data) { + Object value = data.getProperty(path); + if (value != null) { + if (value.toString().equals(this.expectedValue)) { + data.addField(addField, addFieldValue); + } + } + } + + @Override + public String type() { + return "logging"; + } + + public static class Builder implements Processor.Builder { + + private String path; + private String value; + private String addField; + private String addFieldValue; + + public void setPath(String path) { + this.path = path; + } + + public void setValue(String value) { + this.value = value; + } + + public void setAddField(String addField) { + this.addField = addField; + } + + public void setAddFieldValue(String addFieldValue) { + this.addFieldValue = addFieldValue; + } + + public void fromMap(Map config) { + this.path = (String) config.get("path"); + this.value = (String) config.get("value"); + this.addField = (String) config.get("add_field"); + this.addFieldValue = (String) config.get("add_field_value"); + } + + @Override + public Processor build() { + return new SimpleProcessor(path, value, addField, addFieldValue); + } + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java new file mode 100644 index 00000000000..3d8725355dc --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -0,0 +1,34 @@ +/* + * Licensed to 
Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; +import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; + +public class IngestModule extends AbstractModule { + + @Override + protected void configure() { + binder().bind(IngestRestFilter.class).asEagerSingleton(); + binder().bind(PipelineStore.class).asEagerSingleton(); + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java new file mode 100644 index 00000000000..1f9a365aaeb --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.action.ActionModule; +import org.elasticsearch.common.component.LifecycleComponent; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.action.RestActionModule; + +import java.util.Collection; +import java.util.Collections; + +public class IngestPlugin extends Plugin { + + public static final String INGEST_CONTEXT_KEY = "__ingest__"; + public static final String INGEST_HTTP_PARAM = "ingest"; + + @Override + public String name() { + return "ingest"; + } + + @Override + public String description() { + return "Plugin that allows to configure pipelines to preprocess documents before indexing"; + } + + @Override + public Collection nodeModules() { + return Collections.singletonList(new IngestModule()); + } + + @Override + public Collection> nodeServices() { + return Collections.singletonList(PipelineStore.class); + } + + public void onModule(ActionModule module) { + module.registerFilter(IngestActionFilter.class); + } + + public void onModule(RestActionModule module) { + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java new file mode 100644 index 00000000000..cd0de96e550 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -0,0 +1,136 @@ +/* + * Licensed to 
Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.collect.CopyOnWriteHashMap; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.sort.SortOrder; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.CountDownLatch; + +public class PipelineStore extends AbstractLifecycleComponent { + + public final static String INDEX = ".pipelines"; + public final static String TYPE = "pipeline"; + + private Client client; + private final Injector injector; + + private volatile Updater updater; + private volatile CopyOnWriteHashMap pipelines = new CopyOnWriteHashMap<>(); + + @Inject + public PipelineStore(Settings 
settings, Injector injector) { + super(settings); + this.injector = injector; + } + + @Override + protected void doStart() { + client = injector.getInstance(Client.class); + updater = new Updater(); + // TODO: start when local cluster state isn't blocked: ([SERVICE_UNAVAILABLE/1/state not recovered / initialized]) + updater.start(); + } + + @Override + protected void doStop() { + updater.shutdown(); + } + + @Override + protected void doClose() { + } + + public Pipeline get(String id) { + return pipelines.get(id); + } + + void updatePipelines() { + Map pipelines = new HashMap<>(); + SearchResponse searchResponse = client.prepareSearch(INDEX) + .setScroll(TimeValue.timeValueMinutes(1)) + .addSort("_doc", SortOrder.ASC) + .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .get(); + logger.info("Loading [{}] pipelines", searchResponse.getHits().totalHits()); + do { + for (SearchHit hit : searchResponse.getHits()) { + logger.info("Loading pipeline [{}] with source [{}]", hit.getId(), hit.sourceAsString()); + Pipeline.Builder builder = new Pipeline.Builder(hit.sourceAsMap()); + pipelines.put(hit.getId(), builder.build()); + } + searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()).get(); + } while (searchResponse.getHits().getHits().length != 0); + PipelineStore.this.pipelines = PipelineStore.this.pipelines.copyAndPutAll(pipelines); + } + + class Updater extends Thread { + + private volatile boolean running = true; + private final CountDownLatch latch = new CountDownLatch(1); + + public Updater() { + super(EsExecutors.threadName(settings, "[updater]")); + } + + @Override + public void run() { + try { + while (running) { + try { + Thread.sleep(3000); + updatePipelines(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (Exception e) { + logger.error("update error", e); + } + } + } finally { + latch.countDown(); + } + } + + public void shutdown() { + running = false; + try { + interrupt(); + latch.await(); + } 
catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java new file mode 100644 index 00000000000..808ee01282c --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.rest; + +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.rest.*; + +import static org.elasticsearch.plugin.ingest.IngestPlugin.*; +import static org.elasticsearch.plugin.ingest.IngestPlugin.INGEST_CONTEXT_KEY; + +public class IngestRestFilter extends RestFilter { + + @Inject + public IngestRestFilter(RestController controller) { + controller.registerFilter(this); + } + + @Override + public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { + request.putInContext(INGEST_CONTEXT_KEY, request.param(INGEST_HTTP_PARAM)); + filterChain.continueProcessing(request, channel); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java new file mode 100644 index 00000000000..e4eef5b99b9 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.plugin.ingest.IngestPlugin; +import org.elasticsearch.plugin.ingest.PipelineStore; + +import java.util.List; +import java.util.Map; + +public class IngestActionFilter extends ActionFilter.Simple { + + private final PipelineStore pipelineStore; + + @Inject + public IngestActionFilter(Settings settings, PipelineStore pipelineStore) { + super(settings); + this.pipelineStore = pipelineStore; + } + + @Override + protected boolean apply(String action, ActionRequest request, ActionListener listener) { + String pipelineId = request.getFromContext(IngestPlugin.INGEST_CONTEXT_KEY); + if (pipelineId == null) { + pipelineId = request.getHeader(IngestPlugin.INGEST_HTTP_PARAM); + if (pipelineId == null) { + return true; + } + } + Pipeline pipeline = pipelineStore.get(pipelineId); + if (pipeline == null) { + return true; + } + + if (request instanceof IndexRequest) { + processIndexRequest((IndexRequest) request, pipeline); + } else if (request instanceof BulkRequest) { + BulkRequest bulkRequest = (BulkRequest) request; + List actionRequests = bulkRequest.requests(); + for (ActionRequest actionRequest : actionRequests) { + if (actionRequest instanceof IndexRequest) { + processIndexRequest((IndexRequest) actionRequest, pipeline); + } + } + } + return true; + } + + void processIndexRequest(IndexRequest indexRequest, Pipeline pipeline) { + Map sourceAsMap = indexRequest.sourceAsMap(); + Data data = new 
Data(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); + pipeline.execute(data); + indexRequest.source(data.getDocument()); + } + + @Override + protected boolean apply(String action, ActionResponse response, ActionListener listener) { + return true; + } + + @Override + public int order() { + return Integer.MAX_VALUE; + } +} diff --git a/plugins/ingest/src/test/java/IngestRunner.java b/plugins/ingest/src/test/java/IngestRunner.java new file mode 100644 index 00000000000..a860cb9938f --- /dev/null +++ b/plugins/ingest/src/test/java/IngestRunner.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.MockNode; +import org.elasticsearch.node.Node; +import org.elasticsearch.plugin.ingest.IngestPlugin; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; + +public class IngestRunner { + + public static void main(String[] args) throws Exception { + Settings.Builder settings = Settings.builder(); + settings.put("http.cors.enabled", "true"); + settings.put("http.cors.allow-origin", "*"); + settings.put("script.inline", "on"); + settings.put("cluster.name", IngestRunner.class.getSimpleName()); + + final CountDownLatch latch = new CountDownLatch(1); + final Node node = new MockNode(settings.build(), Version.CURRENT, Collections.singleton(IngestPlugin.class)); + Runtime.getRuntime().addShutdownHook(new Thread() { + + @Override + public void run() { + node.close(); + latch.countDown(); + } + }); + node.start(); + latch.await(); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java new file mode 100644 index 00000000000..e23d84ab8cf --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.plugin.ingest.IngestPlugin; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(numDataNodes = 1, numClientNodes = 0) +public class BasicTests extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(IngestPlugin.class); + } + + public void test() throws Exception { + client().prepareIndex(PipelineStore.INDEX, PipelineStore.TYPE, "_id") + .setSource(jsonBuilder().startObject() + .field("name", "my_pipeline") + .field("description", "my_pipeline") + .startArray("processors") + .startObject() + .startObject("simple") + .field("path", "field2") + .field("value", "abc") + .field("add_field", "field3") + .field("add_field_value", "xyz") + .endObject() + .endObject() + .endArray() + .endObject()) + .setRefresh(true) + .get(); + Thread.sleep(5000); + + createIndex("test"); + client().prepareIndex("test", "type", "1").setSource("field2", "abc") + .putHeader("ingest", "_id") + .get(); + + Map doc = client().prepareGet("test", "type", "1") + .get().getSourceAsMap(); + assertThat(doc.get("field3"), equalTo("xyz")); + + client().prepareBulk().add( + client().prepareIndex("test", "type", "2").setSource("field2", "abc") + ).putHeader("ingest", "_id").get(); + + doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); + assertThat(doc.get("field3"), equalTo("xyz")); + } + + @Override + protected boolean enableMockModules() { + return false; + } +} diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java new file mode 100644 index 00000000000..f6da5b541bb --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.plugin.ingest.IngestPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; +import java.util.Collection; + +public class IngestRestIT extends ESRestTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(IngestPlugin.class); + } + + public IngestRestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } +} + diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml new file mode 100644 index 00000000000..ad10d9b0041 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml @@ -0,0 +1,6 @@ +"Ingest plugin installed": + - do: + cluster.stats: {} + + - match: { nodes.plugins.0.name: ingest } + - match: { nodes.plugins.0.jvm: true } diff --git a/plugins/pom.xml b/plugins/pom.xml index d248c7f6fa0..7ce3b689b4a 100644 --- a/plugins/pom.xml +++ b/plugins/pom.xml @@ -228,6 +228,7 @@ api/info.json api/cluster.health.json api/cluster.state.json + api/cluster.stats.json api/index.json api/get.json @@ -402,6 +403,7 @@ repository-azure repository-s3 store-smb + ingest jvm-example From 2071db688c2753720ba8f8aeb8fca475d1de359d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 5 Oct 2015 15:35:45 +0200 Subject: [PATCH 002/347] only update pipeline if the content has been changed split the actual fetching of pipeline docs 
from the pipeline store to make unit testing easier intoduced factory for builders replaced hardcoded processor lookups with simple factory based registry --- .../java/org/elasticsearch/ingest/Data.java | 3 + .../org/elasticsearch/ingest/Pipeline.java | 24 ++- .../org/elasticsearch/ingest/Processor.java | 34 +++- .../elasticsearch/ingest/SimpleProcessor.java | 16 +- .../plugin/ingest/IngestModule.java | 21 +++ .../plugin/ingest/IngestPlugin.java | 8 +- .../ingest/PipelineConfigDocReader.java | 129 +++++++++++++ .../plugin/ingest/PipelineStore.java | 176 +++++++++++------- .../plugin/ingest/rest/IngestRestFilter.java | 2 +- .../ingest/transport/IngestActionFilter.java | 3 +- .../ingest/PipelineConfigDocReaderTests.java | 53 ++++++ .../plugin/ingest/PipelineStoreTests.java | 117 ++++++++++++ plugins/pom.xml | 6 + 13 files changed, 509 insertions(+), 83 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReader.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReaderTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index a3644c5c316..83a9aede29b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -23,6 +23,9 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import java.util.Map; +/** + * Represents the data and meta data (like id and type) of a single document that is going to be indexed. 
+ */ public final class Data { private final String index; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 0228f345640..baa8427d069 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -25,6 +25,9 @@ import java.util.Collections; import java.util.List; import java.util.Map; +/** + * A pipeline is a list of {@link Processor} instances grouped under a unique id. + */ public final class Pipeline { private final String id; @@ -37,20 +40,32 @@ public final class Pipeline { this.processors = processors; } + /** + * Modifies the data of a document to be indexed based on the processor this pipeline holds + */ public void execute(Data data) { for (Processor processor : processors) { processor.execute(data); } } + /** + * The unique id of this pipeline + */ public String getId() { return id; } + /** + * An optional description of what this pipeline is doing to the data gets processed by this pipeline. + */ public String getDescription() { return description; } + /** + * Unmodifiable list containing each processor that operates on the data. + */ public List getProcessors() { return processors; } @@ -65,7 +80,7 @@ public final class Pipeline { this.name = name; } - public Builder(Map config) { + public Builder(Map config, Map processorRegistry) { name = (String) config.get("name"); description = (String) config.get("description"); @SuppressWarnings("unchecked") @@ -73,13 +88,12 @@ public final class Pipeline { if (processors != null ) { for (Map> processor : processors) { for (Map.Entry> entry : processor.entrySet()) { - // TODO: add lookup service... 
- if ("simple".equals(entry.getKey())) { - SimpleProcessor.Builder builder = new SimpleProcessor.Builder(); + Processor.Builder builder = processorRegistry.get(entry.getKey()).create(); + if (builder != null) { builder.fromMap(entry.getValue()); this.processors.add(builder.build()); } else { - throw new UnsupportedOperationException(); + throw new IllegalArgumentException("No processor type exist with name [" + entry.getKey() + "]"); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java index 01739ec5def..3a3711aaf28 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java @@ -20,16 +20,46 @@ package org.elasticsearch.ingest; +import java.util.Map; + +/** + * An processor implementation may modify the data belonging to a document. + * If and what exactly is modified is upto the implementation. + */ public interface Processor { + /** + * Introspect and potentially modify the incoming data. + */ void execute(Data data); - String type(); - + /** + * A builder to contruct a processor to be used in a pipeline. + */ interface Builder { + /** + * A general way to set processor related settings based on the config map. + */ + void fromMap(Map config); + + /** + * Builds the processor based on previous set settings. + */ Processor build(); + /** + * A factory that creates a processor builder when processor instances for pipelines are being created. + */ + interface Factory { + + /** + * Creates the builder. 
+ */ + Builder create(); + + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java index d8a1a0c4c46..197f000118b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java @@ -23,6 +23,8 @@ import java.util.Map; public final class SimpleProcessor implements Processor { + public static final String TYPE = "simple"; + private final String path; private final String expectedValue; @@ -46,11 +48,6 @@ public final class SimpleProcessor implements Processor { } } - @Override - public String type() { - return "logging"; - } - public static class Builder implements Processor.Builder { private String path; @@ -85,6 +82,15 @@ public final class SimpleProcessor implements Processor { public Processor build() { return new SimpleProcessor(path, value, addField, addFieldValue); } + + public static class Factory implements Processor.Builder.Factory { + + @Override + public Processor.Builder create() { + return new Builder(); + } + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 3d8725355dc..b8b11a898b2 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -20,15 +20,36 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.multibindings.MapBinder; +import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.SimpleProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import 
org.elasticsearch.plugin.ingest.transport.IngestActionFilter; +import java.util.HashMap; +import java.util.Map; + public class IngestModule extends AbstractModule { + private final Map> processors = new HashMap<>(); + @Override protected void configure() { binder().bind(IngestRestFilter.class).asEagerSingleton(); binder().bind(PipelineStore.class).asEagerSingleton(); + binder().bind(PipelineConfigDocReader.class).asEagerSingleton(); + + registerProcessor(SimpleProcessor.TYPE, SimpleProcessor.Builder.Factory.class); + + MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Builder.Factory.class); + for (Map.Entry> entry : processors.entrySet()) { + mapBinder.addBinding(entry.getKey()).to(entry.getValue()); + } + } + + public void registerProcessor(String processorType, Class processorFactory) { + processors.put(processorType, processorFactory); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 1f9a365aaeb..2368ede0b5a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -27,13 +27,14 @@ import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.action.RestActionModule; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; public class IngestPlugin extends Plugin { public static final String INGEST_CONTEXT_KEY = "__ingest__"; - public static final String INGEST_HTTP_PARAM = "ingest"; + public static final String INGEST_PARAM = "ingest"; @Override public String name() { @@ -52,14 +53,11 @@ public class IngestPlugin extends Plugin { @Override public Collection> nodeServices() { - return Collections.singletonList(PipelineStore.class); + return Arrays.asList(PipelineStore.class, 
PipelineConfigDocReader.class); } public void onModule(ActionModule module) { module.registerFilter(IngestActionFilter.class); } - public void onModule(RestActionModule module) { - } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReader.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReader.java new file mode 100644 index 00000000000..9b2dcf1e09e --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReader.java @@ -0,0 +1,129 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.sort.SortOrder; + +import java.util.Collections; +import java.util.Iterator; + +public class PipelineConfigDocReader extends AbstractLifecycleComponent { + + private volatile Client client; + private final Injector injector; + private final TimeValue scrollTimeout; + + @Inject + public PipelineConfigDocReader(Settings settings, Injector injector) { + super(settings); + this.injector = injector; + this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); + } + + @Override + protected void doStart() { + client = injector.getInstance(Client.class); + } + + @Override + protected void doStop() { + client.close(); + } + + @Override + protected void doClose() { + } + + public Iterable readAll() { + // TODO: the search should be replaced with an ingest API when it is available + SearchResponse searchResponse = client.prepareSearch(PipelineStore.INDEX) + .setVersion(true) + .setScroll(scrollTimeout) + .addSort("_doc", SortOrder.ASC) + .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .get(); + + if (searchResponse.getHits().getTotalHits() == 0) { + return Collections.emptyList(); + } + logger.debug("reading [{}] pipeline documents", searchResponse.getHits().totalHits()); + return new Iterable() { + @Override + public Iterator iterator() { + return new SearchScrollIterator(searchResponse); + } + }; + } + + class SearchScrollIterator implements Iterator { + + private 
SearchResponse searchResponse; + + private int currentIndex; + private SearchHit[] currentHits; + + SearchScrollIterator(SearchResponse searchResponse) { + this.searchResponse = searchResponse; + this.currentHits = searchResponse.getHits().getHits(); + } + + @Override + public boolean hasNext() { + if (currentIndex < currentHits.length) { + return true; + } else { + if (searchResponse == null) { + return false; + } + + searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) + .setScroll(scrollTimeout) + .get(); + if (searchResponse.getHits().getHits().length == 0) { + searchResponse = null; + return false; + } else { + currentHits = searchResponse.getHits().getHits(); + currentIndex = 0; + return true; + } + } + } + + @Override + public SearchHit next() { + SearchHit hit = currentHits[currentIndex++]; + if (logger.isTraceEnabled()) { + logger.trace("reading pipeline document [{}] with source [{}]", hit.getId(), hit.sourceAsString()); + } + return hit; + } + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index cd0de96e550..b33bac75995 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -19,52 +19,56 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.collect.CopyOnWriteHashMap; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.component.Lifecycle; 
import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.threadpool.ThreadPool; +import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.concurrent.CountDownLatch; +import java.util.Set; public class PipelineStore extends AbstractLifecycleComponent { - public final static String INDEX = ".pipelines"; + public final static String INDEX = ".ingest"; public final static String TYPE = "pipeline"; - private Client client; - private final Injector injector; + private final ThreadPool threadPool; + private final ClusterService clusterService; + private final TimeValue pipelineUpdateInterval; + private final PipelineConfigDocReader configDocReader; + private final Map processorFactoryRegistry; - private volatile Updater updater; - private volatile CopyOnWriteHashMap pipelines = new CopyOnWriteHashMap<>(); + private volatile Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, Injector injector) { + public PipelineStore(Settings settings, ThreadPool threadPool, ClusterService clusterService, PipelineConfigDocReader configDocReader, Map processors) { super(settings); - this.injector = injector; + this.threadPool = threadPool; + this.clusterService = clusterService; + this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); + this.configDocReader = configDocReader; + this.processorFactoryRegistry = Collections.unmodifiableMap(processors); + clusterService.add(new PipelineStoreListener()); } 
@Override protected void doStart() { - client = injector.getInstance(Client.class); - updater = new Updater(); - // TODO: start when local cluster state isn't blocked: ([SERVICE_UNAVAILABLE/1/state not recovered / initialized]) - updater.start(); } @Override protected void doStop() { - updater.shutdown(); } @Override @@ -72,65 +76,109 @@ public class PipelineStore extends AbstractLifecycleComponent { } public Pipeline get(String id) { - return pipelines.get(id); + PipelineReference ref = pipelines.get(id); + if (ref != null) { + return ref.getPipeline(); + } else { + return null; + } } void updatePipelines() { - Map pipelines = new HashMap<>(); - SearchResponse searchResponse = client.prepareSearch(INDEX) - .setScroll(TimeValue.timeValueMinutes(1)) - .addSort("_doc", SortOrder.ASC) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .get(); - logger.info("Loading [{}] pipelines", searchResponse.getHits().totalHits()); - do { - for (SearchHit hit : searchResponse.getHits()) { - logger.info("Loading pipeline [{}] with source [{}]", hit.getId(), hit.sourceAsString()); - Pipeline.Builder builder = new Pipeline.Builder(hit.sourceAsMap()); - pipelines.put(hit.getId(), builder.build()); + int changed = 0; + Map newPipelines = new HashMap<>(pipelines); + for (SearchHit hit : configDocReader.readAll()) { + String pipelineId = hit.getId(); + BytesReference pipelineSource = hit.getSourceRef(); + PipelineReference previous = newPipelines.get(pipelineId); + if (previous != null) { + if (previous.getSource().equals(pipelineSource)) { + continue; + } } - searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()).get(); - } while (searchResponse.getHits().getHits().length != 0); - PipelineStore.this.pipelines = PipelineStore.this.pipelines.copyAndPutAll(pipelines); + + changed++; + Pipeline.Builder builder = new Pipeline.Builder(hit.sourceAsMap(), processorFactoryRegistry); + newPipelines.put(pipelineId, new PipelineReference(builder.build(), 
hit.getVersion(), pipelineSource)); + } + + if (changed != 0) { + logger.debug("adding or updating [{}] pipelines", changed); + pipelines = newPipelines; + } else { + logger.debug("adding no new pipelines"); + } } - class Updater extends Thread { - - private volatile boolean running = true; - private final CountDownLatch latch = new CountDownLatch(1); - - public Updater() { - super(EsExecutors.threadName(settings, "[updater]")); + void startUpdateWorker() { + if (lifecycleState() == Lifecycle.State.STARTED) { + threadPool.schedule(pipelineUpdateInterval, ThreadPool.Names.GENERIC, new Updater()); } + } + + class Updater implements Runnable { @Override public void run() { try { - while (running) { - try { - Thread.sleep(3000); - updatePipelines(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } catch (Exception e) { - logger.error("update error", e); - } - } + updatePipelines(); + } catch (Exception e) { + logger.error("pipeline store update failure", e); } finally { - latch.countDown(); - } - } - - public void shutdown() { - running = false; - try { - interrupt(); - latch.await(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); + startUpdateWorker(); } } } + class PipelineStoreListener implements ClusterStateListener { + + @Override + public void clusterChanged(ClusterChangedEvent event) { + if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { + startUpdateWorker(); + clusterService.remove(this); + } + } + } + + static class PipelineReference { + + private final Pipeline pipeline; + private final long version; + private final BytesReference source; + + PipelineReference(Pipeline pipeline, long version, BytesReference source) { + this.pipeline = pipeline; + this.version = version; + this.source = source; + } + + public Pipeline getPipeline() { + return pipeline; + } + + public long getVersion() { + return version; + } + + public BytesReference getSource() { + return 
source; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PipelineReference holder = (PipelineReference) o; + return source.equals(holder.source); + } + + @Override + public int hashCode() { + return source.hashCode(); + } + } + } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java index 808ee01282c..c6afd2e4ebd 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -34,7 +34,7 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - request.putInContext(INGEST_CONTEXT_KEY, request.param(INGEST_HTTP_PARAM)); + request.putInContext(INGEST_CONTEXT_KEY, request.param(INGEST_PARAM)); filterChain.continueProcessing(request, channel); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index e4eef5b99b9..1fc34565afc 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -49,7 +49,7 @@ public class IngestActionFilter extends ActionFilter.Simple { protected boolean apply(String action, ActionRequest request, ActionListener listener) { String pipelineId = request.getFromContext(IngestPlugin.INGEST_CONTEXT_KEY); if (pipelineId == null) { - pipelineId = request.getHeader(IngestPlugin.INGEST_HTTP_PARAM); + pipelineId = request.getHeader(IngestPlugin.INGEST_PARAM); if (pipelineId 
== null) { return true; } @@ -73,6 +73,7 @@ public class IngestActionFilter extends ActionFilter.Simple { return true; } + // TODO: this should be delegated to a PipelineExecutor service that executes on a different thread (pipeline TP) void processIndexRequest(IndexRequest indexRequest, Pipeline pipeline) { Map sourceAsMap = indexRequest.sourceAsMap(); Data data = new Data(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReaderTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReaderTests.java new file mode 100644 index 00000000000..782d5a674b4 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReaderTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class PipelineConfigDocReaderTests extends ESSingleNodeTestCase { + + public void testReadAll() { + PipelineConfigDocReader reader = new PipelineConfigDocReader(Settings.EMPTY, node().injector()); + reader.start(); + + createIndex(PipelineStore.INDEX); + int numDocs = scaledRandomIntBetween(32, 128); + for (int i = 0; i < numDocs; i++) { + client().prepareIndex(PipelineStore.INDEX, PipelineStore.TYPE, Integer.toString(i)) + .setSource("field", "value" + i) + .get(); + } + client().admin().indices().prepareRefresh().get(); + + int i = 0; + for (SearchHit hit : reader.readAll()) { + assertThat(hit.getId(), equalTo(Integer.toString(i))); + assertThat(hit.getVersion(), equalTo(1l)); + assertThat(hit.getSource().get("field"), equalTo("value" + i)); + i++; + } + assertThat(i, equalTo(numDocs)); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java new file mode 100644 index 00000000000..b10291fefbd --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.text.StringText; +import org.elasticsearch.ingest.SimpleProcessor; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.internal.InternalSearchHit; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class PipelineStoreTests extends ESTestCase { + + private PipelineStore store; + private ThreadPool threadPool; + private PipelineConfigDocReader docReader; + + @Before + public void init() { + threadPool = new ThreadPool("test"); + ClusterService clusterService = mock(ClusterService.class); + docReader = mock(PipelineConfigDocReader.class); + store = new PipelineStore(Settings.EMPTY, threadPool, clusterService, docReader, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Builder.Factory())); + store.start(); + } + + @After + public void cleanup() { + store.stop(); + threadPool.shutdown(); + } + + + public void testUpdatePipeline() { + List hits = new ArrayList<>(); + 
hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) + .sourceRef(new BytesArray("{\"name\": \"_name1\", \"description\": \"_description1\"}")) + ); + + when(docReader.readAll()).thenReturn(hits); + assertThat(store.get("1"), nullValue()); + + store.updatePipelines(); + assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getDescription(), equalTo("_description1")); + + hits.add(new InternalSearchHit(0, "2", new StringText("type"), Collections.emptyMap()) + .sourceRef(new BytesArray("{\"name\": \"_name2\", \"description\": \"_description2\"}")) + ); + store.updatePipelines(); + assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getDescription(), equalTo("_description1")); + assertThat(store.get("2").getId(), equalTo("_name2")); + assertThat(store.get("2").getDescription(), equalTo("_description2")); + } + + public void testPipelineUpdater() throws Exception { + List hits = new ArrayList<>(); + hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) + .sourceRef(new BytesArray("{\"name\": \"_name1\", \"description\": \"_description1\"}")) + ); + when(docReader.readAll()).thenReturn(hits); + assertThat(store.get("1"), nullValue()); + + store.startUpdateWorker(); + assertBusy(() -> { + assertThat(store.get("1"), notNullValue()); + assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getDescription(), equalTo("_description1")); + }); + + hits.add(new InternalSearchHit(0, "2", new StringText("type"), Collections.emptyMap()) + .sourceRef(new BytesArray("{\"name\": \"_name2\", \"description\": \"_description2\"}")) + ); + assertBusy(() -> { + assertThat(store.get("1"), notNullValue()); + assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getDescription(), equalTo("_description1")); + assertThat(store.get("2"), notNullValue()); + assertThat(store.get("2").getId(), equalTo("_name2")); + 
assertThat(store.get("2").getDescription(), equalTo("_description2")); + }); + } + +} diff --git a/plugins/pom.xml b/plugins/pom.xml index 7ce3b689b4a..cf8abc8bd17 100644 --- a/plugins/pom.xml +++ b/plugins/pom.xml @@ -47,6 +47,12 @@ test-jar test + + org.elasticsearch + securemock + 1.1 + test + From fa187a2e697c73865a820ec24d7744adc3b8f161 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 7 Oct 2015 22:42:14 +0200 Subject: [PATCH 003/347] fix smoke test qa test by adding ingest plugin dependency --- qa/smoke-test-plugins/pom.xml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/qa/smoke-test-plugins/pom.xml b/qa/smoke-test-plugins/pom.xml index 238ea9c7f6b..99e016a12a6 100644 --- a/qa/smoke-test-plugins/pom.xml +++ b/qa/smoke-test-plugins/pom.xml @@ -381,6 +381,14 @@ true + + org.elasticsearch.plugin + ingest + ${elasticsearch.version} + zip + true + + org.elasticsearch.plugin From 82a9ba355d9fc37ae6b2bc61b932113a73987d11 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 7 Oct 2015 13:03:15 +0200 Subject: [PATCH 004/347] Added pipeline execution service that deals with updating data as it comes in using a dedicated thread pool. Also changed how bulk requests are handled, because before it just didn't work, but added a todo there because it can potentially be handled differently. 
--- .../java/org/elasticsearch/ingest/Data.java | 7 + .../elasticsearch/ingest/SimpleProcessor.java | 10 +- .../plugin/ingest/IngestModule.java | 1 + .../plugin/ingest/IngestPlugin.java | 19 +- .../ingest/PipelineExecutionService.java | 86 +++++++ .../ingest/transport/IngestActionFilter.java | 105 ++++++--- .../org/elasticsearch/ingest/BasicTests.java | 23 +- .../ingest/PipelineExecutionServiceTests.java | 127 +++++++++++ .../transport/IngestActionFilterTests.java | 209 ++++++++++++++++++ 9 files changed, 543 insertions(+), 44 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 83a9aede29b..6a4f2f965ce 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -33,6 +33,8 @@ public final class Data { private final String id; private final Map document; + private boolean modified = false; + public Data(String index, String type, String id, Map document) { this.index = index; this.type = type; @@ -46,6 +48,7 @@ public final class Data { } public void addField(String field, String value) { + modified = true; document.put(field, value); } @@ -64,4 +67,8 @@ public final class Data { public Map getDocument() { return document; } + + public boolean isModified() { + return modified; + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java index 197f000118b..956e444cb48 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java @@ -51,7 +51,7 @@ public final class SimpleProcessor implements Processor { public static class Builder implements Processor.Builder { private String path; - private String value; + private String expectedValue; private String addField; private String addFieldValue; @@ -59,8 +59,8 @@ public final class SimpleProcessor implements Processor { this.path = path; } - public void setValue(String value) { - this.value = value; + public void setExpectedValue(String value) { + this.expectedValue = value; } public void setAddField(String addField) { @@ -73,14 +73,14 @@ public final class SimpleProcessor implements Processor { public void fromMap(Map config) { this.path = (String) config.get("path"); - this.value = (String) config.get("value"); + this.expectedValue = (String) config.get("expected_value"); this.addField = (String) config.get("add_field"); this.addFieldValue = (String) config.get("add_field_value"); } @Override public Processor build() { - return new SimpleProcessor(path, value, addField, addFieldValue); + return new SimpleProcessor(path, expectedValue, addField, addFieldValue); } public static class Factory implements Processor.Builder.Factory { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index b8b11a898b2..6a2138c9089 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -37,6 +37,7 @@ public class IngestModule extends AbstractModule { @Override protected void configure() { binder().bind(IngestRestFilter.class).asEagerSingleton(); + binder().bind(PipelineExecutionService.class).asEagerSingleton(); binder().bind(PipelineStore.class).asEagerSingleton(); 
binder().bind(PipelineConfigDocReader.class).asEagerSingleton(); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 2368ede0b5a..366eea29ae6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -23,6 +23,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.action.ActionModule; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.action.RestActionModule; @@ -31,14 +32,23 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; + public class IngestPlugin extends Plugin { public static final String INGEST_CONTEXT_KEY = "__ingest__"; public static final String INGEST_PARAM = "ingest"; + public static final String NAME = "ingest"; + + private final Settings nodeSettings; + + public IngestPlugin(Settings nodeSettings) { + this.nodeSettings = nodeSettings; + } @Override public String name() { - return "ingest"; + return NAME; } @Override @@ -56,6 +66,13 @@ public class IngestPlugin extends Plugin { return Arrays.asList(PipelineStore.class, PipelineConfigDocReader.class); } + @Override + public Settings additionalSettings() { + return settingsBuilder() + .put(PipelineExecutionService.additionalSettings(nodeSettings)) + .build(); + } + public void onModule(ActionModule module) { module.registerFilter(IngestActionFilter.class); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java new file mode 100644 index 00000000000..18d656813ec --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.support.LoggerMessageFormat; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.threadpool.ThreadPool; + +public class PipelineExecutionService { + + static final String THREAD_POOL_NAME = IngestPlugin.NAME; + + private final PipelineStore store; + private final ThreadPool threadPool; + + @Inject + public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) { + this.store = store; + this.threadPool = threadPool; + } + + public void execute(Data data, String pipelineId, Listener listener) { + Pipeline pipeline = store.get(pipelineId); + if (pipeline == null) { + listener.failed(new IllegalArgumentException(LoggerMessageFormat.format("pipeline with id [{}] does not exist", pipelineId))); + return; + } + + threadPool.executor(THREAD_POOL_NAME).execute(new Runnable() { + @Override + public void run() { + try { + pipeline.execute(data); + listener.executed(data); + } catch (Exception e) { + listener.failed(e); + } + } + }); + } + + public interface Listener { + + void executed(Data data); + + void failed(Exception e); + + } + + public static Settings additionalSettings(Settings nodeSettings) { + Settings settings = nodeSettings.getAsSettings("threadpool." + THREAD_POOL_NAME); + if (!settings.names().isEmpty()) { + // the TP is already configured in the node settings + // no need for additional settings + return Settings.EMPTY; + } + int availableProcessors = EsExecutors.boundedNumberOfProcessors(nodeSettings); + return Settings.builder() + .put("threadpool." + THREAD_POOL_NAME + ".type", "fixed") + .put("threadpool." + THREAD_POOL_NAME + ".size", availableProcessors) + .put("threadpool." 
+ THREAD_POOL_NAME + ".queue_size", 200) + .build(); + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 1fc34565afc..228243d3240 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -25,65 +25,108 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.Data; -import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.plugin.ingest.IngestPlugin; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.PipelineExecutionService; -import java.util.List; +import java.util.Iterator; import java.util.Map; -public class IngestActionFilter extends ActionFilter.Simple { +public class IngestActionFilter extends AbstractComponent implements ActionFilter { - private final PipelineStore pipelineStore; + private final PipelineExecutionService executionService; @Inject - public IngestActionFilter(Settings settings, PipelineStore pipelineStore) { + public IngestActionFilter(Settings settings, PipelineExecutionService executionService) { super(settings); - this.pipelineStore = pipelineStore; + this.executionService = executionService; } @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { + public void apply(String action, ActionRequest request, ActionListener listener, 
ActionFilterChain chain) { String pipelineId = request.getFromContext(IngestPlugin.INGEST_CONTEXT_KEY); if (pipelineId == null) { pipelineId = request.getHeader(IngestPlugin.INGEST_PARAM); if (pipelineId == null) { - return true; + chain.proceed(action, request, listener); + return; } } - Pipeline pipeline = pipelineStore.get(pipelineId); - if (pipeline == null) { - return true; - } if (request instanceof IndexRequest) { - processIndexRequest((IndexRequest) request, pipeline); + processIndexRequest(action, listener, chain, (IndexRequest) request, pipelineId); } else if (request instanceof BulkRequest) { BulkRequest bulkRequest = (BulkRequest) request; - List actionRequests = bulkRequest.requests(); - for (ActionRequest actionRequest : actionRequests) { - if (actionRequest instanceof IndexRequest) { - processIndexRequest((IndexRequest) actionRequest, pipeline); - } - } + processBulkIndexRequest(action, listener, chain, bulkRequest, pipelineId, bulkRequest.requests().iterator()); + } else { + chain.proceed(action, request, listener); } - return true; - } - - // TODO: this should be delegated to a PipelineExecutor service that executes on a different thread (pipeline TP) - void processIndexRequest(IndexRequest indexRequest, Pipeline pipeline) { - Map sourceAsMap = indexRequest.sourceAsMap(); - Data data = new Data(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); - pipeline.execute(data); - indexRequest.source(data.getDocument()); } @Override - protected boolean apply(String action, ActionResponse response, ActionListener listener) { - return true; + public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + chain.proceed(action, response, listener); + } + + void processIndexRequest(String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest, String pipelineId) { + Map sourceAsMap = indexRequest.sourceAsMap(); + Data data = new Data(indexRequest.index(), 
indexRequest.type(), indexRequest.id(), sourceAsMap); + executionService.execute(data, pipelineId, new PipelineExecutionService.Listener() { + @Override + public void executed(Data data) { + if (data.isModified()) { + indexRequest.source(data.getDocument()); + } + chain.proceed(action, indexRequest, listener); + } + + @Override + public void failed(Exception e) { + logger.error("failed to execute pipeline [{}]", e, pipelineId); + listener.onFailure(e); + } + }); + } + + // TODO: rethink how to deal with bulk requests: + // This doesn't scale very well for a single bulk requests, so it would be great if a bulk requests could be broken up into several chunks so that the ingesting can be paralized + // on the other hand if there are many index/bulk requests then breaking up bulk requests isn't going to help much. + // I think the execution service should be smart enough about when it should break things up in chunks based on the ingest threadpool usage, + // this means that the contract of the execution service should change in order to accept multiple data instances. 
+ void processBulkIndexRequest(String action, ActionListener listener, ActionFilterChain chain, BulkRequest bulkRequest, String pipelineId, Iterator requests) { + if (!requests.hasNext()) { + chain.proceed(action, bulkRequest, listener); + return; + } + + ActionRequest actionRequest = requests.next(); + if (!(actionRequest instanceof IndexRequest)) { + processBulkIndexRequest(action, listener, chain, bulkRequest, pipelineId, requests); + return; + } + + IndexRequest indexRequest = (IndexRequest) actionRequest; + Map sourceAsMap = indexRequest.sourceAsMap(); + Data data = new Data(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); + executionService.execute(data, pipelineId, new PipelineExecutionService.Listener() { + @Override + public void executed(Data data) { + if (data.isModified()) { + indexRequest.source(data.getDocument()); + } + processBulkIndexRequest(action, listener, chain, bulkRequest, pipelineId, requests); + } + + @Override + public void failed(Exception e) { + logger.error("failed to execute pipeline [{}]", e, pipelineId); + listener.onFailure(e); + } + }); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java index e23d84ab8cf..0c884713374 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java @@ -48,7 +48,7 @@ public class BasicTests extends ESIntegTestCase { .startObject() .startObject("simple") .field("path", "field2") - .field("value", "abc") + .field("expected_value", "abc") .field("add_field", "field3") .field("add_field_value", "xyz") .endObject() @@ -64,16 +64,25 @@ public class BasicTests extends ESIntegTestCase { .putHeader("ingest", "_id") .get(); - Map doc = client().prepareGet("test", "type", "1") - .get().getSourceAsMap(); - assertThat(doc.get("field3"), equalTo("xyz")); + assertBusy(new Runnable() 
{ + @Override + public void run() { + Map doc = client().prepareGet("test", "type", "1") + .get().getSourceAsMap(); + assertThat(doc.get("field3"), equalTo("xyz")); + } + }); client().prepareBulk().add( client().prepareIndex("test", "type", "2").setSource("field2", "abc") ).putHeader("ingest", "_id").get(); - - doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); - assertThat(doc.get("field3"), equalTo("xyz")); + assertBusy(new Runnable() { + @Override + public void run() { + Map doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); + assertThat(doc.get("field3"), equalTo("xyz")); + } + }); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java new file mode 100644 index 00000000000..e2f966ba37e --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; +import java.util.Map; + +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.*; + +public class PipelineExecutionServiceTests extends ESTestCase { + + private PipelineStore store; + private ThreadPool threadPool; + private PipelineExecutionService executionService; + + @Before + public void setup() { + store = mock(PipelineStore.class); + threadPool = new ThreadPool( + Settings.builder() + .put("name", "_name") + .put(PipelineExecutionService.additionalSettings(Settings.EMPTY)) + .build() + ); + executionService = new PipelineExecutionService(store, threadPool); + } + + @After + public void destroy() { + threadPool.shutdown(); + } + + public void testExecute_pipelineDoesNotExist() { + when(store.get("_id")).thenReturn(null); + Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); + PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + executionService.execute(data, "_id", listener); + verify(listener).failed(any(IllegalArgumentException.class)); + verify(listener, times(0)).executed(data); + } + + public void testExecute_success() throws Exception { + Pipeline.Builder builder = new Pipeline.Builder("_id"); + Processor processor = mock(Processor.class); + builder.addProcessors(new Processor.Builder() { + @Override + public void fromMap(Map config) { + } + + @Override + public Processor build() { + return processor; + } + }); + + when(store.get("_id")).thenReturn(builder.build()); + + Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); + 
PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + executionService.execute(data, "_id", listener); + assertBusy(new Runnable() { + @Override + public void run() { + verify(processor).execute(data); + verify(listener).executed(data); + verify(listener, times(0)).failed(any(Exception.class)); + } + }); + } + + public void testExecute_failure() throws Exception { + Pipeline.Builder builder = new Pipeline.Builder("_id"); + Processor processor = mock(Processor.class); + builder.addProcessors(new Processor.Builder() { + @Override + public void fromMap(Map config) { + } + + @Override + public Processor build() { + return processor; + } + }); + + when(store.get("_id")).thenReturn(builder.build()); + Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); + doThrow(new RuntimeException()).when(processor).execute(data); + PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + executionService.execute(data, "_id", listener); + assertBusy(new Runnable() { + @Override + public void run() { + verify(processor).execute(data); + verify(listener, times(0)).executed(data); + verify(listener).failed(any(RuntimeException.class)); + } + }); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java new file mode 100644 index 00000000000..4b19f0bd91b --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -0,0 +1,209 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.SimpleProcessor; +import org.elasticsearch.plugin.ingest.IngestPlugin; +import org.elasticsearch.plugin.ingest.PipelineExecutionService; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.Before; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.*; + +public class IngestActionFilterTests extends ESTestCase { + + private IngestActionFilter filter; + private PipelineExecutionService executionService; + + @Before + public void setup() { + executionService = 
mock(PipelineExecutionService.class); + filter = new IngestActionFilter(Settings.EMPTY, executionService); + } + + public void testApplyNoIngestId() throws Exception { + IndexRequest indexRequest = new IndexRequest(); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + filter.apply("_action", indexRequest, actionListener, actionFilterChain); + + verify(actionFilterChain).proceed("_action", indexRequest, actionListener); + verifyZeroInteractions(executionService, actionFilterChain); + } + + public void testApplyIngestIdViaRequestParam() throws Exception { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field", "value"); + indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + filter.apply("_action", indexRequest, actionListener, actionFilterChain); + + verify(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verifyZeroInteractions(actionFilterChain); + } + + public void testApplyIngestIdViaContext() throws Exception { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field", "value"); + indexRequest.putInContext(IngestPlugin.INGEST_CONTEXT_KEY, "_id"); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + filter.apply("_action", indexRequest, actionListener, actionFilterChain); + + verify(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verifyZeroInteractions(actionFilterChain); + } + + public void testApply_executed() throws Exception { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field", "value"); + 
indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + Answer answer = new Answer() { + @Override + public Object answer(InvocationOnMock invocationOnMock) throws Throwable { + Data data = (Data) invocationOnMock.getArguments()[0]; + PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; + listener.executed(data); + return null; + } + }; + doAnswer(answer).when(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + filter.apply("_action", indexRequest, actionListener, actionFilterChain); + + verify(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(actionFilterChain).proceed("_action", indexRequest, actionListener); + verifyZeroInteractions(actionListener); + } + + public void testApply_failed() throws Exception { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field", "value"); + indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + RuntimeException exception = new RuntimeException(); + Answer answer = new Answer() { + @Override + public Object answer(InvocationOnMock invocationOnMock) throws Throwable { + PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; + listener.failed(exception); + return null; + } + }; + doAnswer(answer).when(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + filter.apply("_action", indexRequest, actionListener, actionFilterChain); + + verify(executionService).execute(any(Data.class), eq("_id"), 
any(PipelineExecutionService.Listener.class)); + verify(actionListener).onFailure(exception); + verifyZeroInteractions(actionFilterChain); + } + + public void testApply_withBulkRequest() throws Exception { + ThreadPool threadPool = new ThreadPool( + Settings.builder() + .put("name", "_name") + .put(PipelineExecutionService.additionalSettings(Settings.EMPTY)) + .build() + ); + PipelineStore store = mock(PipelineStore.class); + Pipeline.Builder pipelineBuilder = new Pipeline.Builder("_id"); + SimpleProcessor.Builder processorBuilder = new SimpleProcessor.Builder(); + processorBuilder.setPath("field1"); + processorBuilder.setExpectedValue("value1"); + processorBuilder.setAddField("field2"); + processorBuilder.setAddFieldValue("value2"); + pipelineBuilder.addProcessors(processorBuilder); + when(store.get("_id")).thenReturn(pipelineBuilder.build()); + executionService = new PipelineExecutionService(store, threadPool); + filter = new IngestActionFilter(Settings.EMPTY, executionService); + + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + int numRequest = scaledRandomIntBetween(8, 64); + for (int i = 0; i < numRequest; i++) { + if (rarely()) { + ActionRequest request; + if (randomBoolean()) { + request = new DeleteRequest("_index", "_type", "_id"); + } else { + request = new UpdateRequest("_index", "_type", "_id"); + } + bulkRequest.add(request); + } else { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field1", "value1"); + bulkRequest.add(indexRequest); + } + } + + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + filter.apply("_action", bulkRequest, actionListener, actionFilterChain); + + assertBusy(new Runnable() { + @Override + public void run() { + verify(actionFilterChain).proceed("_action", bulkRequest, actionListener); + verifyZeroInteractions(actionListener); + + int 
assertedRequests = 0; + for (ActionRequest actionRequest : bulkRequest.requests()) { + if (actionRequest instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + assertThat(indexRequest.sourceAsMap().size(), equalTo(2)); + assertThat(indexRequest.sourceAsMap().get("field1"), equalTo("value1")); + assertThat(indexRequest.sourceAsMap().get("field2"), equalTo("value2")); + } + assertedRequests++; + } + assertThat(assertedRequests, equalTo(numRequest)); + } + }); + + threadPool.shutdown(); + } + +} From b3ad3f35fa70cb6adb6f7a7d0b447c96224b443c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 8 Oct 2015 13:27:31 +0200 Subject: [PATCH 005/347] prevent IndexRequest from being processed multipel times --- .../plugin/ingest/IngestPlugin.java | 3 ++- .../plugin/ingest/rest/IngestRestFilter.java | 4 ++-- .../ingest/transport/IngestActionFilter.java | 11 ++++++++++- .../transport/IngestActionFilterTests.java | 16 +++++++++++++++- 4 files changed, 29 insertions(+), 5 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 366eea29ae6..54fa0ead010 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -36,8 +36,9 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class IngestPlugin extends Plugin { - public static final String INGEST_CONTEXT_KEY = "__ingest__"; + public static final String INGEST_PAREM_CONTEXT_KEY = "__ingest__"; public static final String INGEST_PARAM = "ingest"; + public static final String INGEST_ALREADY_PROCESSED = "ingest_already_processed"; public static final String NAME = "ingest"; private final Settings nodeSettings; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java index c6afd2e4ebd..c82e8f1800e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.rest.*; import static org.elasticsearch.plugin.ingest.IngestPlugin.*; -import static org.elasticsearch.plugin.ingest.IngestPlugin.INGEST_CONTEXT_KEY; +import static org.elasticsearch.plugin.ingest.IngestPlugin.INGEST_PAREM_CONTEXT_KEY; public class IngestRestFilter extends RestFilter { @@ -34,7 +34,7 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - request.putInContext(INGEST_CONTEXT_KEY, request.param(INGEST_PARAM)); + request.putInContext(INGEST_PAREM_CONTEXT_KEY, request.param(INGEST_PARAM)); filterChain.continueProcessing(request, channel); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 228243d3240..1edd481bbe8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -48,7 +48,7 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte @Override public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(IngestPlugin.INGEST_CONTEXT_KEY); + String pipelineId = request.getFromContext(IngestPlugin.INGEST_PAREM_CONTEXT_KEY); if (pipelineId == null) { pipelineId = request.getHeader(IngestPlugin.INGEST_PARAM); 
if (pipelineId == null) { @@ -73,6 +73,14 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte } void processIndexRequest(String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest, String pipelineId) { + // The IndexRequest has the same type on the node that receives the request and the node that + // processes the primary action. This could lead to a pipeline being executed twice for the same + // index request, hence this check + if (indexRequest.hasHeader(IngestPlugin.INGEST_ALREADY_PROCESSED)) { + chain.proceed(action, indexRequest, listener); + return; + } + Map sourceAsMap = indexRequest.sourceAsMap(); Data data = new Data(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); executionService.execute(data, pipelineId, new PipelineExecutionService.Listener() { @@ -81,6 +89,7 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte if (data.isModified()) { indexRequest.source(data.getDocument()); } + indexRequest.putHeader(IngestPlugin.INGEST_ALREADY_PROCESSED, true); chain.proceed(action, indexRequest, listener); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 4b19f0bd91b..dfa61cf1fd1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -82,7 +82,7 @@ public class IngestActionFilterTests extends ESTestCase { public void testApplyIngestIdViaContext() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putInContext(IngestPlugin.INGEST_CONTEXT_KEY, "_id"); + indexRequest.putInContext(IngestPlugin.INGEST_PAREM_CONTEXT_KEY, 
"_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -92,6 +92,20 @@ public class IngestActionFilterTests extends ESTestCase { verifyZeroInteractions(actionFilterChain); } + public void testApplyAlreadyProcessed() throws Exception { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field", "value"); + indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + indexRequest.putHeader(IngestPlugin.INGEST_ALREADY_PROCESSED, true); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + filter.apply("_action", indexRequest, actionListener, actionFilterChain); + + verify(actionFilterChain).proceed("_action", indexRequest, actionListener); + verifyZeroInteractions(executionService, actionListener); + } + public void testApply_executed() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); From 11f17c0d7d2fa059ef02f357b3e7ada68b1b3dc5 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 9 Oct 2015 10:14:33 +0200 Subject: [PATCH 006/347] rename constant name and removed the todo --- .../java/org/elasticsearch/plugin/ingest/IngestPlugin.java | 2 +- .../elasticsearch/plugin/ingest/rest/IngestRestFilter.java | 4 ++-- .../plugin/ingest/transport/IngestActionFilter.java | 7 +------ .../plugin/ingest/transport/IngestActionFilterTests.java | 2 +- 4 files changed, 5 insertions(+), 10 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 54fa0ead010..7cb62e86a72 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -36,7 
+36,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class IngestPlugin extends Plugin { - public static final String INGEST_PAREM_CONTEXT_KEY = "__ingest__"; + public static final String INGEST_PARAM_CONTEXT_KEY = "__ingest__"; public static final String INGEST_PARAM = "ingest"; public static final String INGEST_ALREADY_PROCESSED = "ingest_already_processed"; public static final String NAME = "ingest"; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java index c82e8f1800e..702220addff 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.rest.*; import static org.elasticsearch.plugin.ingest.IngestPlugin.*; -import static org.elasticsearch.plugin.ingest.IngestPlugin.INGEST_PAREM_CONTEXT_KEY; +import static org.elasticsearch.plugin.ingest.IngestPlugin.INGEST_PARAM_CONTEXT_KEY; public class IngestRestFilter extends RestFilter { @@ -34,7 +34,7 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - request.putInContext(INGEST_PAREM_CONTEXT_KEY, request.param(INGEST_PARAM)); + request.putInContext(INGEST_PARAM_CONTEXT_KEY, request.param(INGEST_PARAM)); filterChain.continueProcessing(request, channel); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 1edd481bbe8..aec9c4554c7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -48,7 +48,7 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte @Override public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(IngestPlugin.INGEST_PAREM_CONTEXT_KEY); + String pipelineId = request.getFromContext(IngestPlugin.INGEST_PARAM_CONTEXT_KEY); if (pipelineId == null) { pipelineId = request.getHeader(IngestPlugin.INGEST_PARAM); if (pipelineId == null) { @@ -101,11 +101,6 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte }); } - // TODO: rethink how to deal with bulk requests: - // This doesn't scale very well for a single bulk requests, so it would be great if a bulk requests could be broken up into several chunks so that the ingesting can be paralized - // on the other hand if there are many index/bulk requests then breaking up bulk requests isn't going to help much. - // I think the execution service should be smart enough about when it should break things up in chunks based on the ingest threadpool usage, - // this means that the contract of the execution service should change in order to accept multiple data instances. 
void processBulkIndexRequest(String action, ActionListener listener, ActionFilterChain chain, BulkRequest bulkRequest, String pipelineId, Iterator requests) { if (!requests.hasNext()) { chain.proceed(action, bulkRequest, listener); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index dfa61cf1fd1..4ed32fbf650 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -82,7 +82,7 @@ public class IngestActionFilterTests extends ESTestCase { public void testApplyIngestIdViaContext() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putInContext(IngestPlugin.INGEST_PAREM_CONTEXT_KEY, "_id"); + indexRequest.putInContext(IngestPlugin.INGEST_PARAM_CONTEXT_KEY, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); From 5a3c75ebac2e7f729188b800314385d8d75fb759 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 9 Oct 2015 15:02:12 +0200 Subject: [PATCH 007/347] added put, get and delete pipeline APIs. 
--- docs/plugins/ingest.asciidoc | 74 +++++++++++++ .../org/elasticsearch/ingest/Pipeline.java | 11 +- .../plugin/ingest/IngestModule.java | 4 +- .../plugin/ingest/IngestPlugin.java | 39 ++++++- .../plugin/ingest/PipelineStore.java | 53 ++++++--- ...ocReader.java => PipelineStoreClient.java} | 12 ++- .../ingest/rest/RestDeletePipelineAction.java | 50 +++++++++ .../ingest/rest/RestGetPipelineAction.java | 51 +++++++++ .../ingest/rest/RestPutPipelineAction.java | 50 +++++++++ .../delete/DeletePipelineAction.java | 43 ++++++++ .../delete/DeletePipelineRequest.java | 63 +++++++++++ .../delete/DeletePipelineRequestBuilder.java | 36 +++++++ .../delete/DeletePipelineResponse.java | 80 ++++++++++++++ .../delete/DeletePipelineTransportAction.java | 69 ++++++++++++ .../transport/get/GetPipelineAction.java | 43 ++++++++ .../transport/get/GetPipelineRequest.java | 63 +++++++++++ .../get/GetPipelineRequestBuilder.java | 36 +++++++ .../transport/get/GetPipelineResponse.java | 102 ++++++++++++++++++ .../get/GetPipelineTransportAction.java | 58 ++++++++++ .../transport/put/PutPipelineAction.java | 43 ++++++++ .../transport/put/PutPipelineRequest.java | 78 ++++++++++++++ .../put/PutPipelineRequestBuilder.java | 42 ++++++++ .../transport/put/PutPipelineResponse.java | 79 ++++++++++++++ .../put/PutPipelineTransportAction.java | 70 ++++++++++++ .../{BasicTests.java => IngestClientIT.java} | 58 ++++++++-- ...sts.java => PipelineStoreClientTests.java} | 6 +- .../plugin/ingest/PipelineStoreTests.java | 90 +++++++++++++--- .../api/ingest.delete_pipeline.json | 20 ++++ .../api/ingest.get_pipeline.json | 20 ++++ .../api/ingest.put_pipeline.json | 23 ++++ .../rest-api-spec/test/ingest/20_crud.yaml | 57 ++++++++++ 31 files changed, 1467 insertions(+), 56 deletions(-) create mode 100644 docs/plugins/ingest.asciidoc rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{PipelineConfigDocReader.java => PipelineStoreClient.java} (90%) create mode 100644 
plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequest.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequestBuilder.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequest.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequestBuilder.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequest.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequestBuilder.java create mode 100644 
plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java rename plugins/ingest/src/test/java/org/elasticsearch/ingest/{BasicTests.java => IngestClientIT.java} (56%) rename plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/{PipelineConfigDocReaderTests.java => PipelineStoreClientTests.java} (88%) create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.delete_pipeline.json create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.put_pipeline.json create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc new file mode 100644 index 00000000000..3fb61bdc3b7 --- /dev/null +++ b/docs/plugins/ingest.asciidoc @@ -0,0 +1,74 @@ +[[ingest]] +== Ingest Plugin + +TODO + +=== Put pipeline API + +The put pipeline api adds pipelines and updates existing pipelines in the cluster. + +[source,js] +-------------------------------------------------- +PUT _ingest/pipeline/my-pipeline-id +{ + "description" : "describe pipeline", + "processors" : [ + { + "simple" : { + // settings + } + }, + // other processors + ] +} +-------------------------------------------------- +// AUTOSENSE + +NOTE: Each ingest node updates its processors asynchronously in the background, so it may take a few seconds for all + nodes to have the latest version of the pipeline. + +=== Get pipeline API + +The get pipeline api returns pipelines based on id. This api always returns a local reference of the pipeline. 
+ +[source,js] +-------------------------------------------------- +GET _ingest/pipeline/my-pipeline-id +-------------------------------------------------- +// AUTOSENSE + +Example response: + +[source,js] +-------------------------------------------------- +{ + "my-pipeline-id": { + "_source" : { + "description": "describe pipeline", + "processors": [ + { + "simple" : { + // settings + } + }, + // other processors + ] + }, + "_version" : 0 + } +} +-------------------------------------------------- + +For each returned pipeline the source and the version is returned. +The version is useful for knowing what version of the pipeline the node has. +Multiple ids can be provided at the same time. Also wildcards are supported. + +=== Delete pipeline API + +The delete pipeline api deletes pipelines by id. + +[source,js] +-------------------------------------------------- +DELETE _ingest/pipeline/my-pipeline-id +-------------------------------------------------- +// AUTOSENSE \ No newline at end of file diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index baa8427d069..1ea8f58a12e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -72,16 +72,15 @@ public final class Pipeline { public final static class Builder { - private final String name; + private final String id; private String description; private List processors = new ArrayList<>(); - public Builder(String name) { - this.name = name; + public Builder(String id) { + this.id = id; } - public Builder(Map config, Map processorRegistry) { - name = (String) config.get("name"); + public void fromMap(Map config, Map processorRegistry) { description = (String) config.get("description"); @SuppressWarnings("unchecked") List>> processors = (List>>) config.get("processors"); @@ -111,7 +110,7 @@ public final class Pipeline { 
} public Pipeline build() { - return new Pipeline(name, description, Collections.unmodifiableList(processors)); + return new Pipeline(id, description, Collections.unmodifiableList(processors)); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 6a2138c9089..2786923b7ef 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -21,11 +21,9 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; -import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.SimpleProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; -import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; import java.util.HashMap; import java.util.Map; @@ -39,7 +37,7 @@ public class IngestModule extends AbstractModule { binder().bind(IngestRestFilter.class).asEagerSingleton(); binder().bind(PipelineExecutionService.class).asEagerSingleton(); binder().bind(PipelineStore.class).asEagerSingleton(); - binder().bind(PipelineConfigDocReader.class).asEagerSingleton(); + binder().bind(PipelineStoreClient.class).asEagerSingleton(); registerProcessor(SimpleProcessor.TYPE, SimpleProcessor.Builder.Factory.class); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 7cb62e86a72..9dfbf314499 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -21,12 +21,22 @@ package org.elasticsearch.plugin.ingest; 
import org.elasticsearch.action.ActionModule; +import org.elasticsearch.client.Client; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.rest.RestDeletePipelineAction; +import org.elasticsearch.plugin.ingest.rest.RestGetPipelineAction; +import org.elasticsearch.plugin.ingest.rest.RestPutPipelineAction; import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineTransportAction; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineTransportAction; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineTransportAction; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.action.RestActionModule; +import org.elasticsearch.rest.RestModule; import java.util.Arrays; import java.util.Collection; @@ -42,9 +52,11 @@ public class IngestPlugin extends Plugin { public static final String NAME = "ingest"; private final Settings nodeSettings; + private final boolean transportClient; public IngestPlugin(Settings nodeSettings) { this.nodeSettings = nodeSettings; + transportClient = "transport".equals(nodeSettings.get(Client.CLIENT_TYPE_SETTING)); } @Override @@ -59,12 +71,20 @@ public class IngestPlugin extends Plugin { @Override public Collection nodeModules() { - return Collections.singletonList(new IngestModule()); + if (transportClient) { + return Collections.emptyList(); + } else { + return Collections.singletonList(new IngestModule()); + } } @Override public Collection> nodeServices() { - return Arrays.asList(PipelineStore.class, PipelineConfigDocReader.class); + if (transportClient) { + 
return Collections.emptyList(); + } else { + return Arrays.asList(PipelineStore.class, PipelineStoreClient.class); + } } @Override @@ -75,7 +95,18 @@ public class IngestPlugin extends Plugin { } public void onModule(ActionModule module) { - module.registerFilter(IngestActionFilter.class); + if (!transportClient) { + module.registerFilter(IngestActionFilter.class); + } + module.registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); + module.registerAction(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); + module.registerAction(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class); + } + + public void onModule(RestModule restModule) { + restModule.addRestAction(RestPutPipelineAction.class); + restModule.addRestAction(RestGetPipelineAction.class); + restModule.addRestAction(RestDeletePipelineAction.class); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index b33bac75995..b5c9b0ffe18 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.gateway.GatewayService; @@ -34,10 +35,7 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.search.SearchHit; import org.elasticsearch.threadpool.ThreadPool; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; +import java.util.*; public class PipelineStore 
extends AbstractLifecycleComponent { @@ -47,13 +45,13 @@ public class PipelineStore extends AbstractLifecycleComponent { private final ThreadPool threadPool; private final ClusterService clusterService; private final TimeValue pipelineUpdateInterval; - private final PipelineConfigDocReader configDocReader; + private final PipelineStoreClient configDocReader; private final Map processorFactoryRegistry; private volatile Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, ThreadPool threadPool, ClusterService clusterService, PipelineConfigDocReader configDocReader, Map processors) { + public PipelineStore(Settings settings, ThreadPool threadPool, ClusterService clusterService, PipelineStoreClient configDocReader, Map processors) { super(settings); this.threadPool = threadPool; this.clusterService = clusterService; @@ -84,10 +82,32 @@ public class PipelineStore extends AbstractLifecycleComponent { } } + public List getReference(String... ids) { + List result = new ArrayList<>(ids.length); + for (String id : ids) { + if (Regex.isSimpleMatchPattern(id)) { + for (Map.Entry entry : pipelines.entrySet()) { + if (Regex.simpleMatch(id, entry.getKey())) { + result.add(entry.getValue()); + } + } + } else { + PipelineReference reference = pipelines.get(id); + if (reference != null) { + result.add(reference); + } + } + } + return result; + } + void updatePipelines() { + // note: this process isn't fast or smart, but the idea is that there will not be many pipelines, + // so for that reason the goal is to keep the update logic simple. 
+ int changed = 0; Map newPipelines = new HashMap<>(pipelines); - for (SearchHit hit : configDocReader.readAll()) { + for (SearchHit hit : configDocReader.readAllPipelines()) { String pipelineId = hit.getId(); BytesReference pipelineSource = hit.getSourceRef(); PipelineReference previous = newPipelines.get(pipelineId); @@ -98,15 +118,24 @@ public class PipelineStore extends AbstractLifecycleComponent { } changed++; - Pipeline.Builder builder = new Pipeline.Builder(hit.sourceAsMap(), processorFactoryRegistry); + Pipeline.Builder builder = new Pipeline.Builder(hit.getId()); + builder.fromMap(hit.sourceAsMap(), processorFactoryRegistry); newPipelines.put(pipelineId, new PipelineReference(builder.build(), hit.getVersion(), pipelineSource)); } - if (changed != 0) { - logger.debug("adding or updating [{}] pipelines", changed); + int removed = 0; + for (String existingPipelineId : pipelines.keySet()) { + if (!configDocReader.existPipeline(existingPipelineId)) { + newPipelines.remove(existingPipelineId); + removed++; + } + } + + if (changed != 0 || removed != 0) { + logger.debug("adding or updating [{}] pipelines and [{}] pipelines removed", changed, removed); pipelines = newPipelines; } else { - logger.debug("adding no new pipelines"); + logger.debug("no pipelines changes detected"); } } @@ -142,7 +171,7 @@ public class PipelineStore extends AbstractLifecycleComponent { } } - static class PipelineReference { + public static class PipelineReference { private final Pipeline pipeline; private final long version; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReader.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreClient.java similarity index 90% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReader.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreClient.java index 9b2dcf1e09e..c11969f840c 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReader.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreClient.java @@ -19,6 +19,7 @@ package org.elasticsearch.plugin.ingest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; @@ -33,14 +34,14 @@ import org.elasticsearch.search.sort.SortOrder; import java.util.Collections; import java.util.Iterator; -public class PipelineConfigDocReader extends AbstractLifecycleComponent { +public class PipelineStoreClient extends AbstractLifecycleComponent { private volatile Client client; private final Injector injector; private final TimeValue scrollTimeout; @Inject - public PipelineConfigDocReader(Settings settings, Injector injector) { + public PipelineStoreClient(Settings settings, Injector injector) { super(settings); this.injector = injector; this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); @@ -60,7 +61,7 @@ public class PipelineConfigDocReader extends AbstractLifecycleComponent { protected void doClose() { } - public Iterable readAll() { + public Iterable readAllPipelines() { // TODO: the search should be replaced with an ingest API when it is available SearchResponse searchResponse = client.prepareSearch(PipelineStore.INDEX) .setVersion(true) @@ -81,6 +82,11 @@ public class PipelineConfigDocReader extends AbstractLifecycleComponent { }; } + public boolean existPipeline(String pipelineId) { + GetResponse response = client.prepareGet(PipelineStore.INDEX, PipelineStore.TYPE, pipelineId).get(); + return response.isExists(); + } + class SearchScrollIterator implements Iterator { private SearchResponse searchResponse; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java new file mode 100644 index 00000000000..f09c229a710 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.rest; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequest; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.support.RestToXContentListener; + +public class RestDeletePipelineAction extends BaseRestHandler { + + @Inject + public RestDeletePipelineAction(Settings settings, RestController controller, Client client) { + super(settings, controller, client); + controller.registerHandler(RestRequest.Method.DELETE, "/_ingest/pipeline/{id}", this); + } + + @Override + protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { + DeletePipelineRequest request = new DeletePipelineRequest(); + request.id(restRequest.param("id")); + client.execute(DeletePipelineAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java new file mode 100644 index 00000000000..a58366eedb0 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.rest; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequest; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; +import org.elasticsearch.rest.action.support.RestToXContentListener; + +public class RestGetPipelineAction extends BaseRestHandler { + + @Inject + public RestGetPipelineAction(Settings settings, RestController controller, Client client) { + super(settings, controller, client); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{ids}", this); + } + + @Override + protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { + 
GetPipelineRequest request = new GetPipelineRequest(); + request.ids(Strings.splitStringByCommaToArray(restRequest.param("ids"))); + client.execute(GetPipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java new file mode 100644 index 00000000000..5b5bd0a0d2e --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.rest; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.support.RestToXContentListener; + +public class RestPutPipelineAction extends BaseRestHandler { + + @Inject + public RestPutPipelineAction(Settings settings, RestController controller, Client client) { + super(settings, controller, client); + controller.registerHandler(RestRequest.Method.PUT, "/_ingest/pipeline/{id}", this); + } + + @Override + protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { + PutPipelineRequest request = new PutPipelineRequest(); + request.id(restRequest.param("id")); + if (restRequest.hasContent()) { + request.source(restRequest.content()); + } + client.execute(PutPipelineAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java new file mode 100644 index 00000000000..b6405362ce5 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.delete; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +public class DeletePipelineAction extends Action { + + public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); + public static final String NAME = "cluster:admin/ingest/pipeline/delete"; + + public DeletePipelineAction() { + super(NAME); + } + + @Override + public DeletePipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new DeletePipelineRequestBuilder(client, this); + } + + @Override + public DeletePipelineResponse newResponse() { + return new DeletePipelineResponse(); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequest.java new file mode 100644 index 00000000000..1b31d5f44b2 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequest.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.delete; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class DeletePipelineRequest extends ActionRequest { + + private String id; + + public void id(String id) { + this.id = id; + } + + public String id() { + return id; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (id == null) { + validationException = addValidationError("id is missing", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequestBuilder.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequestBuilder.java new file mode 100644 index 00000000000..ab7eea1c972 --- 
/dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequestBuilder.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.delete; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +public class DeletePipelineRequestBuilder extends ActionRequestBuilder { + + public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action) { + super(client, action, new DeletePipelineRequest()); + } + + public DeletePipelineRequestBuilder setId(String id) { + request.id(id); + return this; + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java new file mode 100644 index 00000000000..a35752636b6 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.delete; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentHelper; + +import java.io.IOException; +import java.util.Map; + +public class DeletePipelineResponse extends ActionResponse implements ToXContent { + + private String id; + private boolean found; + + DeletePipelineResponse() { + } + + public DeletePipelineResponse(String id, boolean found) { + this.id = id; + this.found = found; + } + + public String id() { + return id; + } + + public boolean found() { + return found; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.id = in.readString(); + this.found = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeBoolean(found); + } + + @Override + public 
XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.ID, id); + builder.field(Fields.FOUND, found); + return builder; + } + + static final class Fields { + static final XContentBuilderString ID = new XContentBuilderString("_id"); + static final XContentBuilderString FOUND = new XContentBuilderString("_found"); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java new file mode 100644 index 00000000000..b57a3db00c9 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.delete; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.delete.TransportDeleteAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DeletePipelineTransportAction extends HandledTransportAction { + + private final TransportDeleteAction deleteAction; + + @Inject + public DeletePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportDeleteAction deleteAction) { + super(settings, DeletePipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, DeletePipelineRequest::new); + this.deleteAction = deleteAction; + } + + @Override + protected void doExecute(DeletePipelineRequest request, ActionListener listener) { + DeleteRequest deleteRequest = new DeleteRequest(); + deleteRequest.index(PipelineStore.INDEX); + deleteRequest.type(PipelineStore.TYPE); + deleteRequest.id(request.id()); + deleteRequest.refresh(true); + deleteAction.execute(deleteRequest, new ActionListener() { + @Override + public void onResponse(DeleteResponse deleteResponse) { + listener.onResponse(new DeletePipelineResponse(deleteResponse.getId(), deleteResponse.isFound())); + } + + @Override 
+ public void onFailure(Throwable e) { + listener.onFailure(e); + } + }); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineAction.java new file mode 100644 index 00000000000..0904a8a3f9f --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineAction.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.get; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +public class GetPipelineAction extends Action { + + public static final GetPipelineAction INSTANCE = new GetPipelineAction(); + public static final String NAME = "cluster:admin/ingest/pipeline/get"; + + public GetPipelineAction() { + super(NAME); + } + + @Override + public GetPipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new GetPipelineRequestBuilder(client, this); + } + + @Override + public GetPipelineResponse newResponse() { + return new GetPipelineResponse(); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequest.java new file mode 100644 index 00000000000..0ff673a7bdb --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequest.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.get; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class GetPipelineRequest extends ActionRequest { + + private String[] ids; + + public void ids(String... ids) { + this.ids = ids; + } + + public String[] ids() { + return ids; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (ids == null || ids.length == 0) { + validationException = addValidationError("ids is missing", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + ids = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(ids); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequestBuilder.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequestBuilder.java new file mode 100644 index 00000000000..4269b6ceccd --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequestBuilder.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.get; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +public class GetPipelineRequestBuilder extends ActionRequestBuilder { + + public GetPipelineRequestBuilder(ElasticsearchClient client, GetPipelineAction action) { + super(client, action, new GetPipelineRequest()); + } + + public GetPipelineRequestBuilder setIds(String... ids) { + request.ids(ids); + return this; + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java new file mode 100644 index 00000000000..020c8004631 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.get; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class GetPipelineResponse extends ActionResponse implements StatusToXContent { + + private Map pipelines; + private Map versions; + + public GetPipelineResponse() { + } + + public GetPipelineResponse(Map pipelines, Map versions) { + this.pipelines = pipelines; + this.versions = versions; + } + + public Map pipelines() { + return pipelines; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + int size = in.readVInt(); + pipelines = new HashMap<>(size); + for (int i = 0; i < size; i++) { + pipelines.put(in.readString(), in.readBytesReference()); + } + size = in.readVInt(); + versions = new HashMap<>(size); + for (int i = 0; i < size; i++) { + versions.put(in.readString(), in.readVLong()); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(pipelines.size()); + for (Map.Entry entry : 
pipelines.entrySet()) { + out.writeString(entry.getKey()); + out.writeBytesReference(entry.getValue()); + } + out.writeVInt(versions.size()); + for (Map.Entry entry : versions.entrySet()) { + out.writeString(entry.getKey()); + out.writeVLong(entry.getValue()); + } + } + + public boolean isFound() { + return !pipelines.isEmpty(); + } + + @Override + public RestStatus status() { + return isFound() ? RestStatus.OK : RestStatus.NOT_FOUND; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + for (Map.Entry entry : pipelines.entrySet()) { + builder.startObject(entry.getKey()); + XContentHelper.writeRawField("_source", entry.getValue(), builder, params); + builder.field("_version", versions.get(entry.getKey())); + builder.endObject(); + } + return builder; + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java new file mode 100644 index 00000000000..89ba97d81b3 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.get; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class GetPipelineTransportAction extends HandledTransportAction { + + private final PipelineStore pipelineStore; + + @Inject + public GetPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStore pipelineStore) { + super(settings, GetPipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, GetPipelineRequest::new); + this.pipelineStore = pipelineStore; + } + + @Override + protected void doExecute(GetPipelineRequest request, ActionListener listener) { + List references = pipelineStore.getReference(request.ids()); + Map result = new HashMap<>(); + Map versions = new HashMap<>(); + for (PipelineStore.PipelineReference reference : references) { + result.put(reference.getPipeline().getId(), reference.getSource()); + versions.put(reference.getPipeline().getId(), reference.getVersion()); + } + listener.onResponse(new GetPipelineResponse(result, versions)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java new file mode 100644 index 00000000000..a638d0c8010 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.put; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +public class PutPipelineAction extends Action { + + public static final PutPipelineAction INSTANCE = new PutPipelineAction(); + public static final String NAME = "cluster:admin/ingest/pipeline/put"; + + public PutPipelineAction() { + super(NAME); + } + + @Override + public PutPipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new PutPipelineRequestBuilder(client, this); + } + + @Override + public PutPipelineResponse newResponse() { + return new PutPipelineResponse(); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequest.java new file mode 100644 index 00000000000..b9ef9c17e45 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequest.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.put; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class PutPipelineRequest extends ActionRequest { + + private String id; + private BytesReference source; + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (id == null) { + validationException = addValidationError("id is missing", validationException); + } + if (source == null) { + validationException = addValidationError("source is missing", validationException); + } + return validationException; + } + + public String id() { + return id; + } + + public void id(String id) { + this.id = id; + } + + public BytesReference source() { + return source; + } + + public void source(BytesReference source) { + this.source = source; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + source = in.readBytesReference(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeBytesReference(source); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequestBuilder.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequestBuilder.java new file mode 100644 index 00000000000..732756adee4 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequestBuilder.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.put; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.bytes.BytesReference; + +public class PutPipelineRequestBuilder extends ActionRequestBuilder { + + public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action) { + super(client, action, new PutPipelineRequest()); + } + + public PutPipelineRequestBuilder setId(String id) { + request.id(id); + return this; + } + + public PutPipelineRequestBuilder setSource(BytesReference source) { + request.source(source); + return this; + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java new file mode 100644 index 00000000000..eb733bcbffc --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.put; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; + +import java.io.IOException; + +public class PutPipelineResponse extends ActionResponse implements ToXContent { + + private String id; + private long version; + + public String id() { + return id; + } + + public PutPipelineResponse id(String id) { + this.id = id; + return this; + } + + public long version() { + return version; + } + + public PutPipelineResponse version(long version) { + this.version = version; + return this; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeLong(version); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readLong(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.ID, id); + builder.field(Fields.VERSION, version); + return builder; + } + + static final class Fields { + static final 
XContentBuilderString ID = new XContentBuilderString("_id"); + static final XContentBuilderString VERSION = new XContentBuilderString("_version"); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java new file mode 100644 index 00000000000..fc5b7e7f124 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.put; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.function.Supplier; + +public class PutPipelineTransportAction extends HandledTransportAction { + + private final TransportIndexAction indexAction; + + @Inject + public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportIndexAction indexAction) { + super(settings, PutPipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); + this.indexAction = indexAction; + } + + @Override + protected void doExecute(PutPipelineRequest request, ActionListener listener) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(PipelineStore.INDEX); + indexRequest.type(PipelineStore.TYPE); + indexRequest.id(request.id()); + indexRequest.source(request.source()); + indexRequest.refresh(true); + indexAction.execute(indexRequest, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + PutPipelineResponse response = new PutPipelineResponse(); + response.id(indexResponse.getId()); + response.version(indexResponse.getVersion()); + listener.onResponse(response); + } + + @Override + public void 
onFailure(Throwable e) { + listener.onFailure(e); + } + }); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java similarity index 56% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 0c884713374..dd83939530d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/BasicTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -21,6 +21,14 @@ package org.elasticsearch.ingest; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequestBuilder; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineResponse; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -29,20 +37,25 @@ import java.util.Collections; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; +import static org.hamcrest.Matchers.*; -@ESIntegTestCase.ClusterScope(numDataNodes = 1, numClientNodes = 0) -public class BasicTests extends ESIntegTestCase { +public class IngestClientIT extends ESIntegTestCase { @Override protected Collection> 
nodePlugins() { - return Collections.singletonList(IngestPlugin.class); + return pluginList(IngestPlugin.class); } - public void test() throws Exception { - client().prepareIndex(PipelineStore.INDEX, PipelineStore.TYPE, "_id") + @Override + protected Collection> transportClientPlugins() { + return nodePlugins(); + } + + public void testBasics() throws Exception { + new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) + .setId("_id") .setSource(jsonBuilder().startObject() - .field("name", "my_pipeline") .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -54,10 +67,18 @@ public class BasicTests extends ESIntegTestCase { .endObject() .endObject() .endArray() - .endObject()) - .setRefresh(true) + .endObject().bytes()) .get(); - Thread.sleep(5000); + assertBusy(new Runnable() { + @Override + public void run() { + GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(response.isFound(), is(true)); + assertThat(response.pipelines().get("_id"), notNullValue()); + } + }); createIndex("test"); client().prepareIndex("test", "type", "1").setSource("field2", "abc") @@ -83,6 +104,23 @@ public class BasicTests extends ESIntegTestCase { assertThat(doc.get("field3"), equalTo("xyz")); } }); + + DeletePipelineResponse response = new DeletePipelineRequestBuilder(client(), DeletePipelineAction.INSTANCE) + .setId("_id") + .get(); + assertThat(response.found(), is(true)); + assertThat(response.id(), equalTo("_id")); + + assertBusy(new Runnable() { + @Override + public void run() { + GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(response.isFound(), is(false)); + assertThat(response.pipelines().get("_id"), nullValue()); + } + }); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReaderTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreClientTests.java similarity index 88% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReaderTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreClientTests.java index 782d5a674b4..f670bdab7f9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineConfigDocReaderTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreClientTests.java @@ -25,10 +25,10 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; -public class PipelineConfigDocReaderTests extends ESSingleNodeTestCase { +public class PipelineStoreClientTests extends ESSingleNodeTestCase { public void testReadAll() { - PipelineConfigDocReader reader = new PipelineConfigDocReader(Settings.EMPTY, node().injector()); + PipelineStoreClient reader = new PipelineStoreClient(Settings.EMPTY, node().injector()); reader.start(); createIndex(PipelineStore.INDEX); @@ -41,7 +41,7 @@ public class PipelineConfigDocReaderTests extends ESSingleNodeTestCase { client().admin().indices().prepareRefresh().get(); int i = 0; - for (SearchHit hit : reader.readAll()) { + for (SearchHit hit : reader.readAllPipelines()) { assertThat(hit.getId(), equalTo(Integer.toString(i))); assertThat(hit.getVersion(), equalTo(1l)); assertThat(hit.getSource().get("field"), equalTo("value" + i)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index b10291fefbd..306df53600c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -33,11 +33,13 @@ import org.junit.Before; import java.util.ArrayList; import 
java.util.Collections; +import java.util.Comparator; import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -45,14 +47,14 @@ public class PipelineStoreTests extends ESTestCase { private PipelineStore store; private ThreadPool threadPool; - private PipelineConfigDocReader docReader; + private PipelineStoreClient client; @Before public void init() { threadPool = new ThreadPool("test"); ClusterService clusterService = mock(ClusterService.class); - docReader = mock(PipelineConfigDocReader.class); - store = new PipelineStore(Settings.EMPTY, threadPool, clusterService, docReader, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Builder.Factory())); + client = mock(PipelineStoreClient.class); + store = new PipelineStore(Settings.EMPTY, threadPool, clusterService, client, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Builder.Factory())); store.start(); } @@ -66,52 +68,110 @@ public class PipelineStoreTests extends ESTestCase { public void testUpdatePipeline() { List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"name\": \"_name1\", \"description\": \"_description1\"}")) + .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(docReader.readAll()).thenReturn(hits); + when(client.readAllPipelines()).thenReturn(hits); + when(client.existPipeline("1")).thenReturn(true); assertThat(store.get("1"), nullValue()); store.updatePipelines(); - assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); + when(client.existPipeline("2")).thenReturn(true); hits.add(new 
InternalSearchHit(0, "2", new StringText("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"name\": \"_name2\", \"description\": \"_description2\"}")) + .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); store.updatePipelines(); - assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); - assertThat(store.get("2").getId(), equalTo("_name2")); + assertThat(store.get("2").getId(), equalTo("2")); assertThat(store.get("2").getDescription(), equalTo("_description2")); + + hits.remove(1); + when(client.existPipeline("2")).thenReturn(false); + store.updatePipelines(); + assertThat(store.get("1").getId(), equalTo("1")); + assertThat(store.get("1").getDescription(), equalTo("_description1")); + assertThat(store.get("2"), nullValue()); } public void testPipelineUpdater() throws Exception { List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"name\": \"_name1\", \"description\": \"_description1\"}")) + .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(docReader.readAll()).thenReturn(hits); + when(client.readAllPipelines()).thenReturn(hits); + when(client.existPipeline(anyString())).thenReturn(true); assertThat(store.get("1"), nullValue()); store.startUpdateWorker(); assertBusy(() -> { assertThat(store.get("1"), notNullValue()); - assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); }); hits.add(new InternalSearchHit(0, "2", new StringText("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"name\": \"_name2\", \"description\": \"_description2\"}")) + .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); assertBusy(() -> { assertThat(store.get("1"), 
notNullValue()); - assertThat(store.get("1").getId(), equalTo("_name1")); + assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); assertThat(store.get("2"), notNullValue()); - assertThat(store.get("2").getId(), equalTo("_name2")); + assertThat(store.get("2").getId(), equalTo("2")); assertThat(store.get("2").getDescription(), equalTo("_description2")); }); } + public void testGetReference() { + // fill the store up for the test: + List hits = new ArrayList<>(); + hits.add(new InternalSearchHit(0, "foo", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); + hits.add(new InternalSearchHit(0, "bar", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); + hits.add(new InternalSearchHit(0, "foobar", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); + when(client.readAllPipelines()).thenReturn(hits); + store.updatePipelines(); + + List result = store.getReference("foo"); + assertThat(result.size(), equalTo(1)); + assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); + + result = store.getReference("foo*"); + // to make sure the order is consistent in the test: + Collections.sort(result, new Comparator() { + @Override + public int compare(PipelineStore.PipelineReference first, PipelineStore.PipelineReference second) { + return first.getPipeline().getId().compareTo(second.getPipeline().getId()); + } + }); + assertThat(result.size(), equalTo(2)); + assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); + assertThat(result.get(1).getPipeline().getId(), equalTo("foobar")); + + result = store.getReference("bar*"); + assertThat(result.size(), equalTo(1)); + assertThat(result.get(0).getPipeline().getId(), equalTo("bar")); + + result = store.getReference("*"); + // to make sure the order is consistent in the 
test: + Collections.sort(result, new Comparator() { + @Override + public int compare(PipelineStore.PipelineReference first, PipelineStore.PipelineReference second) { + return first.getPipeline().getId().compareTo(second.getPipeline().getId()); + } + }); + assertThat(result.size(), equalTo(3)); + assertThat(result.get(0).getPipeline().getId(), equalTo("bar")); + assertThat(result.get(1).getPipeline().getId(), equalTo("foo")); + assertThat(result.get(2).getPipeline().getId(), equalTo("foobar")); + + result = store.getReference("foo", "bar"); + assertThat(result.size(), equalTo(2)); + assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); + assertThat(result.get(1).getPipeline().getId(), equalTo("bar")); + } + } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.delete_pipeline.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.delete_pipeline.json new file mode 100644 index 00000000000..69b8f53d63a --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.delete_pipeline.json @@ -0,0 +1,20 @@ +{ + "ingest.delete_pipeline": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html", + "methods": [ "DELETE" ], + "url": { + "path": "/_ingest/pipeline/{id}", + "paths": [ "/_ingest/pipeline/{id}" ], + "parts": { + "id": { + "type" : "string", + "description" : "Pipeline ID", + "required" : true + } + }, + "params": { + } + }, + "body": null + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json new file mode 100644 index 00000000000..246c6535e92 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json @@ -0,0 +1,20 @@ +{ + "ingest.get_pipeline": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html", + "methods": [ "GET" ], + "url": { + "path": "/_ingest/pipeline/{ids}", 
+ "paths": [ "/_ingest/pipeline/{ids}" ], + "parts": { + "ids": { + "type" : "string", + "description" : "Comma separated list of pipeline ids. Wildcards supported", + "required" : true + } + }, + "params": { + } + }, + "body": null + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.put_pipeline.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.put_pipeline.json new file mode 100644 index 00000000000..fd88d352731 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.put_pipeline.json @@ -0,0 +1,23 @@ +{ + "ingest.put_pipeline": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html", + "methods": [ "PUT" ], + "url": { + "path": "/_ingest/pipeline/{id}", + "paths": [ "/_ingest/pipeline/{id}" ], + "parts": { + "id": { + "type" : "string", + "description" : "Pipeline ID", + "required" : true + } + }, + "params": { + } + }, + "body": { + "description" : "The ingest definition", + "required" : true + } + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml new file mode 100644 index 00000000000..ae076f3e34f --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -0,0 +1,57 @@ +--- +"Test basic pipeline crud": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "simple" : { + "path" : "field1", + "value" : "_value", + "add_field" : "field2", + "add_field_value" : "_value" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.get_pipeline: + ids: "my_pipeline" + - match: { 
my_pipeline._source.description: "_description" } + - match: { my_pipeline._version: 1 } + + - do: + ingest.delete_pipeline: + id: "my_pipeline" + - match: { _id: "my_pipeline" } + - match: { _found: true } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + catch: missing + ingest.get_pipeline: + ids: "my_pipeline" From 17cf06ccc2da42d10b1c8f51a5d21a1d71bd3b90 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 20 Oct 2015 11:08:04 +0300 Subject: [PATCH 008/347] restructure processors This Commit does the following: - moves processors into their own sub-packages - adds ability to add any typed field value into Data --- .../ingest/src/main/java/org/elasticsearch/ingest/Data.java | 2 +- .../src/main/java/org/elasticsearch/ingest/Pipeline.java | 2 ++ .../org/elasticsearch/ingest/{ => processor}/Processor.java | 4 +++- .../ingest/{ => processor/simple}/SimpleProcessor.java | 5 ++++- .../java/org/elasticsearch/plugin/ingest/IngestModule.java | 4 ++-- .../java/org/elasticsearch/plugin/ingest/PipelineStore.java | 2 +- .../test/java/org/elasticsearch/ingest/IngestClientIT.java | 3 --- .../plugin/ingest/PipelineExecutionServiceTests.java | 2 +- .../org/elasticsearch/plugin/ingest/PipelineStoreTests.java | 2 +- .../plugin/ingest/transport/IngestActionFilterTests.java | 2 +- 10 files changed, 16 insertions(+), 12 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/{ => processor}/Processor.java (95%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/{ => processor/simple}/SimpleProcessor.java (95%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 6a4f2f965ce..d27360c0380 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -47,7 +47,7 @@ public final class Data { return (T) XContentMapValues.extractValue(path, document); } - public void addField(String field, String value) { + public void addField(String field, Object value) { modified = true; document.put(field, value); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 1ea8f58a12e..83627d6b2e8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ingest.processor.Processor; + import java.util.ArrayList; import java.util.Collections; import java.util.List; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java similarity index 95% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 3a3711aaf28..6059bb1bb71 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -18,7 +18,9 @@ */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.Data; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java similarity index 95% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java index 956e444cb48..82e25e77e8c 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java @@ -17,7 +17,10 @@ * under the License. */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.processor.simple; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 2786923b7ef..f3d2469c17c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -21,8 +21,8 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.SimpleProcessor; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.simple.SimpleProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import java.util.HashMap; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index b5c9b0ffe18..9b181c805bc 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.processor.Processor; import 
org.elasticsearch.search.SearchHit; import org.elasticsearch.threadpool.ThreadPool; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index dd83939530d..c082fe21b14 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.plugin.ingest.IngestPlugin; -import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineResponse; @@ -33,11 +32,9 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.hamcrest.Matchers.*; public class IngestClientIT extends ESIntegTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index e2f966ba37e..7dfddb3e1cc 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.Processor; +import 
org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 306df53600c..bdcb19bbc6f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; -import org.elasticsearch.ingest.SimpleProcessor; +import org.elasticsearch.ingest.processor.simple.SimpleProcessor; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 4ed32fbf650..f338992d7c0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.SimpleProcessor; +import org.elasticsearch.ingest.processor.simple.SimpleProcessor; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; import 
org.elasticsearch.plugin.ingest.PipelineStore; From 80849bbd6b1db6eaa2cd238482d7308be3c09153 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 19 Oct 2015 10:57:19 +0300 Subject: [PATCH 009/347] add date processor --- .../ingest/processor/date/DateParser.java | 29 ++++ .../ingest/processor/date/DateProcessor.java | 144 ++++++++++++++++++ .../processor/date/ISO8601DateParser.java | 48 ++++++ .../processor/date/JodaPatternDateParser.java | 66 ++++++++ .../processor/date/TAI64NDateParser.java | 42 +++++ .../ingest/processor/date/UnixDateParser.java | 35 +++++ .../processor/date/UnixMsDateParser.java | 35 +++++ .../plugin/ingest/IngestModule.java | 2 + .../elasticsearch/ingest/DatePipelineIT.java | 101 ++++++++++++ .../date/ISO8601DateParserTests.java | 40 +++++ .../date/JodaPatternDateParserTests.java | 41 +++++ .../processor/date/TAI64NDateParserTests.java | 35 +++++ .../processor/date/UnixDateParserTests.java | 32 ++++ .../processor/date/UnixMsDateParserTests.java | 32 ++++ 14 files changed, 682 insertions(+) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java 
create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java new file mode 100644 index 00000000000..587bfc40053 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTime; + +public interface DateParser { + + public long parseMillis(String date); + + public DateTime parseDateTime(String date); +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java new file mode 100644 index 00000000000..31828f4b84c --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Processor; +import org.joda.time.DateTimeZone; + +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +public final class DateProcessor implements Processor { + + public static final String TYPE = "date"; + public static final String DEFAULT_TARGET_FIELD = "@timestamp"; + + private final DateTimeZone timezone; + private final Locale locale; + private final String matchField; + private final String targetField; + private final List matchFormats; + private final List parserList; + + public DateProcessor(String timezone, String locale, String matchField, List matchFormats, String targetField) { + this.timezone = DateTimeZone.forID(timezone); + this.locale = Locale.forLanguageTag(locale); + this.matchField = matchField; + this.matchFormats = matchFormats; + this.parserList = matchFormats.stream().map(elt -> getParser(elt)).collect(Collectors.toList()); + this.targetField = (targetField == null) ? 
DEFAULT_TARGET_FIELD : targetField; + } + + @Override + public void execute(Data data) { + String value = (String) data.getProperty(matchField); + // TODO(talevy): handle multiple patterns + // TODO(talevy): handle custom timestamp fields + String dateAsISO8601 = parserList.get(0).parseDateTime(value).toString(); + data.addField(targetField, dateAsISO8601); + } + + private DateParser getParser(String format) { + if ("ISO8601".equals(format)) { + // TODO(talevy): fallback solution for almost ISO8601 + if (timezone == null) { + return new ISO8601DateParser(); + } else { + return new ISO8601DateParser(timezone); + } + } else if ("UNIX".equals(format)) { + return new UnixDateParser(); + } else if ("UNIX_MS".equals(format)) { + return new UnixMsDateParser(); + } else if ("TAI64N".equals(format)) { + return new TAI64NDateParser(); + } else { + if (timezone != null && locale != null) { + return new JodaPatternDateParser(format, timezone, locale); + } else if (timezone != null) { + return new JodaPatternDateParser(format, timezone); + } else if (locale != null) { + return new JodaPatternDateParser(format, locale); + } else { + return new JodaPatternDateParser(format); + } + } + } + + public static class Builder implements Processor.Builder { + + private String timezone; + private String locale; + private String matchField; + private List matchFormats; + private String targetField; + + public Builder() { + matchFormats = new ArrayList(); + } + + public void setTimezone(String timezone) { + this.timezone = timezone; + } + + public void setLocale(String locale) { + this.locale = locale; + } + + public void setMatchField(String matchField) { + this.matchField = matchField; + } + + public void addMatchFormat(String matchFormat) { + matchFormats.add(matchFormat); + } + + public void setTargetField(String targetField) { + this.targetField = targetField; + } + + public void fromMap(Map config) { + this.timezone = (String) config.get("timezone"); + this.locale = (String) 
config.get("locale"); + this.matchField = (String) config.get("match_field"); + this.matchFormats = (List) config.get("match_formats"); + this.targetField = (String) config.get("target_field"); + } + + @Override + public Processor build() { + return new DateProcessor(timezone, locale, matchField, matchFormats, targetField); + } + + public static class Factory implements Processor.Builder.Factory { + + @Override + public Processor.Builder create() { + return new Builder(); + } + } + + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java new file mode 100644 index 00000000000..aaab1340f61 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.ISODateTimeFormat; + +public class ISO8601DateParser implements DateParser { + + private final DateTimeFormatter formatter; + + public ISO8601DateParser(DateTimeZone timezone) { + formatter = ISODateTimeFormat.dateTimeParser().withZone(timezone); + } + + public ISO8601DateParser() { + formatter = ISODateTimeFormat.dateTimeParser().withOffsetParsed(); + } + + @Override + public long parseMillis(String date) { + return formatter.parseMillis(date); + } + + @Override + public DateTime parseDateTime(String date) { + return formatter.parseDateTime(date); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java new file mode 100644 index 00000000000..029cfe2cdbd --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +import java.util.Locale; + +public class JodaPatternDateParser implements DateParser { + + private final DateTimeFormatter formatter; + + public JodaPatternDateParser(String format) { + formatter = DateTimeFormat.forPattern(format) + .withDefaultYear((new DateTime()).getYear()) + .withOffsetParsed(); + } + + public JodaPatternDateParser(String format, DateTimeZone timezone) { + formatter = DateTimeFormat.forPattern(format) + .withDefaultYear((new DateTime()).getYear()) + .withZone(timezone); + } + + public JodaPatternDateParser(String format, Locale locale) { + formatter = DateTimeFormat.forPattern(format) + .withDefaultYear((new DateTime()).getYear()) + .withLocale(locale); + } + + public JodaPatternDateParser(String format, DateTimeZone timezone, Locale locale) { + formatter = DateTimeFormat.forPattern(format) + .withDefaultYear((new DateTime()).getYear()) + .withZone(timezone).withLocale(locale); + } + + @Override + public long parseMillis(String date) { + return formatter.parseMillis(date); + } + + @Override + public DateTime parseDateTime(String date) { + return formatter.parseDateTime(date); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java new file mode 100644 index 00000000000..3ee32508a3a --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTime; + +public class TAI64NDateParser implements DateParser { + + @Override + public long parseMillis(String date) { + if (date.startsWith("@")) { + date = date.substring(1); + } + long base = Long.parseLong(date.substring(1, 16), 16); + // 1356138046000 + long rest = Long.parseLong(date.substring(16, 24), 16); + + return ((base * 1000) - 10000) + (rest/1000000); + } + + @Override + public DateTime parseDateTime(String date) { + return new DateTime(parseMillis(date)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java new file mode 100644 index 00000000000..6439ac1564e --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTime; + +public class UnixDateParser implements DateParser { + + @Override + public long parseMillis(String date) { + return (long) (Float.parseFloat(date) * 1000); + } + + @Override + public DateTime parseDateTime(String date) { + return new DateTime(parseMillis(date)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java new file mode 100644 index 00000000000..c56b558afd8 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTime; + +public class UnixMsDateParser implements DateParser { + + @Override + public long parseMillis(String date) { + return Long.parseLong(date); + } + + @Override + public DateTime parseDateTime(String date) { + return new DateTime(parseMillis(date)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index f3d2469c17c..ad86e3d9df5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.simple.SimpleProcessor; +import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import java.util.HashMap; @@ -40,6 +41,7 @@ public class IngestModule extends AbstractModule { binder().bind(PipelineStoreClient.class).asEagerSingleton(); registerProcessor(SimpleProcessor.TYPE, SimpleProcessor.Builder.Factory.class); + registerProcessor(DateProcessor.TYPE, DateProcessor.Builder.Factory.class); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Builder.Factory.class); for (Map.Entry> entry : processors.entrySet()) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java new file mode 100644 index 00000000000..370dece2a44 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.plugin.ingest.IngestPlugin; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequestBuilder; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineResponse; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; +import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Collection; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.*; + +public class DatePipelineIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(IngestPlugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return nodePlugins(); + } + + public void testBasics() throws Exception { 
+ new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) + .setId("_id") + .setSource(jsonBuilder().startObject() + .field("description", "my_pipeline") + .startArray("processors") + .startObject() + .startObject("date") + .field("timezone", "UTC") + .field("locale", "en") + .field("match_field", "my_date_field") + .startArray("match_formats") + .value("yyyy MMM dd HH:mm:ss Z") + .endArray() + .endObject() + .endObject() + .endArray() + .endObject().bytes()) + .get(); + assertBusy(new Runnable() { + @Override + public void run() { + GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(response.isFound(), is(true)); + assertThat(response.pipelines().get("_id"), notNullValue()); + } + }); + + createIndex("test"); + client().prepareIndex("test", "type", "1").setSource("my_date_field", "2015 Nov 24 01:29:01 -0800") + .putHeader("ingest", "_id") + .get(); + + assertBusy(new Runnable() { + @Override + public void run() { + Map doc = client().prepareGet("test", "type", "1") + .get().getSourceAsMap(); + assertThat(doc.get("@timestamp"), equalTo("2015-11-24T09:29:01.000Z")); + } + }); + } + + @Override + protected boolean enableMockModules() { + return false; + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java new file mode 100644 index 00000000000..a726fa43b8e --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; +import org.junit.Test; + +import static org.hamcrest.core.IsEqual.equalTo; + +public class ISO8601DateParserTests extends ESTestCase { + + public void testParseUTC() { + ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); + assertThat(parser.parseMillis("2001-01-01T00:00:00-0800"), equalTo(978336000000L)); + } + + @Test(expected=IllegalArgumentException.class) + public void testParseFailure() { + ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); + parser.parseMillis("2001-01-0:00-0800"); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java new file mode 100644 index 00000000000..214459fa03a --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.Locale; + +import static org.hamcrest.core.IsEqual.equalTo; + +public class JodaPatternDateParserTests extends ESTestCase { + + public void testParse() { + JodaPatternDateParser parser = new JodaPatternDateParser("MMM dd HH:mm:ss Z", Locale.ENGLISH); + + assertThat(Instant.ofEpochMilli(parser.parseMillis("Nov 24 01:29:01 -0800")) + .atZone(ZoneId.of("GMT-8")) + .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss")), + equalTo("11 24 01:29:01")); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java new file mode 100644 index 00000000000..0e4cab841df --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.core.IsEqual.equalTo; + +public class TAI64NDateParserTests extends ESTestCase { + + public void testParse() { + TAI64NDateParser parser = new TAI64NDateParser(); + String input = "4000000050d506482dbdf024"; + String expected = "2012-12-22T03:00:46.767+02:00"; + assertThat(parser.parseDateTime("@" + input).toString(), equalTo(expected)); + assertThat(parser.parseDateTime(input).toString(), equalTo(expected)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java new file mode 100644 index 00000000000..4a3025fae9f --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java @@ -0,0 +1,32 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.core.IsEqual.equalTo; + +public class UnixDateParserTests extends ESTestCase { + + public void testParse() { + UnixDateParser parser = new UnixDateParser(); + assertThat(parser.parseMillis("1000.5"), equalTo(1000500L)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java new file mode 100644 index 00000000000..1ffaf251819 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java @@ -0,0 +1,32 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.core.IsEqual.equalTo; + +public class UnixMsDateParserTests extends ESTestCase { + + public void testParse() { + UnixMsDateParser parser = new UnixMsDateParser(); + assertThat(parser.parseMillis("1000500"), equalTo(1000500L)); + } +} From e204671aa97f037b102c6a36f1c25d23225179ff Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 26 Oct 2015 22:50:58 -0700 Subject: [PATCH 010/347] fix forbiddenapis --- .../ingest/processor/date/DateProcessor.java | 13 +++++++------ .../processor/date/JodaPatternDateParser.java | 8 ++++---- .../ingest/processor/date/TAI64NDateParser.java | 8 +++++++- .../ingest/processor/date/UnixDateParser.java | 8 +++++++- .../ingest/processor/date/UnixMsDateParser.java | 8 +++++++- .../processor/date/JodaPatternDateParserTests.java | 6 ++++-- .../processor/date/TAI64NDateParserTests.java | 3 ++- .../ingest/processor/date/UnixDateParserTests.java | 3 ++- .../processor/date/UnixMsDateParserTests.java | 3 ++- 9 files changed, 42 insertions(+), 18 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 31828f4b84c..4d4beed1dda 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -20,7 +20,7 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.ingest.Data; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.processor.Processor; import org.joda.time.DateTimeZone; import java.util.ArrayList; @@ -42,7 +42,7 @@ public final class DateProcessor implements Processor { private final List parserList; public DateProcessor(String timezone, String locale, String matchField, List matchFormats, 
String targetField) { - this.timezone = DateTimeZone.forID(timezone); + this.timezone = (timezone == null) ? DateTimeZone.UTC : DateTimeZone.forID(timezone); this.locale = Locale.forLanguageTag(locale); this.matchField = matchField; this.matchFormats = matchFormats; @@ -68,11 +68,11 @@ public final class DateProcessor implements Processor { return new ISO8601DateParser(timezone); } } else if ("UNIX".equals(format)) { - return new UnixDateParser(); + return new UnixDateParser(timezone); } else if ("UNIX_MS".equals(format)) { - return new UnixMsDateParser(); + return new UnixMsDateParser(timezone); } else if ("TAI64N".equals(format)) { - return new TAI64NDateParser(); + return new TAI64NDateParser(timezone); } else { if (timezone != null && locale != null) { return new JodaPatternDateParser(format, timezone, locale); @@ -95,7 +95,7 @@ public final class DateProcessor implements Processor { private String targetField; public Builder() { - matchFormats = new ArrayList(); + matchFormats = new ArrayList(); } public void setTimezone(String timezone) { @@ -118,6 +118,7 @@ public final class DateProcessor implements Processor { this.targetField = targetField; } + @SuppressWarnings("unchecked") public void fromMap(Map config) { this.timezone = (String) config.get("timezone"); this.locale = (String) config.get("locale"); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java index 029cfe2cdbd..03afe781cdd 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java @@ -32,25 +32,25 @@ public class JodaPatternDateParser implements DateParser { public JodaPatternDateParser(String format) { formatter = DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime()).getYear()) 
+ .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) .withOffsetParsed(); } public JodaPatternDateParser(String format, DateTimeZone timezone) { formatter = DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime()).getYear()) + .withDefaultYear((new DateTime(timezone)).getYear()) .withZone(timezone); } public JodaPatternDateParser(String format, Locale locale) { formatter = DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime()).getYear()) + .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) .withLocale(locale); } public JodaPatternDateParser(String format, DateTimeZone timezone, Locale locale) { formatter = DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime()).getYear()) + .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) .withZone(timezone).withLocale(locale); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java index 3ee32508a3a..f51269b5b48 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java @@ -20,8 +20,14 @@ package org.elasticsearch.ingest.processor.date; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; public class TAI64NDateParser implements DateParser { + private DateTimeZone timezone; + + public TAI64NDateParser(DateTimeZone timezone) { + this.timezone = timezone; + } @Override public long parseMillis(String date) { @@ -37,6 +43,6 @@ public class TAI64NDateParser implements DateParser { @Override public DateTime parseDateTime(String date) { - return new DateTime(parseMillis(date)); + return new DateTime(parseMillis(date), timezone); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java index 6439ac1564e..1fa9c836a2d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java @@ -20,8 +20,14 @@ package org.elasticsearch.ingest.processor.date; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; public class UnixDateParser implements DateParser { + private final DateTimeZone timezone; + + public UnixDateParser(DateTimeZone timezone) { + this.timezone = timezone; + } @Override public long parseMillis(String date) { @@ -30,6 +36,6 @@ public class UnixDateParser implements DateParser { @Override public DateTime parseDateTime(String date) { - return new DateTime(parseMillis(date)); + return new DateTime(parseMillis(date), timezone); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java index c56b558afd8..640fb52fe32 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java @@ -20,8 +20,14 @@ package org.elasticsearch.ingest.processor.date; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; public class UnixMsDateParser implements DateParser { + private final DateTimeZone timezone; + + public UnixMsDateParser(DateTimeZone timezone) { + this.timezone = timezone; + } @Override public long parseMillis(String date) { @@ -30,6 +36,6 @@ public class UnixMsDateParser implements DateParser { @Override public DateTime parseDateTime(String date) { - return new DateTime(parseMillis(date)); + return new DateTime(parseMillis(date), timezone); } } diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java index 214459fa03a..268222feb35 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; import java.time.Instant; import java.time.ZoneId; @@ -31,11 +32,12 @@ import static org.hamcrest.core.IsEqual.equalTo; public class JodaPatternDateParserTests extends ESTestCase { public void testParse() { - JodaPatternDateParser parser = new JodaPatternDateParser("MMM dd HH:mm:ss Z", Locale.ENGLISH); + JodaPatternDateParser parser = new JodaPatternDateParser("MMM dd HH:mm:ss Z", + DateTimeZone.forOffsetHours(-8), Locale.ENGLISH); assertThat(Instant.ofEpochMilli(parser.parseMillis("Nov 24 01:29:01 -0800")) .atZone(ZoneId.of("GMT-8")) - .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss")), + .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), equalTo("11 24 01:29:01")); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java index 0e4cab841df..d0007cfa5d7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java @@ -20,13 +20,14 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; import static org.hamcrest.core.IsEqual.equalTo; public class TAI64NDateParserTests extends 
ESTestCase { public void testParse() { - TAI64NDateParser parser = new TAI64NDateParser(); + TAI64NDateParser parser = new TAI64NDateParser(DateTimeZone.forOffsetHours(2)); String input = "4000000050d506482dbdf024"; String expected = "2012-12-22T03:00:46.767+02:00"; assertThat(parser.parseDateTime("@" + input).toString(), equalTo(expected)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java index 4a3025fae9f..68636d88811 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java @@ -20,13 +20,14 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; import static org.hamcrest.core.IsEqual.equalTo; public class UnixDateParserTests extends ESTestCase { public void testParse() { - UnixDateParser parser = new UnixDateParser(); + UnixDateParser parser = new UnixDateParser(DateTimeZone.UTC); assertThat(parser.parseMillis("1000.5"), equalTo(1000500L)); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java index 1ffaf251819..db2fa506e5e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java @@ -20,13 +20,14 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; import static org.hamcrest.core.IsEqual.equalTo; public class UnixMsDateParserTests extends ESTestCase { public void testParse() { - UnixMsDateParser parser = new 
UnixMsDateParser(); + UnixMsDateParser parser = new UnixMsDateParser(DateTimeZone.UTC); assertThat(parser.parseMillis("1000500"), equalTo(1000500L)); } } From 8df9fc82d9ce708741e2ee15b2d13e59c73ef948 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 27 Oct 2015 15:34:54 +0700 Subject: [PATCH 011/347] added custom rest specs for index and bulk that support the ingest param and added rest tests that verify that the ingest param is working --- .../rest-api-spec/api/ingest.bulk.json | 56 +++++++++++++ .../rest-api-spec/api/ingest.index.json | 80 +++++++++++++++++++ .../rest-api-spec/test/ingest/20_crud.yaml | 2 +- .../test/ingest/30_simple_processor.yaml | 63 +++++++++++++++ 4 files changed, 200 insertions(+), 1 deletion(-) create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json new file mode 100644 index 00000000000..5a4c7a856b9 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json @@ -0,0 +1,56 @@ +{ + "ingest.bulk": { + "documentation": "Copied from bulk in core to add the pipeline parameter to rest spec", + "methods": ["POST", "PUT"], + "url": { + "path": "/_bulk", + "paths": ["/_bulk", "/{index}/_bulk", "/{index}/{type}/_bulk"], + "parts": { + "index": { + "type" : "string", + "description" : "Default index for items which don't provide one" + }, + "type": { + "type" : "string", + "description" : "Default document type for items which don't provide one" + } + }, + "params": { + "consistency": { + "type" : "enum", + "options" : ["one", "quorum", "all"], + "description" : "Explicit write consistency setting for the operation" + }, + "refresh": { + 
"type" : "boolean", + "description" : "Refresh the index after performing the operation" + }, + "routing": { + "type" : "string", + "description" : "Specific routing value" + }, + "timeout": { + "type" : "time", + "description" : "Explicit operation timeout" + }, + "type": { + "type" : "string", + "description" : "Default document type for items which don't provide one" + }, + "fields": { + "type": "list", + "description" : "Default comma-separated list of fields to return in the response for updates" + }, + "ingest" : { + "type" : "string", + "description" : "The pipeline id to preprocess incoming documents with" + } + } + }, + "body": { + "description" : "The operation definition and data (action-data pairs), separated by newlines", + "required" : true, + "serialize" : "bulk" + } + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json new file mode 100644 index 00000000000..23d3337bf60 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json @@ -0,0 +1,80 @@ +{ + "ingest.index": { + "documentation": "Copied from index in core to add support for the pipeline parameter to rest spec", + "methods": ["POST", "PUT"], + "url": { + "path": "/{index}/{type}", + "paths": ["/{index}/{type}", "/{index}/{type}/{id}"], + "parts": { + "id": { + "type" : "string", + "description" : "Document ID" + }, + "index": { + "type" : "string", + "required" : true, + "description" : "The name of the index" + }, + "type": { + "type" : "string", + "required" : true, + "description" : "The type of the document" + } + }, + "params": { + "consistency": { + "type" : "enum", + "options" : ["one", "quorum", "all"], + "description" : "Explicit write consistency setting for the operation" + }, + "op_type": { + "type" : "enum", + "options" : ["index", "create"], + "default" : "index", + "description" : "Explicit operation type" + }, + "parent": { + "type" : 
"string", + "description" : "ID of the parent document" + }, + "refresh": { + "type" : "boolean", + "description" : "Refresh the index after performing the operation" + }, + "routing": { + "type" : "string", + "description" : "Specific routing value" + }, + "timeout": { + "type" : "time", + "description" : "Explicit operation timeout" + }, + "timestamp": { + "type" : "time", + "description" : "Explicit timestamp for the document" + }, + "ttl": { + "type" : "duration", + "description" : "Expiration time for the document" + }, + "version" : { + "type" : "number", + "description" : "Explicit version number for concurrency control" + }, + "version_type": { + "type" : "enum", + "options" : ["internal", "external", "external_gte", "force"], + "description" : "Specific version type" + }, + "ingest" : { + "type" : "string", + "description" : "The pipeline id to preprocess incoming documents with" + } + } + }, + "body": { + "description" : "The document", + "required" : true + } + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index ae076f3e34f..56e12cdd59b 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -14,7 +14,7 @@ { "simple" : { "path" : "field1", - "value" : "_value", + "expected_value" : "_value", "add_field" : "field2", "add_field_value" : "_value" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml new file mode 100644 index 00000000000..cd7e45ca3ae --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml @@ -0,0 +1,63 @@ +--- +"Test simple processor": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: 
"my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "simple" : { + "path" : "field1", + "expected_value" : "_value", + "add_field" : "field2", + "add_field_value" : "_value" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + ingest: "my_pipeline" + body: {field1: "_value"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "_value" } + - match: { _source.field2: "_value" } + + - do: + ingest.bulk: + ingest: "my_pipeline" + body: + - '{ "index": { "_index": "test", "_type": "test", "_id": "2" } }' + - '{ "field1": "_value" }' + + - do: + get: + index: test + type: test + id: 2 + - match: { _source.field1: "_value" } + - match: { _source.field2: "_value" } From 9706b472401c591f5d36e1c858187599e3fafa16 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 27 Oct 2015 17:03:02 +0700 Subject: [PATCH 012/347] field rename --- .../org/elasticsearch/plugin/ingest/PipelineStore.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 9b181c805bc..0abf1edad29 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -45,18 +45,18 @@ public class PipelineStore extends AbstractLifecycleComponent { private final ThreadPool threadPool; private final ClusterService clusterService; private final TimeValue pipelineUpdateInterval; - private final PipelineStoreClient configDocReader; + private final PipelineStoreClient client; private final Map 
processorFactoryRegistry; private volatile Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, ThreadPool threadPool, ClusterService clusterService, PipelineStoreClient configDocReader, Map processors) { + public PipelineStore(Settings settings, ThreadPool threadPool, ClusterService clusterService, PipelineStoreClient client, Map processors) { super(settings); this.threadPool = threadPool; this.clusterService = clusterService; this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); - this.configDocReader = configDocReader; + this.client = client; this.processorFactoryRegistry = Collections.unmodifiableMap(processors); clusterService.add(new PipelineStoreListener()); } @@ -107,7 +107,7 @@ public class PipelineStore extends AbstractLifecycleComponent { int changed = 0; Map newPipelines = new HashMap<>(pipelines); - for (SearchHit hit : configDocReader.readAllPipelines()) { + for (SearchHit hit : client.readAllPipelines()) { String pipelineId = hit.getId(); BytesReference pipelineSource = hit.getSourceRef(); PipelineReference previous = newPipelines.get(pipelineId); @@ -125,7 +125,7 @@ public class PipelineStore extends AbstractLifecycleComponent { int removed = 0; for (String existingPipelineId : pipelines.keySet()) { - if (!configDocReader.existPipeline(existingPipelineId)) { + if (!client.existPipeline(existingPipelineId)) { newPipelines.remove(existingPipelineId); removed++; } From cf52ab5df049ec7827351df48fd78cf9455bbc14 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 22 Oct 2015 15:26:29 +0300 Subject: [PATCH 013/347] add ability to add nested field values to Data document --- .../java/org/elasticsearch/ingest/Data.java | 27 +++++++- .../org/elasticsearch/ingest/DataTests.java | 61 +++++++++++++++++++ 2 files changed, 86 insertions(+), 2 deletions(-) create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index d27360c0380..6ff50cfff9f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -19,8 +19,11 @@ package org.elasticsearch.ingest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import java.util.Arrays; +import java.util.HashMap; import java.util.Map; /** @@ -42,14 +45,34 @@ public final class Data { this.document = document; } + // TODO(talevy): support elements of lists @SuppressWarnings("unchecked") public T getProperty(String path) { return (T) XContentMapValues.extractValue(path, document); } - public void addField(String field, Object value) { + /** + * add `value` to path in document. If path does not exist, + * nested hashmaps will be put in as parent key values until + * leaf key name in path is reached. 
+ * + * @param path The path within the document in dot-notation + * @param value The value to put in for the path key + */ + public void addField(String path, Object value) { modified = true; - document.put(field, value); + + String[] pathElements = Strings.splitStringToArray(path, '.'); + + String writeKey = pathElements[pathElements.length - 1]; + Map inner = document; + + for (int i = 0; i < pathElements.length - 1; i++) { + inner.putIfAbsent(pathElements[i], new HashMap()); + inner = (HashMap) inner.get(pathElements[i]); + } + + inner.put(writeKey, value); } public String getIndex() { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java new file mode 100644 index 00000000000..76d14a6e74a --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.HashMap; + +import static org.hamcrest.Matchers.*; + +public class DataTests extends ESTestCase { + + private Data data; + + @Before + public void setData() { + data = new Data("index", "type", "id", + new HashMap() {{ + put("foo", "bar"); + put("fizz", new HashMap() {{ + put("buzz", "hello world"); + }}); + }}); + } + + public void testSimpleGetProperty() { + assertThat(data.getProperty("foo"), equalTo("bar")); + } + + public void testNestedGetProperty() { + assertThat(data.getProperty("fizz.buzz"), equalTo("hello world")); + } + + public void testSimpleAddField() { + data.addField("new_field", "foo"); + assertThat(data.getDocument().get("new_field"), equalTo("foo")); + } + + public void testNestedAddField() { + data.addField("a.b.c.d", "foo"); + assertThat(data.getProperty("a.b.c.d"), equalTo("foo")); + } +} From a2ce7b199590c2cd5a35d57e3befda17d86406e1 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 27 Oct 2015 16:20:20 -0700 Subject: [PATCH 014/347] add tests / remove TODO --- .../src/main/java/org/elasticsearch/ingest/Data.java | 7 +++++-- .../test/java/org/elasticsearch/ingest/DataTests.java | 10 ++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 6ff50cfff9f..690dad062bb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -22,8 +22,10 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import java.lang.reflect.Array; import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; /** @@ -45,7 +47,6 @@ public final class Data { this.document = 
document; } - // TODO(talevy): support elements of lists @SuppressWarnings("unchecked") public T getProperty(String path) { return (T) XContentMapValues.extractValue(path, document); @@ -68,7 +69,9 @@ public final class Data { Map inner = document; for (int i = 0; i < pathElements.length - 1; i++) { - inner.putIfAbsent(pathElements[i], new HashMap()); + if (!inner.containsKey(pathElements[i])) { + inner.put(pathElements[i], new HashMap()); + } inner = (HashMap) inner.get(pathElements[i]); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index 76d14a6e74a..bafc0c92356 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -58,4 +58,14 @@ public class DataTests extends ESTestCase { data.addField("a.b.c.d", "foo"); assertThat(data.getProperty("a.b.c.d"), equalTo("foo")); } + + public void testAddFieldOnExistingField() { + data.addField("foo", "newbar"); + assertThat(data.getProperty("foo"), equalTo("newbar")); + } + + public void testAddFieldOnExistingParent() { + data.addField("fizz.new", "bar"); + assertThat(data.getProperty("fizz.new"), equalTo("bar")); + } } From 2b26381c08146c65ed82ade98e7b577b1e3fdd68 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 29 Oct 2015 12:48:15 +0700 Subject: [PATCH 015/347] fix build errors --- plugins/ingest/pom.xml | 2 +- .../main/java/org/elasticsearch/ingest/Data.java | 2 +- .../java/org/elasticsearch/ingest/DataTests.java | 14 +++++++------- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/plugins/ingest/pom.xml b/plugins/ingest/pom.xml index 3371dde3290..c280a8a6cac 100644 --- a/plugins/ingest/pom.xml +++ b/plugins/ingest/pom.xml @@ -18,7 +18,7 @@ org.elasticsearch.plugin.ingest.IngestPlugin ingest false - -Xlint:-rawtypes + -Xlint:-rawtypes,-unchecked diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 690dad062bb..4ba41c113d4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -72,7 +72,7 @@ public final class Data { if (!inner.containsKey(pathElements[i])) { inner.put(pathElements[i], new HashMap()); } - inner = (HashMap) inner.get(pathElements[i]); + inner = (Map) inner.get(pathElements[i]); } inner.put(writeKey, value); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index bafc0c92356..d048620cea3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.*; @@ -32,13 +33,12 @@ public class DataTests extends ESTestCase { @Before public void setData() { - data = new Data("index", "type", "id", - new HashMap() {{ - put("foo", "bar"); - put("fizz", new HashMap() {{ - put("buzz", "hello world"); - }}); - }}); + Map document = new HashMap<>(); + document.put("foo", "bar"); + Map innerObject = new HashMap<>(); + innerObject.put("buzz", "hello world"); + document.put("fizz", innerObject); + data = new Data("index", "type", "id", document); } public void testSimpleGetProperty() { From c6f4cd68daa90da3e42248e3d22daec6fee7286c Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 29 Oct 2015 12:24:35 -0700 Subject: [PATCH 016/347] add gradle build script to plugin --- plugins/ingest/build.gradle | 30 ++++++++++++++++++++++++++++++ settings.gradle | 1 + 2 files changed, 31 insertions(+) create mode 100644 plugins/ingest/build.gradle diff --git a/plugins/ingest/build.gradle 
b/plugins/ingest/build.gradle new file mode 100644 index 00000000000..920dcc54da7 --- /dev/null +++ b/plugins/ingest/build.gradle @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +esplugin { + description 'Plugin that allows to configure pipelines to preprocess documents before indexing' + classname 'org.elasticsearch.plugin.ingest.IngestPlugin' +} + +dependencies { + testCompile 'org.elasticsearch:securemock:1.1' +} + +compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" +compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" diff --git a/settings.gradle b/settings.gradle index 296026b7c08..8b8eb2c8466 100644 --- a/settings.gradle +++ b/settings.gradle @@ -18,6 +18,7 @@ String[] projects = [ 'plugins:discovery-ec2', 'plugins:discovery-gce', 'plugins:discovery-multicast', + 'plugins:ingest', 'plugins:lang-expression', 'plugins:lang-groovy', 'plugins:lang-javascript', From 8425d26b70ae89c848865d276873c24155930592 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 2 Nov 2015 15:27:56 +0100 Subject: [PATCH 017/347] use existing constant to check whether client is a transport client --- .../java/org/elasticsearch/plugin/ingest/IngestPlugin.java | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 9dfbf314499..11365167732 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -22,6 +22,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.action.ActionModule; import org.elasticsearch.client.Client; +import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; @@ -56,7 +57,7 @@ public class IngestPlugin extends Plugin { public IngestPlugin(Settings nodeSettings) { this.nodeSettings = nodeSettings; - transportClient = "transport".equals(nodeSettings.get(Client.CLIENT_TYPE_SETTING)); + transportClient = TransportClient.CLIENT_TYPE.equals(nodeSettings.get(Client.CLIENT_TYPE_SETTING)); } @Override @@ -95,7 +96,7 @@ public class IngestPlugin extends Plugin { } public void onModule(ActionModule module) { - if (!transportClient) { + if (transportClient == false) { module.registerFilter(IngestActionFilter.class); } module.registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); @@ -108,5 +109,4 @@ public class IngestPlugin extends Plugin { restModule.addRestAction(RestGetPipelineAction.class); restModule.addRestAction(RestDeletePipelineAction.class); } - } From 7b3c034caf786a2f435c0eedcf0300a51868fac9 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 2 Nov 2015 15:54:46 +0100 Subject: [PATCH 018/347] [DOCS] fixed javadocs minor typos --- .../java/org/elasticsearch/ingest/processor/Processor.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 6059bb1bb71..2fb37612a41 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -25,8 +25,8 @@ import org.elasticsearch.ingest.Data; import java.util.Map; /** - * An processor implementation may modify the data belonging to a document. - * If and what exactly is modified is upto the implementation. + * A processor implementation may modify the data belonging to a document. + * Whether changes are made and what exactly is modified is up to the implementation. */ public interface Processor { @@ -36,7 +36,7 @@ public interface Processor { void execute(Data data); /** - * A builder to contruct a processor to be used in a pipeline. + * A builder to construct a processor to be used in a pipeline. */ interface Builder { From 376c739fe6890009a88aadacf02d563a212f19a2 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 2 Nov 2015 23:33:34 +0700 Subject: [PATCH 019/347] added TODO --- plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 4ba41c113d4..66b81e8fe83 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -49,6 +49,8 @@ public final class Data { @SuppressWarnings("unchecked") public T getProperty(String path) { + // TODO: we should not rely on any core class, so we should have custom map extract value logic: + // also XContentMapValues has no support to get specific values from arrays, see: https://github.com/elastic/elasticsearch/issues/14324 return (T) XContentMapValues.extractValue(path, document); } From 14e09ad20c3a0bbab4245e62354e4c35f02c1a20 Mon Sep 
17 00:00:00 2001 From: javanna Date: Mon, 2 Nov 2015 17:29:39 +0100 Subject: [PATCH 020/347] Add the ingest param to the context only if present --- .../elasticsearch/plugin/ingest/rest/IngestRestFilter.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java index 702220addff..8b9bbb089e4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -34,7 +34,9 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - request.putInContext(INGEST_PARAM_CONTEXT_KEY, request.param(INGEST_PARAM)); + if (request.hasParam(INGEST_PARAM)) { + request.putInContext(INGEST_PARAM_CONTEXT_KEY, request.param(INGEST_PARAM)); + } filterChain.continueProcessing(request, channel); } } From a8d76f011783c8b03464e94ea45210952b56c821 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 2 Nov 2015 10:09:36 -0800 Subject: [PATCH 021/347] remove pom.xml from ingest plugin --- plugins/ingest/pom.xml | 33 --------------------------------- 1 file changed, 33 deletions(-) delete mode 100644 plugins/ingest/pom.xml diff --git a/plugins/ingest/pom.xml b/plugins/ingest/pom.xml deleted file mode 100644 index c280a8a6cac..00000000000 --- a/plugins/ingest/pom.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - 4.0.0 - - - org.elasticsearch.plugin - plugins - 3.0.0-SNAPSHOT - - - ingest - Plugin: Node ingest - Plugin that allows to configure pipelines to preprocess documents before indexing - - - org.elasticsearch.plugin.ingest.IngestPlugin - ingest - false - -Xlint:-rawtypes,-unchecked - - - - - - org.apache.maven.plugins - maven-assembly-plugin - - - - - From 
464b46437febebce3b9f1506630e3563f1fff5d1 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 15 Oct 2015 14:47:18 +0300 Subject: [PATCH 022/347] Introduce the GrokProcessor Also moved all processor classes into a subdirectory and introduced a ConfigException class to be a catch-all for things that can go wrong when constructing new processors with configurations that possibly throw exceptions. The GrokProcessor loads patterns from the resources directory. fix resource path issue, and add rest-api-spec test for grok fix rest-spec tests changes: license, remove configexception, throw IOException add more tests and fix iso8601-hour pattern move grok patterns from resources to config fix tests with pom changes, updated IngestClientIT with grok processor update gradle build script for grok deps and test configuration move config files to src/main/packaging move Env out of Processor, fix test for src/main/packaging change add docs clean up test resources task update Grok to be immutable - Updated the Grok class to be immutable. This means that all the pattern bank loading is handled by an external utility class called PatternUtils. 
- fixed tabs in the nagios patterns file's comments --- docs/plugins/ingest.asciidoc | 139 +++++++++ plugins/ingest/build.gradle | 9 + .../ingest/licenses/jcodings-1.0.12.jar.sha1 | 1 + plugins/ingest/licenses/jcodings-LICENSE.txt | 17 ++ plugins/ingest/licenses/jcodings-NOTICE.txt | 1 + plugins/ingest/licenses/joni-2.1.6.jar.sha1 | 1 + plugins/ingest/licenses/joni-LICENSE.txt | 17 ++ plugins/ingest/licenses/joni-NOTICE.txt | 1 + plugins/ingest/licenses/no_deps.txt | 1 - .../org/elasticsearch/ingest/Pipeline.java | 5 +- .../ingest/processor/Processor.java | 9 +- .../ingest/processor/grok/Grok.java | 160 ++++++++++ .../ingest/processor/grok/GrokMatchGroup.java | 67 +++++ .../ingest/processor/grok/GrokProcessor.java | 119 ++++++++ .../ingest/processor/grok/PatternUtils.java | 47 +++ .../plugin/ingest/IngestModule.java | 2 + .../plugin/ingest/PipelineStore.java | 9 +- .../main/packaging/config/grok/patterns/aws | 11 + .../packaging/config/grok/patterns/bacula | 50 +++ .../main/packaging/config/grok/patterns/bro | 13 + .../main/packaging/config/grok/patterns/exim | 13 + .../packaging/config/grok/patterns/firewalls | 86 ++++++ .../config/grok/patterns/grok-patterns | 102 +++++++ .../packaging/config/grok/patterns/haproxy | 39 +++ .../main/packaging/config/grok/patterns/java | 20 ++ .../main/packaging/config/grok/patterns/junos | 9 + .../config/grok/patterns/linux-syslog | 16 + .../config/grok/patterns/mcollective-patterns | 4 + .../packaging/config/grok/patterns/mongodb | 7 + .../packaging/config/grok/patterns/nagios | 124 ++++++++ .../packaging/config/grok/patterns/postgresql | 3 + .../main/packaging/config/grok/patterns/rails | 13 + .../main/packaging/config/grok/patterns/redis | 3 + .../main/packaging/config/grok/patterns/ruby | 2 + .../elasticsearch/ingest/IngestClientIT.java | 48 ++- .../ingest/processor/grok/GrokTests.java | 284 ++++++++++++++++++ .../plugin/ingest/PipelineStoreTests.java | 9 +- .../rest-api-spec/test/ingest/40_grok.yaml | 48 +++ 38 files changed, 
1484 insertions(+), 25 deletions(-) create mode 100644 plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 create mode 100644 plugins/ingest/licenses/jcodings-LICENSE.txt create mode 100644 plugins/ingest/licenses/jcodings-NOTICE.txt create mode 100644 plugins/ingest/licenses/joni-2.1.6.jar.sha1 create mode 100644 plugins/ingest/licenses/joni-LICENSE.txt create mode 100644 plugins/ingest/licenses/joni-NOTICE.txt delete mode 100644 plugins/ingest/licenses/no_deps.txt create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/aws create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/bacula create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/bro create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/exim create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/firewalls create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/grok-patterns create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/haproxy create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/java create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/junos create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/linux-syslog create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/mcollective-patterns create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/mongodb create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/nagios create mode 100644 
plugins/ingest/src/main/packaging/config/grok/patterns/postgresql create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/rails create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/redis create mode 100644 plugins/ingest/src/main/packaging/config/grok/patterns/ruby create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 3fb61bdc3b7..16c42cf9dfb 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -3,6 +3,145 @@ TODO +=== Processors + +==== Grok Processor + +The Grok Processor extracts structured fields out of a single text field within a document. You choose which field to +extract matched fields from, as well as the Grok Pattern you expect will match. A Grok Pattern is like a regular +expression that supports aliased expressions that can be reused. + +This tool is perfect for syslog logs, apache and other webserver logs, mysql logs, and in general, any log format +that is generally written for humans and not computer consumption. + +The processor comes packaged with over 120 reusable patterns that are located at `$ES_HOME/config/ingest/grok/patterns`. +Here, you can add your own custom grok pattern files with custom grok expressions to be used by the processor. + +If you need help building patterns to match your logs, you will find the and + applications quite useful! + +===== Grok Basics + +Grok sits on top of regular expressions, so any regular expressions are valid in grok as well. +The regular expression library is Oniguruma, and you can see the full supported regexp syntax +https://github.com/kkos/oniguruma/blob/master/doc/RE[on the Onigiruma site]. 
+ +Grok works by leveraging this regular expression language to allow naming existing patterns and combining them into more +complex patterns that match your fields. + +The syntax for re-using a grok pattern comes in three forms: `%{SYNTAX:SEMANTIC}`, `%{SYNTAX}`, `%{SYNTAX:SEMANTIC:TYPE}`. + +The `SYNTAX` is the name of the pattern that will match your text. For example, `3.44` will be matched by the `NUMBER` +pattern and `55.3.244.1` will be matched by the `IP` pattern. The syntax is how you match. `NUMBER` and `IP` are both +patterns that are provided within the default patterns set. + +The `SEMANTIC` is the identifier you give to the piece of text being matched. For example, `3.44` could be the +duration of an event, so you could call it simply `duration`. Further, a string `55.3.244.1` might identify +the `client` making a request. + +The `TYPE` is the type you wish to cast your named field. `int` and `float` are currently the only types supported for coercion. + +For example, here is a grok pattern that would match the above example given. We would like to match a text with the following +contents: + +[source,js] +-------------------------------------------------- +3.44 55.3.244.1 +-------------------------------------------------- + +We may know that the above message is a number followed by an IP-address. We can match this text with the following +Grok expression. + +[source,js] +-------------------------------------------------- +%{NUMBER:duration} %{IP:client} +-------------------------------------------------- + +===== Custom Patterns and Pattern Files + +The Grok Processor comes pre-packaged with a base set of pattern files. These patterns may not always have +what you are looking for. These pattern files have a very basic format. 
Each line describes a named pattern with +the following format: + +[source,js] +-------------------------------------------------- +NAME ' '+ PATTERN '\n' +-------------------------------------------------- + +You can add this pattern to an existing file, or add your own file in the patterns directory here: `$ES_HOME/config/ingest/grok/patterns`. +The Ingest Plugin will pick up files in this directory to be loaded into the grok processor's known patterns. These patterns are loaded +at startup, so you will need to do a restart your ingest node if you wish to update these files while running. + +Example snippet of pattern definitions found in the `grok-patterns` patterns file: + +[source,js] +-------------------------------------------------- +YEAR (?>\d\d){1,2} +HOUR (?:2[0123]|[01]?[0-9]) +MINUTE (?:[0-5][0-9]) +SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) +TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) +-------------------------------------------------- + +===== Using Grok Processor in a Pipeline + +[[grok-options]] +.Grok Options +[options="header"] +|====== +| Name | Required | Default | Description +| `match_field` | yes | - | The field to use for grok expression parsing +| `match_pattern` | yes | - | The grok expression to match and extract named captures with +|====== + +Here is an example of using the provided patterns to extract out and name structured fields from a string field in +a document. 
+ +[source,js] +-------------------------------------------------- +{ + "message": "55.3.244.1 GET /index.html 15824 0.043" +} +-------------------------------------------------- + +The pattern for this could be + +[source] +-------------------------------------------------- +%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration} +-------------------------------------------------- + +An example pipeline for processing the above document using Grok: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors": [ + { + "grok": { + "match_field": "message", + "match_pattern": "%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}" + } + } + ] +} +-------------------------------------------------- + +This pipeline will insert these named captures as new fields within the document, like so: + +[source,js] +-------------------------------------------------- +{ + "message": "55.3.244.1 GET /index.html 15824 0.043", + "client": "55.3.244.1", + "method": "GET", + "request": "/index.html", + "bytes": 15824, + "duration": "0.043" +} +-------------------------------------------------- + === Put pipeline API The put pipeline api adds pipelines and updates existing pipelines in the cluster. 
diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 920dcc54da7..7fe64f3295f 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -23,8 +23,17 @@ esplugin { } dependencies { + compile 'org.jruby.joni:joni:2.1.6' testCompile 'org.elasticsearch:securemock:1.1' } +sourceSets { + test { + resources { + srcDir "src/main/packaging/config" + } + } +} + compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" diff --git a/plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 b/plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 new file mode 100644 index 00000000000..dac1d7a000a --- /dev/null +++ b/plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 @@ -0,0 +1 @@ +6bc17079fcaa8823ea8cd0d4c66516335b558db8 diff --git a/plugins/ingest/licenses/jcodings-LICENSE.txt b/plugins/ingest/licenses/jcodings-LICENSE.txt new file mode 100644 index 00000000000..a3fdf73d562 --- /dev/null +++ b/plugins/ingest/licenses/jcodings-LICENSE.txt @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/plugins/ingest/licenses/jcodings-NOTICE.txt b/plugins/ingest/licenses/jcodings-NOTICE.txt new file mode 100644 index 00000000000..f6c494861fd --- /dev/null +++ b/plugins/ingest/licenses/jcodings-NOTICE.txt @@ -0,0 +1 @@ +JCodings is released under the MIT License. \ No newline at end of file diff --git a/plugins/ingest/licenses/joni-2.1.6.jar.sha1 b/plugins/ingest/licenses/joni-2.1.6.jar.sha1 new file mode 100644 index 00000000000..110752bb8ba --- /dev/null +++ b/plugins/ingest/licenses/joni-2.1.6.jar.sha1 @@ -0,0 +1 @@ +0f23c95a06eaecbc8c74c7458a8bfd13e4fd2d3a diff --git a/plugins/ingest/licenses/joni-LICENSE.txt b/plugins/ingest/licenses/joni-LICENSE.txt new file mode 100644 index 00000000000..a3fdf73d562 --- /dev/null +++ b/plugins/ingest/licenses/joni-LICENSE.txt @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/plugins/ingest/licenses/joni-NOTICE.txt b/plugins/ingest/licenses/joni-NOTICE.txt new file mode 100644 index 00000000000..45bc517b9e2 --- /dev/null +++ b/plugins/ingest/licenses/joni-NOTICE.txt @@ -0,0 +1 @@ +Joni is released under the MIT License. diff --git a/plugins/ingest/licenses/no_deps.txt b/plugins/ingest/licenses/no_deps.txt deleted file mode 100644 index 8cce254d037..00000000000 --- a/plugins/ingest/licenses/no_deps.txt +++ /dev/null @@ -1 +0,0 @@ -This plugin has no third party dependencies diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 83627d6b2e8..b91ef2f0f7e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.ingest.processor.Processor; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -82,7 +83,7 @@ public final class Pipeline { this.id = id; } - public void fromMap(Map config, Map processorRegistry) { + public void fromMap(Map config, Map processorRegistry) throws IOException { description = (String) config.get("description"); @SuppressWarnings("unchecked") List>> processors = (List>>) config.get("processors"); @@ -105,7 +106,7 @@ public final class Pipeline { this.description = description; } - public void addProcessors(Processor.Builder... processors) { + public void addProcessors(Processor.Builder... 
processors) throws IOException { for (Processor.Builder processor : processors) { this.processors.add(processor.build()); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 2fb37612a41..23ac6e15edf 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -22,6 +22,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.Data; +import java.io.IOException; +import java.nio.file.Path; import java.util.Map; /** @@ -48,7 +50,7 @@ public interface Processor { /** * Builds the processor based on previous set settings. */ - Processor build(); + Processor build() throws IOException; /** * A factory that creates a processor builder when processor instances for pipelines are being created. @@ -60,6 +62,11 @@ public interface Processor { */ Builder create(); + /** + */ + default void setConfigDirectory(Path configDirectory) { + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java new file mode 100644 index 00000000000..e72aa948da6 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.grok; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.lang.Object; +import java.lang.String; +import java.lang.StringIndexOutOfBoundsException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Locale; +import java.util.Map; + +import org.jcodings.specific.UTF8Encoding; +import org.joni.*; +import org.joni.exception.ValueException; + +public class Grok { + + private static final String NAME_GROUP = "name"; + private static final String SUBNAME_GROUP = "subname"; + private static final String PATTERN_GROUP = "pattern"; + private static final String DEFINITION_GROUP = "definition"; + private static final String GROK_PATTERN = + "%\\{" + + "(?" + + "(?[A-z0-9]+)" + + "(?::(?[A-z0-9_:.-]+))?" + + ")" + + "(?:=(?" + + "(?:" + + "(?:[^{}]+|\\.+)+" + + ")+" + + ")" + + ")?" 
+ "\\}"; + private static final Regex GROK_PATTERN_REGEX = new Regex(GROK_PATTERN.getBytes(StandardCharsets.UTF_8), 0, GROK_PATTERN.getBytes(StandardCharsets.UTF_8).length, Option.NONE, UTF8Encoding.INSTANCE, Syntax.DEFAULT); + private final Map patternBank; + private final boolean namedCaptures; + private final Regex compiledExpression; + private final String expression; + + + public Grok(Map patternBank, String grokPattern) { + this(patternBank, grokPattern, true); + } + + @SuppressWarnings("unchecked") + public Grok(Map patternBank, String grokPattern, boolean namedCaptures) { + this.patternBank = patternBank; + this.namedCaptures = namedCaptures; + + this.expression = toRegex(grokPattern); + byte[] expressionBytes = expression.getBytes(StandardCharsets.UTF_8); + this.compiledExpression = new Regex(expressionBytes, 0, expressionBytes.length, Option.DEFAULT, UTF8Encoding.INSTANCE); + } + + + public String groupMatch(String name, Region region, String pattern) { + try { + int number = GROK_PATTERN_REGEX.nameToBackrefNumber(name.getBytes(StandardCharsets.UTF_8), 0, name.getBytes(StandardCharsets.UTF_8).length, region); + int begin = region.beg[number]; + int end = region.end[number]; + return new String(pattern.getBytes(StandardCharsets.UTF_8), begin, end - begin, StandardCharsets.UTF_8); + } catch (StringIndexOutOfBoundsException e) { + return null; + } catch (ValueException e) { + return null; + } + } + + /** + * converts a grok expression into a named regex expression + * + * @return named regex expression + */ + public String toRegex(String grokPattern) { + byte[] grokPatternBytes = grokPattern.getBytes(StandardCharsets.UTF_8); + Matcher matcher = GROK_PATTERN_REGEX.matcher(grokPatternBytes); + + int result = matcher.search(0, grokPatternBytes.length, Option.NONE); + if (result != -1) { + Region region = matcher.getEagerRegion(); + String namedPatternRef = groupMatch(NAME_GROUP, region, grokPattern); + String subName = groupMatch(SUBNAME_GROUP, region, 
grokPattern); + // TODO(tal): Support definitions + String definition = groupMatch(DEFINITION_GROUP, region, grokPattern); + String patternName = groupMatch(PATTERN_GROUP, region, grokPattern); + String pattern = patternBank.get(patternName); + + String grokPart; + if (namedCaptures && subName != null) { + grokPart = String.format(Locale.US, "(?<%s>%s)", namedPatternRef, pattern); + } else if (!namedCaptures) { + grokPart = String.format(Locale.US, "(?<%s>%s)", patternName + "_" + String.valueOf(result), pattern); + } else { + grokPart = String.format(Locale.US, "(?:%s)", pattern); + } + + String start = new String(grokPatternBytes, 0, result, StandardCharsets.UTF_8); + String rest = new String(grokPatternBytes, region.end[0], grokPatternBytes.length - region.end[0], StandardCharsets.UTF_8); + return start + toRegex(grokPart + rest); + } + + return grokPattern; + } + + public boolean match(String text) { + Matcher matcher = compiledExpression.matcher(text.getBytes(StandardCharsets.UTF_8)); + int result = matcher.search(0, text.length(), Option.DEFAULT); + return (result != -1); + } + + public Map captures(String text) { + byte[] textAsBytes = text.getBytes(StandardCharsets.UTF_8); + Map fields = new HashMap<>(); + Matcher matcher = compiledExpression.matcher(textAsBytes); + int result = matcher.search(0, textAsBytes.length, Option.DEFAULT); + if (result != -1) { + Region region = matcher.getEagerRegion(); + for (Iterator entry = compiledExpression.namedBackrefIterator(); entry.hasNext();) { + NameEntry e = entry.next(); + int number = e.getBackRefs()[0]; + + String groupName = new String(e.name, e.nameP, e.nameEnd - e.nameP, StandardCharsets.UTF_8); + String matchValue = null; + if (region.beg[number] >= 0) { + matchValue = new String(textAsBytes, region.beg[number], region.end[number] - region.beg[number], StandardCharsets.UTF_8); + } + GrokMatchGroup match = new GrokMatchGroup(groupName, matchValue); + fields.put(match.getName(), match.getValue()); + } + } else { 
+ return null; + } + + return fields; + } +} + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java new file mode 100644 index 00000000000..c070850b7ca --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.grok; + +import java.lang.Float; +import java.lang.Integer; +import java.lang.Object; +import java.lang.String; + +public class GrokMatchGroup { + private static final String DEFAULT_TYPE = "string"; + private final String patternName; + private final String fieldName; + private final String type; + private final String groupValue; + + public GrokMatchGroup(String groupName, String groupValue) { + String[] parts = groupName.split(":"); + patternName = parts[0]; + if (parts.length >= 2) { + fieldName = parts[1]; + } else { + fieldName = null; + } + + if (parts.length == 3) { + type = parts[2]; + } else { + type = DEFAULT_TYPE; + } + this.groupValue = groupValue; + } + + public String getName() { + return (fieldName == null) ? patternName : fieldName; + } + + public Object getValue() { + if (groupValue == null) { return null; } + + switch(type) { + case "int": + return Integer.parseInt(groupValue); + case "float": + return Float.parseFloat(groupValue); + default: + return groupValue; + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java new file mode 100644 index 00000000000..16f9f4ee1a9 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -0,0 +1,119 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor.grok;
+
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.ingest.Data;
+import org.elasticsearch.ingest.processor.Processor;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.util.HashMap;
+import java.util.Map;
+
+public final class GrokProcessor implements Processor {
+
+    public static final String TYPE = "grok";
+
+    private final String matchField;
+    private final String matchPattern;
+    private final Grok grok;
+
+    public GrokProcessor(Grok grok, String matchField, String matchPattern) throws IOException {
+        this.matchField = matchField;
+        this.matchPattern = matchPattern;
+        this.grok = grok;
+    }
+
+    @Override
+    public void execute(Data data) {
+        Object field = data.getProperty(matchField);
+        // TODO(talevy): handle invalid field types
+        if (field instanceof String) {
+            Map<String, Object> matches = grok.captures((String) field);
+            if (matches != null) {
+                matches.forEach((k, v) -> data.addField(k, v));
+            }
+        }
+    }
+
+    public static class Builder implements Processor.Builder {
+
+        private Path grokConfigDirectory;
+        private String matchField;
+        private String matchPattern;
+
+        public Builder(Path grokConfigDirectory) {
+            this.grokConfigDirectory = grokConfigDirectory;
+        }
+
+        public void setMatchField(String matchField) {
+            this.matchField = matchField;
+        }
+
+        public void setMatchPattern(String matchPattern) {
+            this.matchPattern = matchPattern;
+        }
+
+        public void fromMap(Map<String, Object> config) {
+            this.matchField = (String) config.get("field");
+            this.matchPattern = (String) config.get("pattern");
+        }
+
+        @Override
+        public Processor build() throws IOException {
+            Map<String, String> patternBank = new HashMap<>();
+            Path patternsDirectory = grokConfigDirectory.resolve("patterns");
+            try (DirectoryStream<Path> stream = Files.newDirectoryStream(patternsDirectory)) {
+                for (Path patternFilePath : stream) {
+                    try(InputStream is = Files.newInputStream(patternFilePath, StandardOpenOption.READ)) {
+                        PatternUtils.loadBankFromStream(patternBank, is);
+                    }
+                }
+            }
+
+            Grok grok = new Grok(patternBank, matchPattern);
+            return new GrokProcessor(grok, matchField, matchPattern);
+        }
+
+        public static class Factory implements Processor.Builder.Factory {
+            private Path grokConfigDirectory;
+
+            @Override
+            public Processor.Builder create() {
+                return new Builder(grokConfigDirectory);
+            }
+
+            @Override
+            public void setConfigDirectory(Path configDirectory) {
+                this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok");
+            }
+        }
+
+    }
+}
diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java
new file mode 100644
index 00000000000..7da8060b5fd
--- /dev/null
+++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.ingest.processor.grok;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+
+public final class PatternUtils {
+    private PatternUtils() {}
+
+    public static void loadBankFromStream(Map<String, String> patternBank, InputStream inputStream) throws IOException {
+        String line;
+        BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
+        while ((line = br.readLine()) != null) {
+            String trimmedLine = line.replaceAll("^\\s+", "");
+            if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) {
+                continue;
+            }
+
+            String[] parts = trimmedLine.split("\\s+", 2);
+            if (parts.length == 2) {
+                patternBank.put(parts[0], parts[1]);
+            }
+        }
+    }
+
+}
diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java
index f3d2469c17c..d5f2dbb4ae4 100644
--- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java
+++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java
@@ -22,6 +22,7 @@ package org.elasticsearch.plugin.ingest;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.multibindings.MapBinder;
 import org.elasticsearch.ingest.processor.Processor;
+import org.elasticsearch.ingest.processor.grok.GrokProcessor;
 import
org.elasticsearch.ingest.processor.simple.SimpleProcessor;
 import org.elasticsearch.plugin.ingest.rest.IngestRestFilter;
@@ -40,6 +41,7 @@ public class IngestModule extends AbstractModule {
         binder().bind(PipelineStoreClient.class).asEagerSingleton();
 
         registerProcessor(SimpleProcessor.TYPE, SimpleProcessor.Builder.Factory.class);
+        registerProcessor(GrokProcessor.TYPE, GrokProcessor.Builder.Factory.class);
 
         MapBinder<String, Processor.Builder.Factory> mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Builder.Factory.class);
         for (Map.Entry<String, Class<? extends Processor.Builder.Factory>> entry : processors.entrySet()) {
diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java
index 0abf1edad29..defdeb459c9 100644
--- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java
+++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java
@@ -29,12 +29,14 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.env.Environment;
 import org.elasticsearch.gateway.GatewayService;
 import org.elasticsearch.ingest.Pipeline;
 import org.elasticsearch.ingest.processor.Processor;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.threadpool.ThreadPool;
 
+import java.io.IOException;
 import java.util.*;
 
 public class PipelineStore extends AbstractLifecycleComponent {
@@ -51,12 +53,15 @@
     private volatile Map<String, Pipeline> pipelines = new HashMap<>();
 
     @Inject
-    public PipelineStore(Settings settings, ThreadPool threadPool, ClusterService clusterService, PipelineStoreClient client, Map<String, Processor.Builder.Factory> processors) {
+    public PipelineStore(Settings settings, ThreadPool threadPool, Environment environment, ClusterService clusterService, PipelineStoreClient client, Map<String, Processor.Builder.Factory> processors) {
         super(settings);
         this.threadPool = threadPool;
         this.clusterService = clusterService;
         this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1));
         this.client = client;
+        for (Processor.Builder.Factory factory : processors.values()) {
+            factory.setConfigDirectory(environment.configFile());
+        }
         this.processorFactoryRegistry = Collections.unmodifiableMap(processors);
         clusterService.add(new PipelineStoreListener());
     }
@@ -101,7 +106,7 @@
         return result;
     }
 
-    void updatePipelines() {
+    void updatePipelines() throws IOException {
         // note: this process isn't fast or smart, but the idea is that there will not be many pipelines,
         // so for that reason the goal is to keep the update logic simple.
 
diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/aws b/plugins/ingest/src/main/packaging/config/grok/patterns/aws
new file mode 100644
index 00000000000..71edbc9f296
--- /dev/null
+++ b/plugins/ingest/src/main/packaging/config/grok/patterns/aws
@@ -0,0 +1,11 @@
+S3_REQUEST_LINE (?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})
+
+S3_ACCESS_LOG %{WORD:owner} %{NOTSPACE:bucket} \[%{HTTPDATE:timestamp}\] %{IP:clientip} %{NOTSPACE:requester} %{NOTSPACE:request_id} %{NOTSPACE:operation} %{NOTSPACE:key} (?:"%{S3_REQUEST_LINE}"|-) (?:%{INT:response:int}|-) (?:-|%{NOTSPACE:error_code}) (?:%{INT:bytes:int}|-) (?:%{INT:object_size:int}|-) (?:%{INT:request_time_ms:int}|-) (?:%{INT:turnaround_time_ms:int}|-) (?:%{QS:referrer}|-) (?:"?%{QS:agent}"?|-) (?:-|%{NOTSPACE:version_id})
+
+ELB_URIPATHPARAM %{URIPATH:path}(?:%{URIPARAM:params})?
+
+ELB_URI %{URIPROTO:proto}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST:urihost})?(?:%{ELB_URIPATHPARAM})?
+ +ELB_REQUEST_LINE (?:%{WORD:verb} %{ELB_URI:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest}) + +ELB_ACCESS_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} %{INT:response:int} %{INT:backend_response:int} %{INT:received_bytes:int} %{INT:bytes:int} "%{ELB_REQUEST_LINE}" diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/bacula b/plugins/ingest/src/main/packaging/config/grok/patterns/bacula new file mode 100644 index 00000000000..d80dfe5de4c --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/bacula @@ -0,0 +1,50 @@ +BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH} %{HOUR}:%{MINUTE} +BACULA_HOST [a-zA-Z0-9-]+ +BACULA_VOLUME %{USER} +BACULA_DEVICE %{USER} +BACULA_DEVICEPATH %{UNIXPATH} +BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})* +BACULA_VERSION %{USER} +BACULA_JOB %{USER} + +BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY} exceeded on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\) +BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:volume}\" Bytes=%{BACULA_CAPACITY} Blocks=%{BACULA_CAPACITY} at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}. +BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:volume}\" in catalog. +BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\). +BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE}\" \(%{BACULA_DEVICEPATH}\) +BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:volume}\" mounted on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\) at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}. 
+BACULA_LOG_NOOPEN \s+Cannot open %{DATA}: ERR=%{GREEDYDATA:berror} +BACULA_LOG_NOOPENDIR \s+Could not open directory %{DATA}: ERR=%{GREEDYDATA:berror} +BACULA_LOG_NOSTAT \s+Could not stat %{DATA}: ERR=%{GREEDYDATA:berror} +BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:volume}\". Marking it purged. +BACULA_LOG_ALL_RECORDS_PRUNED All records pruned from Volume \"%{BACULA_VOLUME:volume}\"; marking it \"Purged\" +BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days . +BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files. +BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:client} from catalog. +BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:client} from catalog. +BACULA_LOG_ENDPRUNE End auto prune. +BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:job} +BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:job} +BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:device}\" +BACULA_LOG_DIFF_FS \s+%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it. +BACULA_LOG_JOBEND Job write elapsed time = %{DATA:elapsed}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second +BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune. +BACULA_LOG_NOPRUNE_FILES No Files found to prune. +BACULA_LOG_VOLUME_PREVWRITTEN Volume \"%{BACULA_VOLUME:volume}\" previously written, moving to end of data. +BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:volume}\" size=%{INT} +BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT}. +BACULA_LOG_MARKCANCEL JobId %{INT}, Job %{BACULA_JOB:job} marked to be canceled. +BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:runjob}\" +BACULA_LOG_VSS (Generate )?VSS (Writer)? +BACULA_LOG_MAXSTART Fatal error: Job canceled because max start delay time exceeded. +BACULA_LOG_DUPLICATE Fatal error: JobId %{INT:duplicate} already running. Duplicate job not allowed. 
+BACULA_LOG_NOJOBSTAT Fatal error: No Job status returned from FD. +BACULA_LOG_FATAL_CONN Fatal error: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=(?%{GREEDYDATA}) +BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=(?%{GREEDYDATA}) +BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at %{HOSTNAME}. Possible causes: +BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup. +BACULA_LOG_NOPRIOR No prior Full backup Job record found. + +BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\): + +BACULA_LOGLINE %{BACULA_TIMESTAMP:bts} %{BACULA_HOST:hostname} JobId %{INT:jobid}: (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR}) diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/bro b/plugins/ingest/src/main/packaging/config/grok/patterns/bro new file mode 100644 index 00000000000..31b138b39e2 --- /dev/null +++ 
b/plugins/ingest/src/main/packaging/config/grok/patterns/bro @@ -0,0 +1,13 @@ +# https://www.bro.org/sphinx/script-reference/log-files.html + +# http.log +BRO_HTTP %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{INT:trans_depth}\t%{GREEDYDATA:method}\t%{GREEDYDATA:domain}\t%{GREEDYDATA:uri}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:user_agent}\t%{NUMBER:request_body_len}\t%{NUMBER:response_body_len}\t%{GREEDYDATA:status_code}\t%{GREEDYDATA:status_msg}\t%{GREEDYDATA:info_code}\t%{GREEDYDATA:info_msg}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:bro_tags}\t%{GREEDYDATA:username}\t%{GREEDYDATA:password}\t%{GREEDYDATA:proxied}\t%{GREEDYDATA:orig_fuids}\t%{GREEDYDATA:orig_mime_types}\t%{GREEDYDATA:resp_fuids}\t%{GREEDYDATA:resp_mime_types} + +# dns.log +BRO_DNS %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{INT:trans_id}\t%{GREEDYDATA:query}\t%{GREEDYDATA:qclass}\t%{GREEDYDATA:qclass_name}\t%{GREEDYDATA:qtype}\t%{GREEDYDATA:qtype_name}\t%{GREEDYDATA:rcode}\t%{GREEDYDATA:rcode_name}\t%{GREEDYDATA:AA}\t%{GREEDYDATA:TC}\t%{GREEDYDATA:RD}\t%{GREEDYDATA:RA}\t%{GREEDYDATA:Z}\t%{GREEDYDATA:answers}\t%{GREEDYDATA:TTLs}\t%{GREEDYDATA:rejected} + +# conn.log +BRO_CONN %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{GREEDYDATA:service}\t%{NUMBER:duration}\t%{NUMBER:orig_bytes}\t%{NUMBER:resp_bytes}\t%{GREEDYDATA:conn_state}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:missed_bytes}\t%{GREEDYDATA:history}\t%{GREEDYDATA:orig_pkts}\t%{GREEDYDATA:orig_ip_bytes}\t%{GREEDYDATA:resp_pkts}\t%{GREEDYDATA:resp_ip_bytes}\t%{GREEDYDATA:tunnel_parents} + +# files.log +BRO_FILES 
%{NUMBER:ts}\t%{NOTSPACE:fuid}\t%{IP:tx_hosts}\t%{IP:rx_hosts}\t%{NOTSPACE:conn_uids}\t%{GREEDYDATA:source}\t%{GREEDYDATA:depth}\t%{GREEDYDATA:analyzers}\t%{GREEDYDATA:mime_type}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:duration}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:is_orig}\t%{GREEDYDATA:seen_bytes}\t%{GREEDYDATA:total_bytes}\t%{GREEDYDATA:missing_bytes}\t%{GREEDYDATA:overflow_bytes}\t%{GREEDYDATA:timedout}\t%{GREEDYDATA:parent_fuid}\t%{GREEDYDATA:md5}\t%{GREEDYDATA:sha1}\t%{GREEDYDATA:sha256}\t%{GREEDYDATA:extracted} diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/exim b/plugins/ingest/src/main/packaging/config/grok/patterns/exim new file mode 100644 index 00000000000..68c4e5cd7d0 --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/exim @@ -0,0 +1,13 @@ +EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2} +EXIM_FLAGS (<=|[-=>*]>|[*]{2}|==) +EXIM_DATE %{YEAR:exim_year}-%{MONTHNUM:exim_month}-%{MONTHDAY:exim_day} %{TIME:exim_time} +EXIM_PID \[%{POSINT}\] +EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?) 
+EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message) +EXIM_REMOTE_HOST (H=(%{NOTSPACE:remote_hostname} )?(\(%{NOTSPACE:remote_heloname}\) )?\[%{IP:remote_host}\]) +EXIM_INTERFACE (I=\[%{IP:exim_interface}\](:%{NUMBER:exim_interface_port})) +EXIM_PROTOCOL (P=%{NOTSPACE:protocol}) +EXIM_MSG_SIZE (S=%{NUMBER:exim_msg_size}) +EXIM_HEADER_ID (id=%{NOTSPACE:exim_header_id}) +EXIM_SUBJECT (T=%{QS:exim_subject}) + diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/firewalls b/plugins/ingest/src/main/packaging/config/grok/patterns/firewalls new file mode 100644 index 00000000000..03c3e5aff0c --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/firewalls @@ -0,0 +1,86 @@ +# NetScreen firewall logs +NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:date} %{IPORHOST:device} %{IPORHOST}: NetScreen device_id=%{WORD:device_id}%{DATA}: start_time=%{QUOTEDSTRING:start_time} duration=%{INT:duration} policy_id=%{INT:policy_id} service=%{DATA:service} proto=%{INT:proto} src zone=%{WORD:src_zone} dst zone=%{WORD:dst_zone} action=%{WORD:action} sent=%{INT:sent} rcvd=%{INT:rcvd} src=%{IPORHOST:src_ip} dst=%{IPORHOST:dst_ip} src_port=%{INT:src_port} dst_port=%{INT:dst_port} src-xlated ip=%{IPORHOST:src_xlated_ip} port=%{INT:src_xlated_port} dst-xlated ip=%{IPORHOST:dst_xlated_ip} port=%{INT:dst_xlated_port} session_id=%{INT:session_id} reason=%{GREEDYDATA:reason} + +#== Cisco ASA == +CISCO_TAGGED_SYSLOG ^<%{POSINT:syslog_pri}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:sysloghost})? ?: %%{CISCOTAG:ciscotag}: +CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? 
%{TIME} +CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+) +# Common Particles +CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted +CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)* +CISCO_DIRECTION Inbound|inbound|Outbound|outbound +CISCO_INTERVAL first hit|%{INT}-second interval +CISCO_XLATE_TYPE static|dynamic +# ASA-1-104001 +CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:switch_reason} +# ASA-1-104002 +CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:switch_reason} +# ASA-1-104003 +CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\. +# ASA-1-104004 +CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\. +# ASA-1-105003 +CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} waiting +# ASA-1-105004 +CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} normal +# ASA-1-105005 +CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{GREEDYDATA:interface_name} +# ASA-1-105008 +CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{GREEDYDATA:interface_name} +# ASA-1-105009 +CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{GREEDYDATA:interface_name} (?:Passed|Failed) +# ASA-2-106001 +CISCOFW106001 %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface} +# ASA-2-106006, ASA-2-106007, ASA-2-106010 +CISCOFW106006_106007_106010 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} (?:from|src) %{IP:src_ip}/%{INT:src_port}(\(%{DATA:src_fwuser}\))? (?:to|dst) %{IP:dst_ip}/%{INT:dst_port}(\(%{DATA:dst_fwuser}\))? 
(?:on interface %{DATA:interface}|due to %{CISCO_REASON:reason}) +# ASA-3-106014 +CISCOFW106014 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(\(%{DATA:dst_fwuser}\))? \(type %{INT:icmp_type}, code %{INT:icmp_code}\) +# ASA-6-106015 +CISCOFW106015 %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{DATA:tcp_flags} on interface %{GREEDYDATA:interface} +# ASA-1-106021 +CISCOFW106021 %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface} +# ASA-4-106023 +CISCOFW106023 %{CISCO_ACTION:action}( protocol)? %{WORD:protocol} src %{DATA:src_interface}:%{DATA:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{DATA:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group "?%{DATA:policy_id}"? \[%{DATA:hashcode1}, %{DATA:hashcode2}\] +# ASA-4-106100, ASA-4-106102, ASA-4-106103 +CISCOFW106100_2_3 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} for user '%{DATA:src_fwuser}' %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\) -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\) hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\] +# ASA-5-106100 +CISCOFW106100 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? 
hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\] +# ASA-6-110002 +CISCOFW110002 %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} +# ASA-6-302010 +CISCOFW302010 %{INT:connection_count} in use, %{INT:connection_count_max} most used +# ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016 +CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection %{INT:connection_id} for %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port}( \(%{IP:src_mapped_ip}/%{INT:src_mapped_port}\))?(\(%{DATA:src_fwuser}\))? to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}( \(%{IP:dst_mapped_ip}/%{INT:dst_mapped_port}\))?(\(%{DATA:dst_fwuser}\))?( duration %{TIME:duration} bytes %{INT:bytes})?(?: %{CISCO_REASON:reason})?( \(%{DATA:user}\))? +# ASA-6-302020, ASA-6-302021 +CISCOFW302020_302021 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT:icmp_seq_num}(?:\(%{DATA:fwuser}\))? gaddr %{IP:src_xlated_ip}/%{INT:icmp_code_xlated} laddr %{IP:src_ip}/%{INT:icmp_code}( \(%{DATA:user}\))? +# ASA-6-305011 +CISCOFW305011 %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port} +# ASA-3-313001, ASA-3-313004, ASA-3-313008 +CISCOFW313001_313004_313008 %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})? +# ASA-4-313005 +CISCOFW313005 %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? 
\(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\. Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))? +# ASA-5-321001 +CISCOFW321001 Resource '%{WORD:resource_name}' limit of %{POSINT:resource_limit} reached for system +# ASA-4-402117 +CISCOFW402117 %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip} +# ASA-4-402119 +CISCOFW402119 %{WORD:protocol}: Received an %{WORD:orig_protocol} packet \(SPI= %{DATA:spi}, sequence number= %{DATA:seq_num}\) from %{IP:src_ip} \(user= %{DATA:user}\) to %{IP:dst_ip} that failed anti-replay checking +# ASA-4-419001 +CISCOFW419001 %{CISCO_ACTION:action} %{WORD:protocol} packet from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}, reason: %{GREEDYDATA:reason} +# ASA-4-419002 +CISCOFW419002 %{CISCO_REASON:reason} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} with different initial sequence number +# ASA-4-500004 +CISCOFW500004 %{CISCO_REASON:reason} for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} +# ASA-6-602303, ASA-6-602304 +CISCOFW602303_602304 %{WORD:protocol}: An %{CISCO_DIRECTION:direction} %{GREEDYDATA:tunnel_type} SA \(SPI= %{DATA:spi}\) between %{IP:src_ip} and %{IP:dst_ip} \(user= %{DATA:user}\) has been %{CISCO_ACTION:action} +# ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006 +CISCOFW710001_710002_710003_710005_710006 %{WORD:protocol} (?:request|access) %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} +# ASA-6-713172 +CISCOFW713172 Group = %{GREEDYDATA:group}, IP = %{IP:src_ip}, Automatic NAT Detection Status:\s+Remote 
end\s*%{DATA:is_remote_natted}\s*behind a NAT device\s+This\s+end\s*%{DATA:is_local_natted}\s*behind a NAT device +# ASA-4-733100 +CISCOFW733100 \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count} +#== End Cisco ASA == + +# Shorewall firewall logs +SHOREWALL (%{SYSLOGTIMESTAMP:timestamp}) (%{WORD:nf_host}) kernel:.*Shorewall:(%{WORD:nf_action1})?:(%{WORD:nf_action2})?.*IN=(%{USERNAME:nf_in_interface})?.*(OUT= *MAC=(%{COMMONMAC:nf_dst_mac}):(%{COMMONMAC:nf_src_mac})?|OUT=%{USERNAME:nf_out_interface}).*SRC=(%{IPV4:nf_src_ip}).*DST=(%{IPV4:nf_dst_ip}).*LEN=(%{WORD:nf_len}).?*TOS=(%{WORD:nf_tos}).?*PREC=(%{WORD:nf_prec}).?*TTL=(%{INT:nf_ttl}).?*ID=(%{INT:nf_id}).?*PROTO=(%{WORD:nf_protocol}).?*SPT=(%{INT:nf_src_port}?.*DPT=%{INT:nf_dst_port}?.*) +#== End Shorewall diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/grok-patterns b/plugins/ingest/src/main/packaging/config/grok/patterns/grok-patterns new file mode 100644 index 00000000000..cb4c3fffc6a --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/grok-patterns @@ -0,0 +1,102 @@ +USERNAME [a-zA-Z0-9._-]+ +USER %{USERNAME} +EMAILLOCALPART [a-zA-Z][a-zA-Z0-9_.+-=:]+ +EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME} +HTTPDUSER %{EMAILADDRESS}|%{USER} +INT (?:[+-]?(?:[0-9]+)) +BASE10NUM (?[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+))) +NUMBER (?:%{BASE10NUM}) +BASE16NUM (?(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``)) +UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12} + +# Networking +MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC}) +CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4}) +WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2}) +COMMONMAC 
(?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2}) +IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)? +IPV4 (?[A-Za-z]+:|\\)(?:\\[^\\?*]*)+ +URIPROTO [A-Za-z]+(\+[A-Za-z+]+)? +URIHOST %{IPORHOST}(?::%{POSINT:port})? +# uripath comes loosely from RFC1738, but mostly from what Firefox +# doesn't turn into %XX +URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%_\-]*)+ +#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)? +URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]* +URIPATHPARAM %{URIPATH}(?:%{URIPARAM})? +URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})? 
+ +# Months: January, Feb, 3, 03, 12, December +MONTH \b(?:Jan(?:uary|uar)?|Feb(?:ruary|ruar)?|M(?:a|ä)?r(?:ch|z)?|Apr(?:il)?|Ma(?:y|i)?|Jun(?:e|i)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|O(?:c|k)?t(?:ober)?|Nov(?:ember)?|De(?:c|z)(?:ember)?)\b +MONTHNUM (?:0?[1-9]|1[0-2]) +MONTHNUM2 (?:0[1-9]|1[0-2]) +MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) + +# Days: Monday, Tue, Thu, etc... +DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?) + +# Years? +YEAR (?>\d\d){1,2} +HOUR (?:2[0123]|[01]?[0-9]) +MINUTE (?:[0-5][0-9]) +# '60' is a leap second in most time standards and thus is valid. +SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) +TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) +# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it) +DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR} +DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR} +ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE})) +ISO8601_SECOND (?:%{SECOND}|60) +ISO8601_HOUR (?:2[0123]|[01][0-9]) +TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{ISO8601_HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? +DATE %{DATE_US}|%{DATE_EU} +DATESTAMP %{DATE}[- ]%{TIME} +TZ (?:[PMCE][SD]T|UTC) +DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ} +DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE} +DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR} +DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND} +HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} + +# Syslog Dates: Month Day HH:MM:SS +SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME} +PROG [\x21-\x5a\x5c\x5e-\x7e]+ +SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])? 
+SYSLOGHOST %{IPORHOST} +SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}> +HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT} + +# Shortcuts +QS %{QUOTEDSTRING} + +# Log formats +SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}: +COMMONAPACHELOG %{IPORHOST:clientip} %{HTTPDUSER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response} (?:%{NUMBER:bytes}|-) +COMBINEDAPACHELOG %{COMMONAPACHELOG} %{QS:referrer} %{QS:agent} +HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:errormsg} +HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{WORD:module}:%{LOGLEVEL:loglevel}\] \[pid %{POSINT:pid}:tid %{NUMBER:tid}\]( \(%{POSINT:proxy_errorcode}\)%{DATA:proxy_errormessage}:)?( \[client %{IPORHOST:client}:%{POSINT:clientport}\])? %{DATA:errorcode}: %{GREEDYDATA:message} +HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG} + + +# Log Levels +LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo|INFO|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?) 
diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/haproxy b/plugins/ingest/src/main/packaging/config/grok/patterns/haproxy new file mode 100644 index 00000000000..ddabd193f0c --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/haproxy @@ -0,0 +1,39 @@ +## These patterns were tested w/ haproxy-1.4.15 + +## Documentation of the haproxy log formats can be found at the following links: +## http://code.google.com/p/haproxy-docs/wiki/HTTPLogFormat +## http://code.google.com/p/haproxy-docs/wiki/TCPLogFormat + +HAPROXYTIME (?!<[0-9])%{HOUR:haproxy_hour}:%{MINUTE:haproxy_minute}(?::%{SECOND:haproxy_second})(?![0-9]) +HAPROXYDATE %{MONTHDAY:haproxy_monthday}/%{MONTH:haproxy_month}/%{YEAR:haproxy_year}:%{HAPROXYTIME:haproxy_time}.%{INT:haproxy_milliseconds} + +# Override these default patterns to parse out what is captured in your haproxy.cfg +HAPROXYCAPTUREDREQUESTHEADERS %{DATA:captured_request_headers} +HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:captured_response_headers} + +# Example: +# These haproxy config lines will add data to the logs that are captured +# by the patterns below. Place them in your custom patterns directory to +# override the defaults. 
+# +# capture request header Host len 40 +# capture request header X-Forwarded-For len 50 +# capture request header Accept-Language len 50 +# capture request header Referer len 200 +# capture request header User-Agent len 200 +# +# capture response header Content-Type len 30 +# capture response header Content-Encoding len 10 +# capture response header Cache-Control len 200 +# capture response header Last-Modified len 200 +# +# HAPROXYCAPTUREDREQUESTHEADERS %{DATA:request_header_host}\|%{DATA:request_header_x_forwarded_for}\|%{DATA:request_header_accept_language}\|%{DATA:request_header_referer}\|%{DATA:request_header_user_agent} +# HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:response_header_content_type}\|%{DATA:response_header_content_encoding}\|%{DATA:response_header_cache_control}\|%{DATA:response_header_last_modified} + +# parse a haproxy 'httplog' line +HAPROXYHTTPBASE %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?"(|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?" 
+ +HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{HAPROXYHTTPBASE} + +# parse a haproxy 'tcplog' line +HAPROXYTCP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_queue}/%{INT:time_backend_connect}/%{NOTSPACE:time_duration} %{NOTSPACE:bytes_read} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/java b/plugins/ingest/src/main/packaging/config/grok/patterns/java new file mode 100644 index 00000000000..e968006ad2d --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/java @@ -0,0 +1,20 @@ +JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]* +#Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source' +JAVAFILE (?:[A-Za-z0-9_. 
-]+) +#Allow special method +JAVAMETHOD (?:()|[a-zA-Z$_][a-zA-Z$_0-9]*) +#Line number is optional in special cases 'Native method' or 'Unknown source' +JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\) +# Java Logs +JAVATHREAD (?:[A-Z]{2}-Processor[\d]+) +JAVACLASS (?:[a-zA-Z0-9-]+\.)+[A-Za-z0-9$]+ +JAVAFILE (?:[A-Za-z0-9_.-]+) +JAVASTACKTRACEPART at %{JAVACLASS:class}\.%{WORD:method}\(%{JAVAFILE:file}:%{NUMBER:line}\) +JAVALOGMESSAGE (.*) +# MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM +CATALINA_DATESTAMP %{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM) +# yyyy-MM-dd HH:mm:ss,SSS ZZZ eg: 2014-01-09 17:32:25,527 -0800 +TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) %{ISO8601_TIMEZONE} +CATALINALOG %{CATALINA_DATESTAMP:timestamp} %{JAVACLASS:class} %{JAVALOGMESSAGE:logmessage} +# 2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something completely unexpected happened...
+TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage} diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/junos b/plugins/ingest/src/main/packaging/config/grok/patterns/junos new file mode 100644 index 00000000000..4eea59d08cc --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/junos @@ -0,0 +1,9 @@ +# JUNOS 11.4 RT_FLOW patterns +RT_FLOW_EVENT (RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY) + +RT_FLOW1 %{RT_FLOW_EVENT:event}: %{GREEDYDATA:close-reason}: %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} \d+\(%{DATA:sent}\) \d+\(%{DATA:received}\) %{INT:elapsed-time} .* + +RT_FLOW2 %{RT_FLOW_EVENT:event}: session created %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} .* + +RT_FLOW3 %{RT_FLOW_EVENT:event}: session denied %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{INT:protocol-id}\(\d\) %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} .* + diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/linux-syslog b/plugins/ingest/src/main/packaging/config/grok/patterns/linux-syslog new file mode 100644 index 00000000000..dcffb41ba8f --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/linux-syslog @@ -0,0 +1,16 @@ +SYSLOG5424PRINTASCII [!-~]+ + +SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource}+(?: %{SYSLOGPROG}:|) 
+SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:pam_module}\(%{DATA:pam_caller}\): session %{WORD:pam_session_state} for user %{USERNAME:username}(?: by %{GREEDYDATA:pam_by})? + +CRON_ACTION [A-Z ]+ +CRONLOG %{SYSLOGBASE} \(%{USER:user}\) %{CRON_ACTION:action} \(%{DATA:message}\) + +SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message} + +# IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424) +SYSLOG5424PRI <%{NONNEGINT:syslog5424_pri}> +SYSLOG5424SD \[%{DATA}\]+ +SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:syslog5424_ver} +(?:%{TIMESTAMP_ISO8601:syslog5424_ts}|-) +(?:%{HOSTNAME:syslog5424_host}|-) +(-|%{SYSLOG5424PRINTASCII:syslog5424_app}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_proc}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_msgid}) +(?:%{SYSLOG5424SD:syslog5424_sd}|-|) + +SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:syslog5424_msg} diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/mcollective-patterns b/plugins/ingest/src/main/packaging/config/grok/patterns/mcollective-patterns new file mode 100644 index 00000000000..bb2f7f9bc82 --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/mcollective-patterns @@ -0,0 +1,4 @@ +# Remember, these can be multi-line events. 
+MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\]%{SPACE}%{LOGLEVEL:event_level} + +MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/mongodb b/plugins/ingest/src/main/packaging/config/grok/patterns/mongodb new file mode 100644 index 00000000000..78a43007c37 --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/mongodb @@ -0,0 +1,7 @@ +MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:component}\] %{GREEDYDATA:message} +MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \} +MONGO_SLOWQUERY %{WORD} %{MONGO_WORDDASH:database}\.%{MONGO_WORDDASH:collection} %{WORD}: %{MONGO_QUERY:query} %{WORD}:%{NONNEGINT:ntoreturn} %{WORD}:%{NONNEGINT:ntoskip} %{WORD}:%{NONNEGINT:nscanned}.*nreturned:%{NONNEGINT:nreturned}..+ (?[0-9]+)ms +MONGO_WORDDASH \b[\w-]+\b +MONGO3_SEVERITY \w +MONGO3_COMPONENT %{WORD}|- +MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:severity} %{MONGO3_COMPONENT:component}%{SPACE}(?:\[%{DATA:context}\])? %{GREEDYDATA:message} diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/nagios b/plugins/ingest/src/main/packaging/config/grok/patterns/nagios new file mode 100644 index 00000000000..f4a98bf533e --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/nagios @@ -0,0 +1,124 @@ +################################################################################## +################################################################################## +# Chop Nagios log files to smithereens! +# +# A set of GROK filters to process logfiles generated by Nagios. +# It does not yet, but this set intends to cover all possible Nagios logs.
+# +# Some more work needs to be done to cover all External Commands: +# http://old.nagios.org/developerinfo/externalcommands/commandlist.php +# +# If you need some support on these rules please contact: +# Jelle Smet http://smetj.net +# +################################################################################# +################################################################################# + +NAGIOSTIME \[%{NUMBER:nagios_epoch}\] + +############################################### +######## Begin nagios log types +############################################### +NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE +NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE + +NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION +NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION + +NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT +NAGIOS_TYPE_HOST_ALERT HOST ALERT + +NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT +NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT + +NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT +NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT + +NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK +NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK + +NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER +NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER + +NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND +NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION +############################################### +######## End nagios log types +############################################### + +############################################### +######## Begin external check types +############################################### +NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK +NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK +NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK +NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK +NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT +NAGIOS_EC_PROCESS_HOST_CHECK_RESULT 
PROCESS_HOST_CHECK_RESULT +NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME +NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME +NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS +NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS +NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS +NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS +NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS +NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS +############################################### +######## End external check types +############################################### +NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:nagios_message} + +NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message} +NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message} + +NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message} +NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message} + +NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message} +NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message} + 
+NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_message} +NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_message} + +NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} +NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} + +NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} +NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} + +NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name} +NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name} + +NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:nagios_type}: %{DATA:nagios_service};%{DATA:nagios_unknown1};%{DATA:nagios_unknown2} + +#################### +#### External checks +#################### + +#Disable host & service check +NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service} +NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname} + +#Enable host & service check 
+NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service} +NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname} + +#Process host & service check +NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result} +NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result} + +#Disable host & service notifications +NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} +NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} +NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service} + +#Enable host & service notifications +NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} +NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} +NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service} + 
+#Schedule host & service downtime +NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:nagios_command};%{DATA:nagios_hostname};%{NUMBER:nagios_start_time};%{NUMBER:nagios_end_time};%{NUMBER:nagios_fixed};%{NUMBER:nagios_trigger_id};%{NUMBER:nagios_duration};%{DATA:author};%{DATA:comment} + +#End matching line +NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS}) diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/postgresql b/plugins/ingest/src/main/packaging/config/grok/patterns/postgresql new file mode 100644 index 00000000000..c5b3e90b725 --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/postgresql @@ -0,0 +1,3 @@ +# Default postgresql pg_log format pattern +POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid} + diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/rails 
b/plugins/ingest/src/main/packaging/config/grok/patterns/rails new file mode 100644 index 00000000000..68a50c734cb --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/rails @@ -0,0 +1,13 @@ +RUUID \h{32} +# rails controller with action +RCONTROLLER (?[^#]+)#(?\w+) + +# this will often be the only line: +RAILS3HEAD (?m)Started %{WORD:verb} "%{URIPATHPARAM:request}" for %{IPORHOST:clientip} at (?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE}) +# for some strange reason, params are stripped of {} - not sure that's a good idea. +RPROCESSING \W*Processing by %{RCONTROLLER} as (?\S+)(?:\W*Parameters: {%{DATA:params}}\W*)? +RAILS3FOOT Completed %{NUMBER:response}%{DATA} in %{NUMBER:totalms}ms %{RAILS3PROFILE}%{GREEDYDATA} +RAILS3PROFILE (?:\(Views: %{NUMBER:viewms}ms \| ActiveRecord: %{NUMBER:activerecordms}ms|\(ActiveRecord: %{NUMBER:activerecordms}ms)? + +# putting it all together +RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?(?:%{DATA}\n)*)(?:%{RAILS3FOOT})?
diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/redis b/plugins/ingest/src/main/packaging/config/grok/patterns/redis new file mode 100644 index 00000000000..8655c4f043e --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/redis @@ -0,0 +1,3 @@ +REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME} +REDISLOG \[%{POSINT:pid}\] %{REDISTIMESTAMP:timestamp} \* + diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/ruby b/plugins/ingest/src/main/packaging/config/grok/patterns/ruby new file mode 100644 index 00000000000..b1729cddcb0 --- /dev/null +++ b/plugins/ingest/src/main/packaging/config/grok/patterns/ruby @@ -0,0 +1,2 @@ +RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO) +RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\] *%{RUBY_LOGLEVEL:loglevel} -- +%{DATA:progname}: %{GREEDYDATA:message} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index c082fe21b14..b1c982bef14 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -19,6 +19,8 @@ package org.elasticsearch.ingest; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequestBuilder; @@ -30,12 +32,15 @@ import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; - import java.util.Collection; import java.util.Map; import static 
org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNull.notNullValue; public class IngestClientIT extends ESIntegTestCase { @@ -47,22 +52,21 @@ public class IngestClientIT extends ESIntegTestCase { @Override protected Collection> transportClientPlugins() { return nodePlugins(); + } - public void testBasics() throws Exception { + public void test() throws Exception { new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) .setId("_id") .setSource(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") - .startObject() - .startObject("simple") - .field("path", "field2") - .field("expected_value", "abc") - .field("add_field", "field3") - .field("add_field_value", "xyz") - .endObject() - .endObject() + .startObject() + .startObject("grok") + .field("field", "field1") + .field("pattern", "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>") + .endObject() + .endObject() .endArray() .endObject().bytes()) .get(); @@ -78,7 +82,15 @@ public class IngestClientIT extends ESIntegTestCase { }); createIndex("test"); - client().prepareIndex("test", "type", "1").setSource("field2", "abc") + XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties") + .startObject("status").field("type", "integer").endObject() + .startObject("val").field("type", "float").endObject() + .endObject(); + PutMappingResponse putMappingResponse = client().admin().indices() + .preparePutMapping("test").setType("type").setSource(updateMappingBuilder).get(); + assertAcked(putMappingResponse); + + client().prepareIndex("test", "type", "1").setSource("field1", "123.42 400 ") .putHeader("ingest", "_id") .get(); @@ -87,21 +99,25 
@@ public class IngestClientIT extends ESIntegTestCase { public void run() { Map doc = client().prepareGet("test", "type", "1") .get().getSourceAsMap(); - assertThat(doc.get("field3"), equalTo("xyz")); + assertThat(doc.get("val"), equalTo(123.42)); + assertThat(doc.get("status"), equalTo(400)); + assertThat(doc.get("msg"), equalTo("foo")); } }); client().prepareBulk().add( - client().prepareIndex("test", "type", "2").setSource("field2", "abc") + client().prepareIndex("test", "type", "2").setSource("field1", "123.42 400 ") ).putHeader("ingest", "_id").get(); assertBusy(new Runnable() { @Override public void run() { Map doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); - assertThat(doc.get("field3"), equalTo("xyz")); + assertThat(doc.get("val"), equalTo(123.42)); + assertThat(doc.get("status"), equalTo(400)); + assertThat(doc.get("msg"), equalTo("foo")); } }); - + DeletePipelineResponse response = new DeletePipelineRequestBuilder(client(), DeletePipelineAction.INSTANCE) .setId("_id") .get(); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java new file mode 100644 index 00000000000..744ad7c6aac --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java @@ -0,0 +1,284 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.grok; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.io.InputStream; +import java.lang.Object; +import java.lang.String; +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + + +public class GrokTests extends ESTestCase { + private Map basePatterns; + + private Map newBankFromStreams(InputStream... inputStreams) throws IOException { + Map patternBank = new HashMap<>(); + + for (InputStream is : inputStreams) { + PatternUtils.loadBankFromStream(patternBank, is); + } + + return patternBank; + } + + @Before + public void setup() throws IOException { + basePatterns = newBankFromStreams( + getClass().getResourceAsStream("/grok/patterns/grok-patterns"), + getClass().getResourceAsStream("/grok/patterns/linux-syslog") + ); + } + + public void testSimpleSyslogLine() { + String line = "Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]"; + Grok grok = new Grok(basePatterns, "%{SYSLOGLINE}"); + Map matches = grok.captures(line); + assertEquals("evita", matches.get("logsource")); + assertEquals("Mar 16 00:01:25", matches.get("timestamp")); + assertEquals("connect from camomile.cloud9.net[168.100.1.3]", matches.get("message")); + assertEquals("postfix/smtpd", matches.get("program")); + assertEquals("1713", matches.get("pid")); + } + + public void testSyslog5424Line() { + String line = "<191>1 
2009-06-30T18:30:00+02:00 paxton.local grokdebug 4123 - [id1 foo=\\\"bar\\\"][id2 baz=\\\"something\\\"] Hello, syslog."; + Grok grok = new Grok(basePatterns, "%{SYSLOG5424LINE}"); + Map matches = grok.captures(line); + assertEquals("191", matches.get("syslog5424_pri")); + assertEquals("1", matches.get("syslog5424_ver")); + assertEquals("2009-06-30T18:30:00+02:00", matches.get("syslog5424_ts")); + assertEquals("paxton.local", matches.get("syslog5424_host")); + assertEquals("grokdebug", matches.get("syslog5424_app")); + assertEquals("4123", matches.get("syslog5424_proc")); + assertEquals(null, matches.get("syslog5424_msgid")); + assertEquals("[id1 foo=\\\"bar\\\"][id2 baz=\\\"something\\\"]", matches.get("syslog5424_sd")); + assertEquals("Hello, syslog.", matches.get("syslog5424_msg")); + } + + public void testDatePattern() { + String line = "fancy 12-12-12 12:12:12"; + Grok grok = new Grok(basePatterns, "(?%{DATE_EU} %{TIME})"); + Map matches = grok.captures(line); + assertEquals("12-12-12 12:12:12", matches.get("timestamp")); + } + + public void testNilCoercedValues() { + Grok grok = new Grok(basePatterns, "test (N/A|%{BASE10NUM:duration:float}ms)"); + Map matches = grok.captures("test 28.4ms"); + assertEquals(28.4f, matches.get("duration")); + matches = grok.captures("test N/A"); + assertEquals(null, matches.get("duration")); + } + + public void testNilWithNoCoercion() { + Grok grok = new Grok(basePatterns, "test (N/A|%{BASE10NUM:duration}ms)"); + Map matches = grok.captures("test 28.4ms"); + assertEquals("28.4", matches.get("duration")); + matches = grok.captures("test N/A"); + assertEquals(null, matches.get("duration")); + } + + public void testUnicodeSyslog() { + Grok grok = new Grok(basePatterns, "<%{POSINT:syslog_pri}>%{SPACE}%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(:?)(?:\\[%{GREEDYDATA:syslog_pid}\\])?(:?) 
%{GREEDYDATA:syslog_message}"); + Map matches = grok.captures("<22>Jan 4 07:50:46 mailmaster postfix/policy-spf[9454]: : SPF permerror (Junk encountered in record 'v=spf1 mx a:mail.domain.no ip4:192.168.0.4 �all'): Envelope-from: email@domain.no"); + assertThat(matches.get("syslog_pri"), equalTo("22")); + assertThat(matches.get("syslog_program"), equalTo("postfix/policy-spf")); + assertThat(matches.get("tags"), nullValue()); + } + + public void testNamedFieldsWithWholeTextMatch() { + Grok grok = new Grok(basePatterns, "%{DATE_EU:stimestamp}"); + Map matches = grok.captures("11/01/01"); + assertThat(matches.get("stimestamp"), equalTo("11/01/01")); + } + + public void testWithOniguramaNamedCaptures() { + Grok grok = new Grok(basePatterns, "(?\\w+)"); + Map matches = grok.captures("hello world"); + assertThat(matches.get("foo"), equalTo("hello")); + } + + public void testISO8601() { + Grok grok = new Grok(basePatterns, "^%{TIMESTAMP_ISO8601}$"); + List timeMessages = Arrays.asList( + "2001-01-01T00:00:00", + "1974-03-02T04:09:09", + "2010-05-03T08:18:18+00:00", + "2004-07-04T12:27:27-00:00", + "2001-09-05T16:36:36+0000", + "2001-11-06T20:45:45-0000", + "2001-12-07T23:54:54Z", + "2001-01-01T00:00:00.123456", + "1974-03-02T04:09:09.123456", + "2010-05-03T08:18:18.123456+00:00", + "2004-07-04T12:27:27.123456-00:00", + "2001-09-05T16:36:36.123456+0000", + "2001-11-06T20:45:45.123456-0000", + "2001-12-07T23:54:54.123456Z", + "2001-12-07T23:54:60.123456Z" // '60' second is a leap second. 
+ ); + for (String msg : timeMessages) { + assertThat(grok.match(msg), is(true)); + } + } + + public void testNotISO8601() { + Grok grok = new Grok(basePatterns, "^%{TIMESTAMP_ISO8601}$"); + List timeMessages = Arrays.asList( + "2001-13-01T00:00:00", // invalid month + "2001-00-01T00:00:00", // invalid month + "2001-01-00T00:00:00", // invalid day + "2001-01-32T00:00:00", // invalid day + "2001-01-aT00:00:00", // invalid day + "2001-01-1aT00:00:00", // invalid day + "2001-01-01Ta0:00:00", // invalid hour + "2001-01-01T0:00:00", // invalid hour + "2001-01-01T25:00:00", // invalid hour + "2001-01-01T01:60:00", // invalid minute + "2001-01-01T00:aa:00", // invalid minute + "2001-01-01T00:00:aa", // invalid second + "2001-01-01T00:00:-1", // invalid second + "2001-01-01T00:00:61", // invalid second + "2001-01-01T00:00:00A", // invalid timezone + "2001-01-01T00:00:00+", // invalid timezone + "2001-01-01T00:00:00+25", // invalid timezone + "2001-01-01T00:00:00+2500", // invalid timezone + "2001-01-01T00:00:00+25:00", // invalid timezone + "2001-01-01T00:00:00-25", // invalid timezone + "2001-01-01T00:00:00-2500", // invalid timezone + "2001-01-01T00:00:00-00:61" // invalid timezone + ); + for (String msg : timeMessages) { + assertThat(grok.match(msg), is(false)); + } + } + + public void testNoNamedCaptures() { + Map bank = new HashMap<>(); + + bank.put("NAME", "Tal"); + bank.put("EXCITED_NAME", "!!!%{NAME:name}!!!"); + bank.put("TEST", "hello world"); + + String text = "wowza !!!Tal!!! - Tal"; + String pattern = "%{EXCITED_NAME} - %{NAME}"; + Grok g = new Grok(bank, pattern, false); + + assertEquals("(?!!!(?Tal)!!!) 
- (?Tal)", g.toRegex(pattern)); + assertEquals(true, g.match(text)); + + Object actual = g.captures(text); + Map expected = new HashMap<>(); + expected.put("EXCITED_NAME_0", "!!!Tal!!!"); + expected.put("NAME_21", "Tal"); + expected.put("NAME_22", "Tal"); + assertEquals(expected, actual); + } + + public void testNumericCapturesCoercion() { + Map bank = new HashMap<>(); + bank.put("BASE10NUM", "(?[+-]?(?:(?:[0-9]+(?:\\.[0-9]+)?)|(?:\\.[0-9]+)))"); + bank.put("NUMBER", "(?:%{BASE10NUM})"); + + String pattern = "%{NUMBER:bytes:float} %{NUMBER:status} %{NUMBER}"; + Grok g = new Grok(bank, pattern); + + String text = "12009.34 200 9032"; + Map expected = new HashMap<>(); + expected.put("bytes", 12009.34f); + expected.put("status", "200"); + Map actual = g.captures(text); + + assertEquals(expected, actual); + } + + public void testApacheLog() { + String logLine = "31.184.238.164 - - [24/Jul/2014:05:35:37 +0530] \"GET /logs/access.log HTTP/1.0\" 200 69849 \"http://8rursodiol.enjin.com\" \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 Safari/537.36\" \"www.dlwindianrailways.com\""; + Grok grok = new Grok(basePatterns, "%{COMBINEDAPACHELOG}"); + Map matches = grok.captures(logLine); + + assertEquals("31.184.238.164", matches.get("clientip")); + assertEquals("-", matches.get("ident")); + assertEquals("-", matches.get("auth")); + assertEquals("24/Jul/2014:05:35:37 +0530", matches.get("timestamp")); + assertEquals("GET", matches.get("verb")); + assertEquals("/logs/access.log", matches.get("request")); + assertEquals("1.0", matches.get("httpversion")); + assertEquals("200", matches.get("response")); + assertEquals("69849", matches.get("bytes")); + assertEquals("\"http://8rursodiol.enjin.com\"", matches.get("referrer")); + assertEquals(null, matches.get("port")); + assertEquals("\"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.12785 
YaBrowser/13.12.1599.12785 Safari/537.36\"", matches.get("agent")); + } + + public void testComplete() { + Map bank = new HashMap<>(); + bank.put("MONTHDAY", "(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])"); + bank.put("MONTH", "\\b(?:Jan(?:uary|uar)?|Feb(?:ruary|ruar)?|M(?:a|ä)?r(?:ch|z)?|Apr(?:il)?|Ma(?:y|i)?|Jun(?:e|i)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|O(?:c|k)?t(?:ober)?|Nov(?:ember)?|De(?:c|z)(?:ember)?)\\b"); + bank.put("MINUTE", "(?:[0-5][0-9])"); + bank.put("YEAR", "(?>\\d\\d){1,2}"); + bank.put("HOUR", "(?:2[0123]|[01]?[0-9])"); + bank.put("SECOND", "(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)"); + bank.put("TIME", "(?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])"); + bank.put("INT", "(?:[+-]?(?:[0-9]+))"); + bank.put("HTTPDATE", "%{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}"); + bank.put("WORD", "\\b\\w+\\b"); + bank.put("BASE10NUM", "(?[+-]?(?:(?:[0-9]+(?:\\.[0-9]+)?)|(?:\\.[0-9]+)))"); + bank.put("NUMBER", "(?:%{BASE10NUM})"); + bank.put("IPV6", "((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{
1,4}){0,5}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:)))(%.+)?"); + bank.put("IPV4", "(?(?\"(?>\\\\.|[^\\\\\"]+)+\"|\"\"|(?>'(?>\\\\.|[^\\\\']+)+')|''|(?>`(?>\\\\.|[^\\\\`]+)+`)|``))"); + + String text = "83.149.9.216 - - [19/Jul/2015:08:13:42 +0000] \"GET /presentations/logstash-monitorama-2013/images/kibana-dashboard3.png HTTP/1.1\" 200 171717 \"http://semicomplete.com/presentations/logstash-monitorama-2013/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\""; + String pattern = "%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \\[%{HTTPDATE:timestamp}\\] \"%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:httpversion}\" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}"; + + Grok grok = new Grok(bank, pattern); + + Map expected = new HashMap<>(); + expected.put("clientip", "83.149.9.216"); + expected.put("ident", "-"); + expected.put("auth", "-"); + expected.put("timestamp", "19/Jul/2015:08:13:42 +0000"); + expected.put("verb", "GET"); + expected.put("request", "/presentations/logstash-monitorama-2013/images/kibana-dashboard3.png"); + expected.put("httpversion", "1.1"); + expected.put("response", 200); + expected.put("bytes", 171717); + expected.put("referrer", "\"http://semicomplete.com/presentations/logstash-monitorama-2013/\""); + expected.put("agent", "\"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\""); + + Map actual = grok.captures(text); + + assertEquals(expected, actual); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index bdcb19bbc6f..6e9593ff9ca 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; +import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.processor.simple.SimpleProcessor; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.InternalSearchHit; @@ -31,6 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -54,7 +56,8 @@ public class PipelineStoreTests extends ESTestCase { threadPool = new ThreadPool("test"); ClusterService clusterService = mock(ClusterService.class); client = mock(PipelineStoreClient.class); - store = new PipelineStore(Settings.EMPTY, threadPool, clusterService, client, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Builder.Factory())); + Environment environment = mock(Environment.class); + store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Builder.Factory())); store.start(); } @@ -65,7 +68,7 @@ public class PipelineStoreTests extends ESTestCase { } - public void testUpdatePipeline() { + public void testUpdatePipeline() throws IOException { List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) @@ -126,7 +129,7 @@ public class PipelineStoreTests extends ESTestCase { }); } - public void testGetReference() { + public void testGetReference() throws IOException { // fill the store up for the test: List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, 
"foo", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml new file mode 100644 index 00000000000..a119c59c524 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml @@ -0,0 +1,48 @@ +--- +"Test Grok Pipeline": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "grok" : { + "field" : "field1", + "pattern" : "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + ingest: "my_pipeline" + body: {field1: "123.42 400 "} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.val: 123.42 } + - match: { _source.status: 400 } + - match: { _source.msg: "foo" } + From 03387266caf2c040924c7b1d98482c281daae84a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 20 Oct 2015 16:36:58 +0300 Subject: [PATCH 023/347] ingest: Added new `geoip` processor, that adds geographical information to documents based on an ip address. The information is fetched from the Maxmind geolite2 database, that is embedded in the ingest plugin. 
--- docs/plugins/ingest.asciidoc | 63 +++++ plugins/ingest/build.gradle | 19 ++ plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 | 1 + plugins/ingest/licenses/geoip2-LICENSE.txt | 202 ++++++++++++++++ plugins/ingest/licenses/geoip2-NOTICE.txt | 3 + .../jackson-annotations-2.5.0.jar.sha1 | 1 + .../licenses/jackson-annotations-LICENSE | 8 + .../licenses/jackson-annotations-NOTICE | 20 ++ .../licenses/jackson-databind-2.5.3.jar.sha1 | 1 + .../ingest/licenses/jackson-databind-LICENSE | 8 + .../ingest/licenses/jackson-databind-NOTICE | 20 ++ .../ingest/licenses/maxmind-db-1.0.0.jar.sha1 | 1 + .../ingest/licenses/maxmind-db-LICENSE.txt | 202 ++++++++++++++++ plugins/ingest/licenses/maxmind-db-NOTICE.txt | 3 + .../ingest/processor/Processor.java | 6 +- .../geoip/DatabaseReaderService.java | 51 ++++ .../processor/geoip/GeoIpProcessor.java | 223 ++++++++++++++++++ .../processor/simple/SimpleProcessor.java | 2 + .../plugin/ingest/IngestModule.java | 2 + .../plugin/ingest/IngestPlugin.java | 3 + .../plugin/ingest/PipelineStore.java | 7 + .../plugin-metadata/plugin-security.policy | 23 ++ .../geoip/DatabaseReaderServiceTests.java | 41 ++++ .../geoip/GeoProcessorBuilderTests.java | 69 ++++++ .../processor/geoip/GeoProcessorTests.java | 80 +++++++ .../plugin/ingest/PipelineStoreTests.java | 4 +- .../test/ingest/40_geoip_processor.yaml | 106 +++++++++ 27 files changed, 1166 insertions(+), 3 deletions(-) create mode 100644 plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 create mode 100644 plugins/ingest/licenses/geoip2-LICENSE.txt create mode 100644 plugins/ingest/licenses/geoip2-NOTICE.txt create mode 100644 plugins/ingest/licenses/jackson-annotations-2.5.0.jar.sha1 create mode 100644 plugins/ingest/licenses/jackson-annotations-LICENSE create mode 100644 plugins/ingest/licenses/jackson-annotations-NOTICE create mode 100644 plugins/ingest/licenses/jackson-databind-2.5.3.jar.sha1 create mode 100644 plugins/ingest/licenses/jackson-databind-LICENSE create mode 100644 
plugins/ingest/licenses/jackson-databind-NOTICE create mode 100644 plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 create mode 100644 plugins/ingest/licenses/maxmind-db-LICENSE.txt create mode 100644 plugins/ingest/licenses/maxmind-db-NOTICE.txt create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java create mode 100644 plugins/ingest/src/main/plugin-metadata/plugin-security.policy create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorBuilderTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorTests.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 16c42cf9dfb..43daebc4ece 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -142,6 +142,69 @@ This pipeline will insert these named captures as new fields within the document } -------------------------------------------------- +==== Geoip processor + +The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases. +This processor adds this information by default under the `geoip` field. + +The ingest plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available +under the CCA-ShareAlike 3.0 license. For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/ + +The GeoIP processor can run with other geoip2 databases from Maxmind. 
The files must be copied into the geoip config directory +and the `database_file` option should be used to specify the filename of the custom database. The geoip config directory +is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too. + +[[geoip-options]] +.Geoip options +[options="header"] +|====== +| Name | Required | Default | Description +| `ip_field` | yes | - | The field to get the ip address from for the geographical lookup. +| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. +| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. +|====== + +If the GeoLite2 City database is used then the following fields will be added under the `target_field`: `ip`, +`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` +and `location`. + +If the GeoLite2 Country database is used then the following fields will be added under the `target_field`: `ip`, +`country_iso_code`, `country_name` and `continent_name`.
+ +An example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field`: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "geoip" : { + "ip_field" : "ip" + } + } + ] +} +-------------------------------------------------- + +An example that uses the default country database and add the geographical information to the `geo` field based on the `ip` field`: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "geoip" : { + "ip_field" : "ip", + "target_field" : "geo", + "database_file" : "GeoLite2-Country.mmdb" + } + } + ] +} +-------------------------------------------------- + === Put pipeline API The put pipeline api adds pipelines and updates existing pipelines in the cluster. diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 7fe64f3295f..4aca00bcb9f 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -24,6 +24,11 @@ esplugin { dependencies { compile 'org.jruby.joni:joni:2.1.6' + compile (group: 'com.maxmind.geoip2', name: 'geoip2', version: '2.3.1') { + // we don't use Maxmind's http service: + exclude group: 'com.google.http-client', module: 'google-http-client' + } + testCompile 'org.elasticsearch:geolite2-databases:20151029' testCompile 'org.elasticsearch:securemock:1.1' } @@ -35,5 +40,19 @@ sourceSets { } } +task copyDefaultGeoIp2DatabaseFiles(type: Copy) { + from zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases')}) + into "${project.buildDir}/geoip" + include "*.mmdb" +} + +project.bundlePlugin.dependsOn(copyDefaultGeoIp2DatabaseFiles) + compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" + +bundlePlugin { + from("${project.buildDir}/geoip") { + into 'config/geoip' + } +} diff --git 
a/plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 b/plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 new file mode 100644 index 00000000000..cb1982ffef5 --- /dev/null +++ b/plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 @@ -0,0 +1 @@ +0b128448f5bcfafb6caa82ae079ab39aa56dafb4 diff --git a/plugins/ingest/licenses/geoip2-LICENSE.txt b/plugins/ingest/licenses/geoip2-LICENSE.txt new file mode 100644 index 00000000000..7a4a3ea2424 --- /dev/null +++ b/plugins/ingest/licenses/geoip2-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/plugins/ingest/licenses/geoip2-NOTICE.txt b/plugins/ingest/licenses/geoip2-NOTICE.txt new file mode 100644 index 00000000000..448b71d47d3 --- /dev/null +++ b/plugins/ingest/licenses/geoip2-NOTICE.txt @@ -0,0 +1,3 @@ +This software is Copyright (c) 2013 by MaxMind, Inc. + +This is free software, licensed under the Apache License, Version 2.0. \ No newline at end of file diff --git a/plugins/ingest/licenses/jackson-annotations-2.5.0.jar.sha1 b/plugins/ingest/licenses/jackson-annotations-2.5.0.jar.sha1 new file mode 100644 index 00000000000..862ac6f304f --- /dev/null +++ b/plugins/ingest/licenses/jackson-annotations-2.5.0.jar.sha1 @@ -0,0 +1 @@ +a2a55a3375bc1cef830ca426d68d2ea22961190e diff --git a/plugins/ingest/licenses/jackson-annotations-LICENSE b/plugins/ingest/licenses/jackson-annotations-LICENSE new file mode 100644 index 00000000000..f5f45d26a49 --- /dev/null +++ b/plugins/ingest/licenses/jackson-annotations-LICENSE @@ -0,0 +1,8 @@ +This copy of Jackson JSON processor streaming parser/generator is licensed under the +Apache (Software) License, version 2.0 ("the License"). +See the License for details about distribution rights, and the +specific rights regarding derivate works. 
+ +You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 diff --git a/plugins/ingest/licenses/jackson-annotations-NOTICE b/plugins/ingest/licenses/jackson-annotations-NOTICE new file mode 100644 index 00000000000..4c976b7b4cc --- /dev/null +++ b/plugins/ingest/licenses/jackson-annotations-NOTICE @@ -0,0 +1,20 @@ +# Jackson JSON processor + +Jackson is a high-performance, Free/Open Source JSON processing library. +It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has +been in development since 2007. +It is currently developed by a community of developers, as well as supported +commercially by FasterXML.com. + +## Licensing + +Jackson core and extension components may licensed under different licenses. +To find the details that apply to this artifact see the accompanying LICENSE file. +For more information, including possible other licensing options, contact +FasterXML.com (http://fasterxml.com). + +## Credits + +A list of contributors may be found from CREDITS file, which is included +in some artifacts (usually source distributions); but is always available +from the source code management (SCM) system project uses. diff --git a/plugins/ingest/licenses/jackson-databind-2.5.3.jar.sha1 b/plugins/ingest/licenses/jackson-databind-2.5.3.jar.sha1 new file mode 100644 index 00000000000..cdc66958059 --- /dev/null +++ b/plugins/ingest/licenses/jackson-databind-2.5.3.jar.sha1 @@ -0,0 +1 @@ +c37875ff66127d93e5f672708cb2dcc14c8232ab diff --git a/plugins/ingest/licenses/jackson-databind-LICENSE b/plugins/ingest/licenses/jackson-databind-LICENSE new file mode 100644 index 00000000000..f5f45d26a49 --- /dev/null +++ b/plugins/ingest/licenses/jackson-databind-LICENSE @@ -0,0 +1,8 @@ +This copy of Jackson JSON processor streaming parser/generator is licensed under the +Apache (Software) License, version 2.0 ("the License"). +See the License for details about distribution rights, and the +specific rights regarding derivate works. 
+ +You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 diff --git a/plugins/ingest/licenses/jackson-databind-NOTICE b/plugins/ingest/licenses/jackson-databind-NOTICE new file mode 100644 index 00000000000..4c976b7b4cc --- /dev/null +++ b/plugins/ingest/licenses/jackson-databind-NOTICE @@ -0,0 +1,20 @@ +# Jackson JSON processor + +Jackson is a high-performance, Free/Open Source JSON processing library. +It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has +been in development since 2007. +It is currently developed by a community of developers, as well as supported +commercially by FasterXML.com. + +## Licensing + +Jackson core and extension components may licensed under different licenses. +To find the details that apply to this artifact see the accompanying LICENSE file. +For more information, including possible other licensing options, contact +FasterXML.com (http://fasterxml.com). + +## Credits + +A list of contributors may be found from CREDITS file, which is included +in some artifacts (usually source distributions); but is always available +from the source code management (SCM) system project uses. diff --git a/plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 b/plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 new file mode 100644 index 00000000000..4437d02a227 --- /dev/null +++ b/plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 @@ -0,0 +1 @@ +b140295a52005aaf224b6c711ad4ecb38b1da155 diff --git a/plugins/ingest/licenses/maxmind-db-LICENSE.txt b/plugins/ingest/licenses/maxmind-db-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/ingest/licenses/maxmind-db-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/ingest/licenses/maxmind-db-NOTICE.txt b/plugins/ingest/licenses/maxmind-db-NOTICE.txt new file mode 100644 index 00000000000..1ebe2b0826d --- /dev/null +++ b/plugins/ingest/licenses/maxmind-db-NOTICE.txt @@ -0,0 +1,3 @@ +This software is Copyright (c) 2014 by MaxMind, Inc. + +This is free software, licensed under the Apache License, Version 2.0. 
diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 23ac6e15edf..6efb3ce06f4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.Data; +import java.io.Closeable; import java.io.IOException; import java.nio.file.Path; import java.util.Map; @@ -55,7 +56,7 @@ public interface Processor { /** * A factory that creates a processor builder when processor instances for pipelines are being created. */ - interface Factory { + interface Factory extends Closeable { /** * Creates the builder. @@ -67,6 +68,9 @@ public interface Processor { default void setConfigDirectory(Path configDirectory) { } + @Override + default void close() throws IOException { + } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java new file mode 100644 index 00000000000..8d61accf92e --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.geoip; + +import com.maxmind.geoip2.DatabaseReader; + +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; + +final class DatabaseReaderService implements Closeable { + + private final Map databaseReaders = new HashMap<>(); + + synchronized DatabaseReader getOrCreateDatabaseReader(String key, InputStream inputStream) throws IOException { + DatabaseReader databaseReader = databaseReaders.get(key); + if (databaseReader != null) { + return databaseReader; + } + + databaseReader = new DatabaseReader.Builder(inputStream).build(); + databaseReaders.put(key, databaseReader); + return databaseReader; + } + + @Override + public void close() throws IOException { + for (DatabaseReader databaseReader : databaseReaders.values()) { + databaseReader.close(); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java new file mode 100644 index 00000000000..103158d4696 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -0,0 +1,223 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.geoip; + +import com.maxmind.geoip2.DatabaseReader; +import com.maxmind.geoip2.exception.GeoIp2Exception; +import com.maxmind.geoip2.model.CityResponse; +import com.maxmind.geoip2.model.CountryResponse; +import com.maxmind.geoip2.record.*; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.InputStream; +import java.net.UnknownHostException; +import java.nio.file.Files; +import java.io.IOException; +import java.net.InetAddress; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.HashMap; +import java.util.Map; + +public final class GeoIpProcessor implements Processor { + + public static final String TYPE = "geoip"; + + private final String ipField; + private final String targetField; + // pck-protected visibility for tests: + final DatabaseReader dbReader; + + GeoIpProcessor(String ipField, DatabaseReader dbReader, String targetField) throws IOException { + this.ipField = ipField; + this.targetField = targetField == null ? 
"geoip" : targetField; + this.dbReader = dbReader; + } + + @Override + public void execute(Data data) { + String ip = data.getProperty(ipField); + final InetAddress ipAddress; + try { + ipAddress = InetAddress.getByName(ip); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + + final Map geoData; + switch (dbReader.getMetadata().getDatabaseType()) { + case "GeoLite2-City": + geoData = retrieveCityGeoData(ipAddress); + break; + case "GeoLite2-Country": + geoData = retrieveCountryGeoData(ipAddress); + break; + default: + throw new IllegalStateException("Unsupported database type [" + dbReader.getMetadata().getDatabaseType() + "]"); + } + data.addField(targetField, geoData); + } + + private Map retrieveCityGeoData(InetAddress ipAddress) { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + CityResponse response = AccessController.doPrivileged(new PrivilegedAction() { + @Override + public CityResponse run() { + try { + return dbReader.city(ipAddress); + } catch (IOException | GeoIp2Exception e) { + throw new RuntimeException(e); + } + } + }); + + Country country = response.getCountry(); + City city = response.getCity(); + Location location = response.getLocation(); + Continent continent = response.getContinent(); + Subdivision subdivision = response.getMostSpecificSubdivision(); + + Map geoData = new HashMap(); + geoData.put("ip", NetworkAddress.formatAddress(ipAddress)); + geoData.put("country_iso_code", country.getIsoCode()); + geoData.put("country_name", country.getName()); + geoData.put("continent_name", continent.getName()); + geoData.put("region_name", subdivision.getName()); + geoData.put("city_name", city.getName()); + geoData.put("timezone", location.getTimeZone()); + geoData.put("latitude", location.getLatitude()); + geoData.put("longitude", location.getLongitude()); + if (location.getLatitude() != null && location.getLongitude() != null) { + 
geoData.put("location", new double[]{location.getLongitude(), location.getLatitude()}); + } + return geoData; + } + + private Map retrieveCountryGeoData(InetAddress ipAddress) { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + CountryResponse response = AccessController.doPrivileged(new PrivilegedAction() { + @Override + public CountryResponse run() { + try { + return dbReader.country(ipAddress); + } catch (IOException | GeoIp2Exception e) { + throw new RuntimeException(e); + } + } + }); + + Country country = response.getCountry(); + Continent continent = response.getContinent(); + + Map geoData = new HashMap(); + geoData.put("ip", NetworkAddress.formatAddress(ipAddress)); + geoData.put("country_iso_code", country.getIsoCode()); + geoData.put("country_name", country.getName()); + geoData.put("continent_name", continent.getName()); + return geoData; + } + + public static class Builder implements Processor.Builder { + + private final Path geoIpConfigDirectory; + private final DatabaseReaderService databaseReaderService; + + private String ipField; + private String databaseFile = "GeoLite2-City.mmdb"; + private String targetField = "geoip"; + + public Builder(Path geoIpConfigDirectory, DatabaseReaderService databaseReaderService) { + this.geoIpConfigDirectory = geoIpConfigDirectory; + this.databaseReaderService = databaseReaderService; + } + + public void setIpField(String ipField) { + this.ipField = ipField; + } + + public void setDatabaseFile(String dbPath) { + this.databaseFile = dbPath; + } + + public void setTargetField(String targetField) { + this.targetField = targetField; + } + + public void fromMap(Map config) { + this.ipField = (String) config.get("ip_field"); + + String targetField = (String) config.get("target_field"); + if (targetField != null) { + this.targetField = targetField; + } + String databaseFile = (String) config.get("database_file"); + if (databaseFile != null) { + 
this.databaseFile = databaseFile; + } + } + + @Override + public Processor build() throws IOException { + Path databasePath = geoIpConfigDirectory.resolve(databaseFile); + if (Files.exists(databasePath)) { + try (InputStream database = Files.newInputStream(databasePath, StandardOpenOption.READ)) { + DatabaseReader databaseReader = databaseReaderService.getOrCreateDatabaseReader(databaseFile, database); + return new GeoIpProcessor(ipField, databaseReader, targetField); + } + } else { + throw new IllegalArgumentException("database file [" + databaseFile + "] doesn't exist in [" + geoIpConfigDirectory + "]"); + } + } + + public static class Factory implements Processor.Builder.Factory { + + private Path geoIpConfigDirectory; + private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); + + @Override + public Processor.Builder create() { + return new Builder(geoIpConfigDirectory, databaseReaderService); + } + + @Override + public void setConfigDirectory(Path configDirectory) { + geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); + } + + @Override + public void close() throws IOException { + databaseReaderService.close(); + } + } + + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java index 82e25e77e8c..2892f0e6250 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest.processor.simple; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.processor.Processor; +import java.io.IOException; import java.util.Map; public final class SimpleProcessor implements Processor { @@ -92,6 +93,7 @@ public final class SimpleProcessor implements Processor { public 
Processor.Builder create() { return new Builder(); } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index d5f2dbb4ae4..f8903cf5aa0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -22,6 +22,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; import org.elasticsearch.ingest.processor.simple.SimpleProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; @@ -41,6 +42,7 @@ public class IngestModule extends AbstractModule { binder().bind(PipelineStoreClient.class).asEagerSingleton(); registerProcessor(SimpleProcessor.TYPE, SimpleProcessor.Builder.Factory.class); + registerProcessor(GeoIpProcessor.TYPE, GeoIpProcessor.Builder.Factory.class); registerProcessor(GrokProcessor.TYPE, GrokProcessor.Builder.Factory.class); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Builder.Factory.class); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 11365167732..cb5220073da 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -20,6 +20,7 @@ package org.elasticsearch.plugin.ingest; +import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.ActionModule; import org.elasticsearch.client.Client; import 
org.elasticsearch.client.transport.TransportClient; @@ -39,6 +40,8 @@ import org.elasticsearch.plugin.ingest.transport.put.PutPipelineTransportAction; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestModule; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collection; import java.util.Collections; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index defdeb459c9..70cd45bad8d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -76,6 +76,13 @@ public class PipelineStore extends AbstractLifecycleComponent { @Override protected void doClose() { + for (Processor.Builder.Factory factory : processorFactoryRegistry.values()) { + try { + factory.close(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } } public Pipeline get(String id) { diff --git a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 00000000000..3faba716fd1 --- /dev/null +++ b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +grant { + // needed because geoip2 is using reflection to deserialize data into its own domain classes + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; +}; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java new file mode 100644 index 00000000000..ebf3fefdba0 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.geoip; + +import com.maxmind.geoip2.DatabaseReader; +import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.*; + +import java.io.InputStream; + +public class DatabaseReaderServiceTests extends ESTestCase { + + public void testLookup() throws Exception { + InputStream database = DatabaseReaderServiceTests.class.getResourceAsStream("/GeoLite2-City.mmdb"); + + DatabaseReaderService service = new DatabaseReaderService(); + DatabaseReader instance = service.getOrCreateDatabaseReader("key1", database); + assertThat(service.getOrCreateDatabaseReader("key1", database), equalTo(instance)); + + database = DatabaseReaderServiceTests.class.getResourceAsStream("/GeoLite2-City.mmdb"); + assertThat(service.getOrCreateDatabaseReader("key2", database), not(equalTo(instance))); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorBuilderTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorBuilderTests.java new file mode 100644 index 00000000000..1d026e18df7 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorBuilderTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.geoip; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.StreamsUtils; +import org.junit.Before; + +import static org.hamcrest.Matchers.*; + +import java.io.ByteArrayInputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; + +public class GeoProcessorBuilderTests extends ESTestCase { + + private Path geoIpConfigDir; + + @Before + public void prepareConfigDirectory() throws Exception { + geoIpConfigDir = createTempDir(); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), geoIpConfigDir.resolve("GeoLite2-City.mmdb")); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), geoIpConfigDir.resolve("GeoLite2-Country.mmdb")); + } + + public void testBuild_defaults() throws Exception { + GeoIpProcessor.Builder builder = new GeoIpProcessor.Builder(geoIpConfigDir, new DatabaseReaderService()); + builder.fromMap(Collections.emptyMap()); + GeoIpProcessor processor = (GeoIpProcessor) builder.build(); + assertThat(processor.dbReader.getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); + } + + public void testBuild_dbFile() throws Exception { + GeoIpProcessor.Builder builder = new GeoIpProcessor.Builder(geoIpConfigDir, new DatabaseReaderService()); + builder.fromMap(Collections.singletonMap("database_file", "GeoLite2-Country.mmdb")); + GeoIpProcessor processor = (GeoIpProcessor) builder.build(); + assertThat(processor.dbReader.getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); + } + + public void testBuild_nonExistingDbFile() throws Exception { + GeoIpProcessor.Builder builder = new GeoIpProcessor.Builder(geoIpConfigDir, new DatabaseReaderService()); + builder.fromMap(Collections.singletonMap("database_file", "does-not-exist.mmdb")); + try 
{ + builder.build(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), startsWith("database file [does-not-exist.mmdb] doesn't exist in")); + } + } + + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorTests.java new file mode 100644 index 00000000000..43b293501ae --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.geoip; + +import com.maxmind.geoip2.DatabaseReader; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.test.ESTestCase; + +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class GeoProcessorTests extends ESTestCase { + + public void testCity() throws Exception { + InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); + GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field"); + + Map document = new HashMap<>(); + document.put("source_field", "82.170.213.79"); + Data data = new Data("_index", "_type", "_id", document); + processor.execute(data); + + assertThat(data.getDocument().size(), equalTo(2)); + assertThat(data.getDocument().get("source_field"), equalTo("82.170.213.79")); + @SuppressWarnings("unchecked") + Map geoData = (Map) data.getDocument().get("target_field"); + assertThat(geoData.size(), equalTo(10)); + assertThat(geoData.get("ip"), equalTo("82.170.213.79")); + assertThat(geoData.get("country_iso_code"), equalTo("NL")); + assertThat(geoData.get("country_name"), equalTo("Netherlands")); + assertThat(geoData.get("continent_name"), equalTo("Europe")); + assertThat(geoData.get("region_name"), equalTo("North Holland")); + assertThat(geoData.get("city_name"), equalTo("Amsterdam")); + assertThat(geoData.get("timezone"), equalTo("Europe/Amsterdam")); + assertThat(geoData.get("latitude"), equalTo(52.374)); + assertThat(geoData.get("longitude"), equalTo(4.8897)); + assertThat(geoData.get("location"), equalTo(new double[]{4.8897, 52.374})); + } + + public void testCountry() throws Exception { + InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-Country.mmdb"); + GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field"); + + Map 
document = new HashMap<>(); + document.put("source_field", "82.170.213.79"); + Data data = new Data("_index", "_type", "_id", document); + processor.execute(data); + + assertThat(data.getDocument().size(), equalTo(2)); + assertThat(data.getDocument().get("source_field"), equalTo("82.170.213.79")); + @SuppressWarnings("unchecked") + Map geoData = (Map) data.getDocument().get("target_field"); + assertThat(geoData.size(), equalTo(4)); + assertThat(geoData.get("ip"), equalTo("82.170.213.79")); + assertThat(geoData.get("country_iso_code"), equalTo("NL")); + assertThat(geoData.get("country_name"), equalTo("Netherlands")); + assertThat(geoData.get("continent_name"), equalTo("Europe")); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 6e9593ff9ca..fc810fb7247 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -68,7 +68,7 @@ public class PipelineStoreTests extends ESTestCase { } - public void testUpdatePipeline() throws IOException { + public void testUpdatePipeline() throws Exception { List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) @@ -129,7 +129,7 @@ public class PipelineStoreTests extends ESTestCase { }); } - public void testGetReference() throws IOException { + public void testGetReference() throws Exception { // fill the store up for the test: List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "foo", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml 
b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml new file mode 100644 index 00000000000..7aa0a1c8e2e --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml @@ -0,0 +1,106 @@ +--- +"Test geoip processor with defaults": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "geoip" : { + "ip_field" : "field1" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + ingest: "my_pipeline" + body: {field1: "128.101.101.101"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "128.101.101.101" } + - length: { _source.geoip: 10 } + - match: { _source.geoip.city_name: "Minneapolis" } + - match: { _source.geoip.country_iso_code: "US" } + - match: { _source.geoip.ip: "128.101.101.101" } + - match: { _source.geoip.latitude: 44.9759 } + - match: { _source.geoip.longitude: -93.2166 } + - match: { _source.geoip.location: [-93.2166, 44.9759] } + - match: { _source.geoip.timezone: "America/Chicago" } + - match: { _source.geoip.country_name: "United States" } + - match: { _source.geoip.region_name: "Minnesota" } + - match: { _source.geoip.continent_name: "North America" } + +--- +"Test geoip processor with different database file": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "geoip" : { + "ip_field" : "field1", + "database_file" : "GeoLite2-Country.mmdb" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the 
background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + ingest: "my_pipeline" + body: {field1: "128.101.101.101"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "128.101.101.101" } + - length: { _source.geoip: 4 } + - match: { _source.geoip.country_iso_code: "US" } + - match: { _source.geoip.ip: "128.101.101.101" } + - match: { _source.geoip.country_name: "United States" } + - match: { _source.geoip.continent_name: "North America" } From 95e6b99d2b143765e0b98f44abf394c5b80afb99 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 3 Nov 2015 12:17:10 +0700 Subject: [PATCH 024/347] renamed test file --- .../ingest/{40_geoip_processor.yaml => 50_geoip_processor.yaml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/{40_geoip_processor.yaml => 50_geoip_processor.yaml} (100%) diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml rename to plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml From d935a3ab8178fe616f450fca80d2793b9df4be6e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 2 Nov 2015 21:27:53 -0800 Subject: [PATCH 025/347] Make Grok-specific classes package-protected. 
--- .../java/org/elasticsearch/ingest/processor/grok/Grok.java | 6 +----- .../elasticsearch/ingest/processor/grok/GrokMatchGroup.java | 2 +- .../elasticsearch/ingest/processor/grok/GrokProcessor.java | 5 ----- .../elasticsearch/ingest/processor/grok/PatternUtils.java | 2 +- 4 files changed, 3 insertions(+), 12 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java index e72aa948da6..94e5d22fc98 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java @@ -19,10 +19,6 @@ package org.elasticsearch.ingest.processor.grok; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; import java.lang.Object; import java.lang.String; import java.lang.StringIndexOutOfBoundsException; @@ -36,7 +32,7 @@ import org.jcodings.specific.UTF8Encoding; import org.joni.*; import org.joni.exception.ValueException; -public class Grok { +final class Grok { private static final String NAME_GROUP = "name"; private static final String SUBNAME_GROUP = "subname"; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java index c070850b7ca..86ddd6974fe 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java @@ -24,7 +24,7 @@ import java.lang.Integer; import java.lang.Object; import java.lang.String; -public class GrokMatchGroup { +final class GrokMatchGroup { private static final String DEFAULT_TYPE = "string"; private final String patternName; private final String fieldName; diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 16f9f4ee1a9..87d6e9b4a11 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -19,16 +19,11 @@ package org.elasticsearch.ingest.processor.grok; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.processor.Processor; -import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java index 7da8060b5fd..05b291d4ea6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java @@ -25,7 +25,7 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Map; -public final class PatternUtils { +final class PatternUtils { private PatternUtils() {} public static void loadBankFromStream(Map patternBank, InputStream inputStream) throws IOException { From 1a4b5bba2bc42fb1be0003cb71371062fedc24ff Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 28 Oct 2015 18:15:01 +0700 Subject: [PATCH 026/347] Simplify processor creation from map of maps by folding the build and builder factory in one interface called Factory. In tests processors can be created from the their constructors instead of builders. 
In the IngestModule, register instances instead of class instances. --- .../org/elasticsearch/ingest/Pipeline.java | 49 ++++--------- .../ingest/processor/Processor.java | 34 +++------- .../processor/geoip/GeoIpProcessor.java | 68 +++++-------------- .../ingest/processor/grok/GrokProcessor.java | 50 +++----------- .../processor/simple/SimpleProcessor.java | 46 ++----------- .../plugin/ingest/IngestModule.java | 21 +++--- .../plugin/ingest/PipelineStore.java | 14 ++-- ...sts.java => GeoProcessorFactoryTests.java} | 26 +++---- .../ingest/PipelineExecutionServiceTests.java | 30 +------- .../plugin/ingest/PipelineStoreTests.java | 2 +- .../transport/IngestActionFilterTests.java | 11 +-- 11 files changed, 94 insertions(+), 257 deletions(-) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/{GeoProcessorBuilderTests.java => GeoProcessorFactoryTests.java} (69%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index b91ef2f0f7e..f318de721f6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -23,10 +23,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.ingest.processor.Processor; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; +import java.util.*; /** * A pipeline is a list of {@link Processor} instances grouped under a unique id. 
@@ -37,7 +34,7 @@ public final class Pipeline { private final String description; private final List processors; - private Pipeline(String id, String description, List processors) { + public Pipeline(String id, String description, List processors) { this.id = id; this.description = description; this.processors = processors; @@ -73,47 +70,27 @@ public final class Pipeline { return processors; } - public final static class Builder { + public final static class Factory { - private final String id; - private String description; - private List processors = new ArrayList<>(); - - public Builder(String id) { - this.id = id; - } - - public void fromMap(Map config, Map processorRegistry) throws IOException { - description = (String) config.get("description"); + public Pipeline create(String id, Map config, Map processorRegistry) throws IOException { + String description = (String) config.get("description"); + List processors = new ArrayList<>(); @SuppressWarnings("unchecked") - List>> processors = (List>>) config.get("processors"); - if (processors != null ) { - for (Map> processor : processors) { + List>> processorConfigs = (List>>) config.get("processors"); + if (processorConfigs != null ) { + for (Map> processor : processorConfigs) { for (Map.Entry> entry : processor.entrySet()) { - Processor.Builder builder = processorRegistry.get(entry.getKey()).create(); - if (builder != null) { - builder.fromMap(entry.getValue()); - this.processors.add(builder.build()); + Processor.Factory factory = processorRegistry.get(entry.getKey()); + if (factory != null) { + processors.add(factory.create(entry.getValue())); } else { throw new IllegalArgumentException("No processor type exist with name [" + entry.getKey() + "]"); } } } } - } - - public void setDescription(String description) { - this.description = description; - } - - public void addProcessors(Processor.Builder... 
processors) throws IOException { - for (Processor.Builder processor : processors) { - this.processors.add(processor.build()); - } - } - - public Pipeline build() { return new Pipeline(id, description, Collections.unmodifiableList(processors)); } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 6efb3ce06f4..9916e24a587 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -39,40 +39,24 @@ public interface Processor { void execute(Data data); /** - * A builder to construct a processor to be used in a pipeline. + * A factory that knows how to construct a processor based on a map of maps. */ - interface Builder { + interface Factory extends Closeable { /** - * A general way to set processor related settings based on the config map. + * Creates a processor based on the specified map of maps config */ - void fromMap(Map config); + Processor create(Map config) throws IOException; /** - * Builds the processor based on previous set settings. */ - Processor build() throws IOException; - - /** - * A factory that creates a processor builder when processor instances for pipelines are being created. - */ - interface Factory extends Closeable { - - /** - * Creates the builder. 
- */ - Builder create(); - - /** - */ - default void setConfigDirectory(Path configDirectory) { - } - - @Override - default void close() throws IOException { - } + default void setConfigDirectory(Path configDirectory) { } + @Override + default void close() throws IOException { + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 103158d4696..482c71c9119 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -145,47 +145,23 @@ public final class GeoIpProcessor implements Processor { return geoData; } - public static class Builder implements Processor.Builder { + public static class Factory implements Processor.Factory { - private final Path geoIpConfigDirectory; - private final DatabaseReaderService databaseReaderService; + private Path geoIpConfigDirectory; + private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); - private String ipField; - private String databaseFile = "GeoLite2-City.mmdb"; - private String targetField = "geoip"; - - public Builder(Path geoIpConfigDirectory, DatabaseReaderService databaseReaderService) { - this.geoIpConfigDirectory = geoIpConfigDirectory; - this.databaseReaderService = databaseReaderService; - } - - public void setIpField(String ipField) { - this.ipField = ipField; - } - - public void setDatabaseFile(String dbPath) { - this.databaseFile = dbPath; - } - - public void setTargetField(String targetField) { - this.targetField = targetField; - } - - public void fromMap(Map config) { - this.ipField = (String) config.get("ip_field"); + public Processor create(Map config) throws IOException { + String ipField = (String) config.get("ip_field"); String targetField = (String) config.get("target_field"); - if 
(targetField != null) { - this.targetField = targetField; + if (targetField == null) { + targetField = "geoip"; } String databaseFile = (String) config.get("database_file"); - if (databaseFile != null) { - this.databaseFile = databaseFile; + if (databaseFile == null) { + databaseFile = "GeoLite2-City.mmdb"; } - } - @Override - public Processor build() throws IOException { Path databasePath = geoIpConfigDirectory.resolve(databaseFile); if (Files.exists(databasePath)) { try (InputStream database = Files.newInputStream(databasePath, StandardOpenOption.READ)) { @@ -197,27 +173,15 @@ public final class GeoIpProcessor implements Processor { } } - public static class Factory implements Processor.Builder.Factory { - - private Path geoIpConfigDirectory; - private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); - - @Override - public Processor.Builder create() { - return new Builder(geoIpConfigDirectory, databaseReaderService); - } - - @Override - public void setConfigDirectory(Path configDirectory) { - geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); - } - - @Override - public void close() throws IOException { - databaseReaderService.close(); - } + @Override + public void setConfigDirectory(Path configDirectory) { + geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); } + @Override + public void close() throws IOException { + databaseReaderService.close(); + } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 87d6e9b4a11..9c1dcb44da6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -36,12 +36,10 @@ public final class GrokProcessor implements Processor { public static final String TYPE = "grok"; 
private final String matchField; - private final String matchPattern; private final Grok grok; - public GrokProcessor(Grok grok, String matchField, String matchPattern) throws IOException { + public GrokProcessor(Grok grok, String matchField) throws IOException { this.matchField = matchField; - this.matchPattern = matchPattern; this.grok = grok; } @@ -57,31 +55,12 @@ public final class GrokProcessor implements Processor { } } - public static class Builder implements Processor.Builder { - + public static class Factory implements Processor.Factory { private Path grokConfigDirectory; - private String matchField; - private String matchPattern; - public Builder(Path grokConfigDirectory) { - this.grokConfigDirectory = grokConfigDirectory; - } - - public void setMatchField(String matchField) { - this.matchField = matchField; - } - - public void setMatchPattern(String matchPattern) { - this.matchPattern = matchPattern; - } - - public void fromMap(Map config) { - this.matchField = (String) config.get("field"); - this.matchPattern = (String) config.get("pattern"); - } - - @Override - public Processor build() throws IOException { + public Processor create(Map config) throws IOException { + String matchField = (String) config.get("field"); + String matchPattern = (String) config.get("pattern"); Map patternBank = new HashMap<>(); Path patternsDirectory = grokConfigDirectory.resolve("patterns"); try (DirectoryStream stream = Files.newDirectoryStream(patternsDirectory)) { @@ -93,22 +72,13 @@ public final class GrokProcessor implements Processor { } Grok grok = new Grok(patternBank, matchPattern); - return new GrokProcessor(grok, matchField, matchPattern); + return new GrokProcessor(grok, matchField); } - public static class Factory implements Processor.Builder.Factory { - private Path grokConfigDirectory; - - @Override - public Processor.Builder create() { - return new Builder(grokConfigDirectory); - } - - @Override - public void setConfigDirectory(Path configDirectory) { - 
this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok"); - } + @Override + public void setConfigDirectory(Path configDirectory) { + this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok"); } - } + } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java index 2892f0e6250..94d8a3bdc0f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java @@ -52,50 +52,16 @@ public final class SimpleProcessor implements Processor { } } - public static class Builder implements Processor.Builder { + public static class Factory implements Processor.Factory { - private String path; - private String expectedValue; - private String addField; - private String addFieldValue; - - public void setPath(String path) { - this.path = path; - } - - public void setExpectedValue(String value) { - this.expectedValue = value; - } - - public void setAddField(String addField) { - this.addField = addField; - } - - public void setAddFieldValue(String addFieldValue) { - this.addFieldValue = addFieldValue; - } - - public void fromMap(Map config) { - this.path = (String) config.get("path"); - this.expectedValue = (String) config.get("expected_value"); - this.addField = (String) config.get("add_field"); - this.addFieldValue = (String) config.get("add_field_value"); - } - - @Override - public Processor build() { + public Processor create(Map config) { + String path = (String) config.get("path"); + String expectedValue = (String) config.get("expected_value"); + String addField = (String) config.get("add_field"); + String addFieldValue = (String) config.get("add_field_value"); return new SimpleProcessor(path, expectedValue, addField, addFieldValue); } - public static class Factory 
implements Processor.Builder.Factory { - - @Override - public Processor.Builder create() { - return new Builder(); - } - - } - } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index f8903cf5aa0..194716d0f58 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -32,7 +32,7 @@ import java.util.Map; public class IngestModule extends AbstractModule { - private final Map> processors = new HashMap<>(); + private final Map processors = new HashMap<>(); @Override protected void configure() { @@ -41,18 +41,21 @@ public class IngestModule extends AbstractModule { binder().bind(PipelineStore.class).asEagerSingleton(); binder().bind(PipelineStoreClient.class).asEagerSingleton(); - registerProcessor(SimpleProcessor.TYPE, SimpleProcessor.Builder.Factory.class); - registerProcessor(GeoIpProcessor.TYPE, GeoIpProcessor.Builder.Factory.class); - registerProcessor(GrokProcessor.TYPE, GrokProcessor.Builder.Factory.class); + addProcessor(SimpleProcessor.TYPE, new SimpleProcessor.Factory()); + addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); + addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); - MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Builder.Factory.class); - for (Map.Entry> entry : processors.entrySet()) { - mapBinder.addBinding(entry.getKey()).to(entry.getValue()); + MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Factory.class); + for (Map.Entry entry : processors.entrySet()) { + mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue()); } } - public void registerProcessor(String processorType, Class processorFactory) { - processors.put(processorType, processorFactory); + /** + * Adds a processor factory under a specific type 
name. + */ + public void addProcessor(String type, Processor.Factory factory) { + processors.put(type, factory); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 70cd45bad8d..38e406919c5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -48,18 +48,19 @@ public class PipelineStore extends AbstractLifecycleComponent { private final ClusterService clusterService; private final TimeValue pipelineUpdateInterval; private final PipelineStoreClient client; - private final Map processorFactoryRegistry; + private final Pipeline.Factory factory = new Pipeline.Factory(); + private final Map processorFactoryRegistry; private volatile Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, ThreadPool threadPool, Environment environment, ClusterService clusterService, PipelineStoreClient client, Map processors) { + public PipelineStore(Settings settings, ThreadPool threadPool, Environment environment, ClusterService clusterService, PipelineStoreClient client, Map processors) { super(settings); this.threadPool = threadPool; this.clusterService = clusterService; this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); this.client = client; - for (Processor.Builder.Factory factory : processors.values()) { + for (Processor.Factory factory : processors.values()) { factory.setConfigDirectory(environment.configFile()); } this.processorFactoryRegistry = Collections.unmodifiableMap(processors); @@ -76,7 +77,7 @@ public class PipelineStore extends AbstractLifecycleComponent { @Override protected void doClose() { - for (Processor.Builder.Factory factory : processorFactoryRegistry.values()) { + for (Processor.Factory factory : 
processorFactoryRegistry.values()) { try { factory.close(); } catch (IOException e) { @@ -130,9 +131,8 @@ public class PipelineStore extends AbstractLifecycleComponent { } changed++; - Pipeline.Builder builder = new Pipeline.Builder(hit.getId()); - builder.fromMap(hit.sourceAsMap(), processorFactoryRegistry); - newPipelines.put(pipelineId, new PipelineReference(builder.build(), hit.getVersion(), pipelineSource)); + Pipeline pipeline = factory.create(hit.getId(), hit.sourceAsMap(), processorFactoryRegistry); + newPipelines.put(pipelineId, new PipelineReference(pipeline, hit.getVersion(), pipelineSource)); } int removed = 0; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorBuilderTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java similarity index 69% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorBuilderTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java index 1d026e18df7..f681600db72 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorBuilderTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java @@ -30,36 +30,38 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; -public class GeoProcessorBuilderTests extends ESTestCase { +public class GeoProcessorFactoryTests extends ESTestCase { - private Path geoIpConfigDir; + private Path configDir; @Before public void prepareConfigDirectory() throws Exception { - geoIpConfigDir = createTempDir(); + this.configDir = createTempDir(); + Path geoIpConfigDir = configDir.resolve("ingest").resolve("geoip"); + Files.createDirectories(geoIpConfigDir); Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), 
geoIpConfigDir.resolve("GeoLite2-City.mmdb")); Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), geoIpConfigDir.resolve("GeoLite2-Country.mmdb")); } public void testBuild_defaults() throws Exception { - GeoIpProcessor.Builder builder = new GeoIpProcessor.Builder(geoIpConfigDir, new DatabaseReaderService()); - builder.fromMap(Collections.emptyMap()); - GeoIpProcessor processor = (GeoIpProcessor) builder.build(); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); + factory.setConfigDirectory(configDir); + GeoIpProcessor processor = (GeoIpProcessor) factory.create(Collections.emptyMap()); assertThat(processor.dbReader.getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); } public void testBuild_dbFile() throws Exception { - GeoIpProcessor.Builder builder = new GeoIpProcessor.Builder(geoIpConfigDir, new DatabaseReaderService()); - builder.fromMap(Collections.singletonMap("database_file", "GeoLite2-Country.mmdb")); - GeoIpProcessor processor = (GeoIpProcessor) builder.build(); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); + factory.setConfigDirectory(configDir); + GeoIpProcessor processor = (GeoIpProcessor) factory.create(Collections.singletonMap("database_file", "GeoLite2-Country.mmdb")); assertThat(processor.dbReader.getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); } public void testBuild_nonExistingDbFile() throws Exception { - GeoIpProcessor.Builder builder = new GeoIpProcessor.Builder(geoIpConfigDir, new DatabaseReaderService()); - builder.fromMap(Collections.singletonMap("database_file", "does-not-exist.mmdb")); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); + factory.setConfigDirectory(configDir); try { - builder.build(); + factory.create(Collections.singletonMap("database_file", "does-not-exist.mmdb")); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), startsWith("database file [does-not-exist.mmdb] doesn't exist in")); } 
diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 7dfddb3e1cc..f970f24b237 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -28,8 +28,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import java.util.Arrays; import java.util.Collections; -import java.util.Map; import static org.mockito.Matchers.any; import static org.mockito.Mockito.*; @@ -67,20 +67,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { } public void testExecute_success() throws Exception { - Pipeline.Builder builder = new Pipeline.Builder("_id"); Processor processor = mock(Processor.class); - builder.addProcessors(new Processor.Builder() { - @Override - public void fromMap(Map config) { - } - - @Override - public Processor build() { - return processor; - } - }); - - when(store.get("_id")).thenReturn(builder.build()); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); @@ -96,20 +84,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { } public void testExecute_failure() throws Exception { - Pipeline.Builder builder = new Pipeline.Builder("_id"); Processor processor = mock(Processor.class); - builder.addProcessors(new Processor.Builder() { - @Override - public void fromMap(Map config) { - } - - @Override - public Processor build() { - return processor; - } - }); - - when(store.get("_id")).thenReturn(builder.build()); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", 
Arrays.asList(processor))); Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(data); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index fc810fb7247..46271a1c4bb 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -57,7 +57,7 @@ public class PipelineStoreTests extends ESTestCase { ClusterService clusterService = mock(ClusterService.class); client = mock(PipelineStoreClient.class); Environment environment = mock(Environment.class); - store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Builder.Factory())); + store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Factory())); store.start(); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index f338992d7c0..2f03831c001 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -39,6 +39,8 @@ import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import java.util.Arrays; + import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; 
@@ -162,14 +164,7 @@ public class IngestActionFilterTests extends ESTestCase { .build() ); PipelineStore store = mock(PipelineStore.class); - Pipeline.Builder pipelineBuilder = new Pipeline.Builder("_id"); - SimpleProcessor.Builder processorBuilder = new SimpleProcessor.Builder(); - processorBuilder.setPath("field1"); - processorBuilder.setExpectedValue("value1"); - processorBuilder.setAddField("field2"); - processorBuilder.setAddFieldValue("value2"); - pipelineBuilder.addProcessors(processorBuilder); - when(store.get("_id")).thenReturn(pipelineBuilder.build()); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(new SimpleProcessor("field1", "value1", "field2", "value2")))); executionService = new PipelineExecutionService(store, threadPool); filter = new IngestActionFilter(Settings.EMPTY, executionService); From b45815da362fe4e2d15513df77dc821ebf337713 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 4 Nov 2015 14:56:03 +0100 Subject: [PATCH 027/347] adapt to changes upstream --- .../ingest/processor/date/DateProcessor.java | 59 +++---------------- .../plugin/ingest/IngestModule.java | 5 +- 2 files changed, 10 insertions(+), 54 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 4d4beed1dda..8538ee62f9d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.processor.Processor; import org.joda.time.DateTimeZone; -import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; @@ -86,60 +85,18 @@ public final class DateProcessor implements Processor { } } - public static class Builder implements Processor.Builder { - - 
private String timezone; - private String locale; - private String matchField; - private List matchFormats; - private String targetField; - - public Builder() { - matchFormats = new ArrayList(); - } - - public void setTimezone(String timezone) { - this.timezone = timezone; - } - - public void setLocale(String locale) { - this.locale = locale; - } - - public void setMatchField(String matchField) { - this.matchField = matchField; - } - - public void addMatchFormat(String matchFormat) { - matchFormats.add(matchFormat); - } - - public void setTargetField(String targetField) { - this.targetField = targetField; - } + public static class Factory implements Processor.Factory { @SuppressWarnings("unchecked") - public void fromMap(Map config) { - this.timezone = (String) config.get("timezone"); - this.locale = (String) config.get("locale"); - this.matchField = (String) config.get("match_field"); - this.matchFormats = (List) config.get("match_formats"); - this.targetField = (String) config.get("target_field"); - } - - @Override - public Processor build() { + public Processor create(Map config) { + //TODO handle default values + String timezone = (String) config.get("timezone"); + String locale = (String) config.get("locale"); + String matchField = (String) config.get("match_field"); + List matchFormats = (List) config.get("match_formats"); + String targetField = (String) config.get("target_field"); return new DateProcessor(timezone, locale, matchField, matchFormats, targetField); } - - public static class Factory implements Processor.Builder.Factory { - - @Override - public Processor.Builder create() { - return new Builder(); - } - } - } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index ced3e930f1e..d7a52d0315c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -22,10 +22,10 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; import org.elasticsearch.ingest.processor.simple.SimpleProcessor; -import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import java.util.HashMap; @@ -45,8 +45,7 @@ public class IngestModule extends AbstractModule { addProcessor(SimpleProcessor.TYPE, new SimpleProcessor.Factory()); addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); - - //TODO addProcessor(DateProcessor.TYPE, DateProcessor.Builder.Factory.class); + addProcessor(DateProcessor.TYPE, new DateProcessor.Factory()); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Factory.class); for (Map.Entry entry : processors.entrySet()) { From 92452ff99aa2b66093576b452c2f9d017c012442 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 5 Nov 2015 14:35:49 +0700 Subject: [PATCH 028/347] rename the `ingest` parameter to `pipeline_id` param, because it is more descriptive what the parameter should hold. 
--- .../elasticsearch/plugin/ingest/IngestPlugin.java | 6 +++--- .../plugin/ingest/rest/IngestRestFilter.java | 6 +++--- .../ingest/transport/IngestActionFilter.java | 8 ++++---- .../org/elasticsearch/ingest/IngestClientIT.java | 4 ++-- .../ingest/transport/IngestActionFilterTests.java | 14 +++++++------- .../resources/rest-api-spec/api/ingest.bulk.json | 2 +- .../resources/rest-api-spec/api/ingest.index.json | 2 +- .../test/ingest/30_simple_processor.yaml | 4 ++-- .../rest-api-spec/test/ingest/40_grok.yaml | 2 +- .../test/ingest/50_geoip_processor.yaml | 4 ++-- 10 files changed, 26 insertions(+), 26 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index cb5220073da..c5efe74db26 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -50,9 +50,9 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class IngestPlugin extends Plugin { - public static final String INGEST_PARAM_CONTEXT_KEY = "__ingest__"; - public static final String INGEST_PARAM = "ingest"; - public static final String INGEST_ALREADY_PROCESSED = "ingest_already_processed"; + public static final String PIPELINE_ID_PARAM_CONTEXT_KEY = "__pipeline_id__"; + public static final String PIPELINE_ID_PARAM = "pipeline_id"; + public static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; public static final String NAME = "ingest"; private final Settings nodeSettings; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java index 8b9bbb089e4..fe7af03b0ae 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.rest.*; import static org.elasticsearch.plugin.ingest.IngestPlugin.*; -import static org.elasticsearch.plugin.ingest.IngestPlugin.INGEST_PARAM_CONTEXT_KEY; +import static org.elasticsearch.plugin.ingest.IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY; public class IngestRestFilter extends RestFilter { @@ -34,8 +34,8 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - if (request.hasParam(INGEST_PARAM)) { - request.putInContext(INGEST_PARAM_CONTEXT_KEY, request.param(INGEST_PARAM)); + if (request.hasParam(PIPELINE_ID_PARAM)) { + request.putInContext(PIPELINE_ID_PARAM_CONTEXT_KEY, request.param(PIPELINE_ID_PARAM)); } filterChain.continueProcessing(request, channel); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index aec9c4554c7..3483544a8ce 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -48,9 +48,9 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte @Override public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(IngestPlugin.INGEST_PARAM_CONTEXT_KEY); + String pipelineId = request.getFromContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY); if (pipelineId == null) { - pipelineId = request.getHeader(IngestPlugin.INGEST_PARAM); + pipelineId = request.getHeader(IngestPlugin.PIPELINE_ID_PARAM); if (pipelineId == null) { 
chain.proceed(action, request, listener); return; @@ -76,7 +76,7 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte // The IndexRequest has the same type on the node that receives the request and the node that // processes the primary action. This could lead to a pipeline being executed twice for the same // index request, hence this check - if (indexRequest.hasHeader(IngestPlugin.INGEST_ALREADY_PROCESSED)) { + if (indexRequest.hasHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED)) { chain.proceed(action, indexRequest, listener); return; } @@ -89,7 +89,7 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte if (data.isModified()) { indexRequest.source(data.getDocument()); } - indexRequest.putHeader(IngestPlugin.INGEST_ALREADY_PROCESSED, true); + indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); chain.proceed(action, indexRequest, listener); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index b1c982bef14..fe860b742d6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -91,7 +91,7 @@ public class IngestClientIT extends ESIntegTestCase { assertAcked(putMappingResponse); client().prepareIndex("test", "type", "1").setSource("field1", "123.42 400 ") - .putHeader("ingest", "_id") + .putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id") .get(); assertBusy(new Runnable() { @@ -107,7 +107,7 @@ public class IngestClientIT extends ESIntegTestCase { client().prepareBulk().add( client().prepareIndex("test", "type", "2").setSource("field1", "123.42 400 ") - ).putHeader("ingest", "_id").get(); + ).putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id").get(); assertBusy(new Runnable() { @Override public void run() { diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 2f03831c001..a4c71a6a729 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -71,7 +71,7 @@ public class IngestActionFilterTests extends ESTestCase { public void testApplyIngestIdViaRequestParam() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -84,7 +84,7 @@ public class IngestActionFilterTests extends ESTestCase { public void testApplyIngestIdViaContext() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putInContext(IngestPlugin.INGEST_PARAM_CONTEXT_KEY, "_id"); + indexRequest.putInContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -97,8 +97,8 @@ public class IngestActionFilterTests extends ESTestCase { public void testApplyAlreadyProcessed() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); - indexRequest.putHeader(IngestPlugin.INGEST_ALREADY_PROCESSED, true); + indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + 
indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -111,7 +111,7 @@ public class IngestActionFilterTests extends ESTestCase { public void testApply_executed() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -135,7 +135,7 @@ public class IngestActionFilterTests extends ESTestCase { public void testApply_failed() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -169,7 +169,7 @@ public class IngestActionFilterTests extends ESTestCase { filter = new IngestActionFilter(Settings.EMPTY, executionService); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestPlugin.INGEST_PARAM, "_id"); + bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); int numRequest = scaledRandomIntBetween(8, 64); for (int i = 0; i < numRequest; i++) { if (rarely()) { diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json index 5a4c7a856b9..5595c2007ff 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json @@ -41,7 +41,7 @@ "type": "list", "description" : 
"Default comma-separated list of fields to return in the response for updates" }, - "ingest" : { + "pipeline_id" : { "type" : "string", "description" : "The pipeline id to preprocess incoming documents with" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json index 23d3337bf60..02dc30b2f3b 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json @@ -66,7 +66,7 @@ "options" : ["internal", "external", "external_gte", "force"], "description" : "Specific version type" }, - "ingest" : { + "pipeline_id" : { "type" : "string", "description" : "The pipeline id to preprocess incoming documents with" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml index cd7e45ca3ae..5fa10e10122 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml @@ -36,7 +36,7 @@ index: test type: test id: 1 - ingest: "my_pipeline" + pipeline_id: "my_pipeline" body: {field1: "_value"} - do: @@ -49,7 +49,7 @@ - do: ingest.bulk: - ingest: "my_pipeline" + pipeline_id: "my_pipeline" body: - '{ "index": { "_index": "test", "_type": "test", "_id": "2" } }' - '{ "field1": "_value" }' diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml index a119c59c524..d59a3b53ff5 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml @@ -34,7 +34,7 @@ index: test type: test id: 1 - ingest: "my_pipeline" + pipeline_id: "my_pipeline" body: 
{field1: "123.42 400 "} - do: diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml index 7aa0a1c8e2e..05912572562 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml @@ -33,7 +33,7 @@ index: test type: test id: 1 - ingest: "my_pipeline" + pipeline_id: "my_pipeline" body: {field1: "128.101.101.101"} - do: @@ -90,7 +90,7 @@ index: test type: test id: 1 - ingest: "my_pipeline" + pipeline_id: "my_pipeline" body: {field1: "128.101.101.101"} - do: From e7f0f0ed4e8765b276038404e60bcad946becdd7 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 5 Nov 2015 16:30:11 +0700 Subject: [PATCH 029/347] Enforce strict pipeline configuration Closes #14552 --- .../org/elasticsearch/ingest/Pipeline.java | 6 +- .../ingest/processor/ConfigurationUtils.java | 51 ++++++++++++ .../processor/geoip/GeoIpProcessor.java | 35 ++++---- .../ingest/processor/grok/GrokProcessor.java | 19 ++++- .../processor/simple/SimpleProcessor.java | 9 ++- .../plugin/ingest/PipelineStore.java | 6 +- .../put/PutPipelineTransportAction.java | 17 +++- .../ingest/PipelineFactoryTests.java | 80 +++++++++++++++++++ .../geoip/GeoProcessorFactoryTests.java | 38 +++++++-- .../grok/GrokProcessorFactoryTests.java | 57 +++++++++++++ .../rest-api-spec/test/ingest/20_crud.yaml | 37 +++++++++ 11 files changed, 324 insertions(+), 31 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index f318de721f6..a719c4ec727 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -82,7 +82,11 @@ public final class Pipeline { for (Map.Entry> entry : processor.entrySet()) { Processor.Factory factory = processorRegistry.get(entry.getKey()); if (factory != null) { - processors.add(factory.create(entry.getValue())); + Map processorConfig = entry.getValue(); + processors.add(factory.create(processorConfig)); + if (processorConfig.isEmpty() == false) { + throw new IllegalArgumentException("processor [" + entry.getKey() + "] doesn't support one or more provided configuration parameters [" + Arrays.toString(processorConfig.keySet().toArray()) + "]"); + } } else { throw new IllegalArgumentException("No processor type exist with name [" + entry.getKey() + "]"); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java new file mode 100644 index 00000000000..7e4de927f74 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor; + +import java.util.Map; + +public final class ConfigurationUtils { + + private ConfigurationUtils() { + } + + /** + * Returns and removes the specified property from the specified configuration map. + * + * If the property value isn't of type string a {@link IllegalArgumentException} is thrown. + * If the property is missing and no default value has been specified a {@link IllegalArgumentException} is thrown + */ + public static String readStringProperty(Map configuration, String propertyName, String defaultValue) { + Object value = configuration.remove(propertyName); + if (value == null && defaultValue != null) { + return defaultValue; + } else if (value == null) { + throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); + } + + if (value instanceof String) { + return (String) value; + } else { + throw new IllegalArgumentException("property [" + propertyName + "] isn't a string, but of type [" + value.getClass() + "]"); + } + } + + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 482c71c9119..77d9833d0ab 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -27,6 +27,7 @@ import com.maxmind.geoip2.record.*; import org.elasticsearch.SpecialPermission; import 
org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; import java.io.InputStream; @@ -41,18 +42,19 @@ import java.security.PrivilegedAction; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringProperty; + public final class GeoIpProcessor implements Processor { public static final String TYPE = "geoip"; private final String ipField; private final String targetField; - // pck-protected visibility for tests: - final DatabaseReader dbReader; + private final DatabaseReader dbReader; GeoIpProcessor(String ipField, DatabaseReader dbReader, String targetField) throws IOException { this.ipField = ipField; - this.targetField = targetField == null ? "geoip" : targetField; + this.targetField = targetField; this.dbReader = dbReader; } @@ -80,6 +82,18 @@ public final class GeoIpProcessor implements Processor { data.addField(targetField, geoData); } + String getIpField() { + return ipField; + } + + String getTargetField() { + return targetField; + } + + DatabaseReader getDbReader() { + return dbReader; + } + private Map retrieveCityGeoData(InetAddress ipAddress) { SecurityManager sm = System.getSecurityManager(); if (sm != null) { @@ -151,19 +165,12 @@ public final class GeoIpProcessor implements Processor { private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); public Processor create(Map config) throws IOException { - String ipField = (String) config.get("ip_field"); - - String targetField = (String) config.get("target_field"); - if (targetField == null) { - targetField = "geoip"; - } - String databaseFile = (String) config.get("database_file"); - if (databaseFile == null) { - databaseFile = "GeoLite2-City.mmdb"; - } + String ipField = readStringProperty(config, "ip_field", null); + String targetField = 
readStringProperty(config, "target_field", "geoip"); + String databaseFile = readStringProperty(config, "database_file", "GeoLite2-City.mmdb"); Path databasePath = geoIpConfigDirectory.resolve(databaseFile); - if (Files.exists(databasePath)) { + if (Files.exists(databasePath) && Files.isRegularFile(databasePath)) { try (InputStream database = Files.newInputStream(databasePath, StandardOpenOption.READ)) { DatabaseReader databaseReader = databaseReaderService.getOrCreateDatabaseReader(databaseFile, database); return new GeoIpProcessor(ipField, databaseReader, targetField); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 9c1dcb44da6..7d7af7ecaa4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor.grok; import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; import java.io.IOException; @@ -55,18 +56,28 @@ public final class GrokProcessor implements Processor { } } + String getMatchField() { + return matchField; + } + + Grok getGrok() { + return grok; + } + public static class Factory implements Processor.Factory { private Path grokConfigDirectory; public Processor create(Map config) throws IOException { - String matchField = (String) config.get("field"); - String matchPattern = (String) config.get("pattern"); + String matchField = ConfigurationUtils.readStringProperty(config, "field", null); + String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern", null); Map patternBank = new HashMap<>(); Path patternsDirectory = grokConfigDirectory.resolve("patterns"); try (DirectoryStream stream = 
Files.newDirectoryStream(patternsDirectory)) { for (Path patternFilePath : stream) { - try(InputStream is = Files.newInputStream(patternFilePath, StandardOpenOption.READ)) { - PatternUtils.loadBankFromStream(patternBank, is); + if (Files.isRegularFile(patternFilePath)) { + try(InputStream is = Files.newInputStream(patternFilePath, StandardOpenOption.READ)) { + PatternUtils.loadBankFromStream(patternBank, is); + } } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java index 94d8a3bdc0f..4ae28989cd5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor.simple; import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; import java.io.IOException; @@ -55,10 +56,10 @@ public final class SimpleProcessor implements Processor { public static class Factory implements Processor.Factory { public Processor create(Map config) { - String path = (String) config.get("path"); - String expectedValue = (String) config.get("expected_value"); - String addField = (String) config.get("add_field"); - String addFieldValue = (String) config.get("add_field_value"); + String path = ConfigurationUtils.readStringProperty(config, "path", null); + String expectedValue = ConfigurationUtils.readStringProperty(config, "expected_value", null); + String addField = ConfigurationUtils.readStringProperty(config, "add_field", null); + String addFieldValue = ConfigurationUtils.readStringProperty(config, "add_field_value", null); return new SimpleProcessor(path, expectedValue, addField, addFieldValue); } diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 38e406919c5..8fc0e4d2d2c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -114,6 +114,10 @@ public class PipelineStore extends AbstractLifecycleComponent { return result; } + public Pipeline constructPipeline(String id, Map config) throws IOException { + return factory.create(id, config, processorFactoryRegistry); + } + void updatePipelines() throws IOException { // note: this process isn't fast or smart, but the idea is that there will not be many pipelines, // so for that reason the goal is to keep the update logic simple. @@ -131,7 +135,7 @@ public class PipelineStore extends AbstractLifecycleComponent { } changed++; - Pipeline pipeline = factory.create(hit.getId(), hit.sourceAsMap(), processorFactoryRegistry); + Pipeline pipeline = constructPipeline(hit.getId(), hit.sourceAsMap()); newPipelines.put(pipelineId, new PipelineReference(pipeline, hit.getVersion(), pipelineSource)); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java index fc5b7e7f124..542c0842627 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java @@ -28,24 +28,37 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.function.Supplier; +import java.io.IOException; +import java.util.Map; public class PutPipelineTransportAction extends HandledTransportAction { private final TransportIndexAction indexAction; + private final PipelineStore pipelineStore; @Inject - public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportIndexAction indexAction) { + public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportIndexAction indexAction, PipelineStore pipelineStore) { super(settings, PutPipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); this.indexAction = indexAction; + this.pipelineStore = pipelineStore; } @Override protected void doExecute(PutPipelineRequest request, ActionListener listener) { + // validates the pipeline and processor configuration: + Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); + try { + pipelineStore.constructPipeline(request.id(), pipelineConfig); + } catch (IOException e) { + listener.onFailure(e); + return; + } + IndexRequest indexRequest = new IndexRequest(); indexRequest.index(PipelineStore.INDEX); indexRequest.type(PipelineStore.TYPE); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java new file mode 100644 index 00000000000..4bc63dcbc24 --- /dev/null +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.simple.SimpleProcessor; +import org.elasticsearch.test.ESTestCase; +import org.junit.Test; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class PipelineFactoryTests extends ESTestCase { + + public void testCreate() throws Exception { + Pipeline.Factory factory = new Pipeline.Factory(); + Map processorRegistry = new HashMap<>(); + processorRegistry.put("simple", new SimpleProcessor.Factory()); + + Map processorConfig = new HashMap<>(); + processorConfig.put("path", "_path"); + processorConfig.put("expected_value", "_expected_value"); + processorConfig.put("add_field", "_add_field"); + processorConfig.put("add_field_value", "_add_field_value"); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("description", "_description"); + pipelineConfig.put("processors", 
Collections.singletonList(Collections.singletonMap("simple", processorConfig))); + Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); + + assertThat(pipeline.getId(), equalTo("_id")); + assertThat(pipeline.getDescription(), equalTo("_description")); + assertThat(pipeline.getProcessors().size(), equalTo(1)); + assertThat(pipeline.getProcessors().get(0), instanceOf(SimpleProcessor.class)); + } + + public void testCreate_unusedProcessorOptions() throws Exception { + Pipeline.Factory factory = new Pipeline.Factory(); + Map processorRegistry = new HashMap<>(); + processorRegistry.put("simple", new SimpleProcessor.Factory()); + + Map processorConfig = new HashMap<>(); + processorConfig.put("path", "_path"); + processorConfig.put("expected_value", "_expected_value"); + processorConfig.put("add_field", "_add_field"); + processorConfig.put("add_field_value", "_add_field_value"); + processorConfig.put("foo", "bar"); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("description", "_description"); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("simple", processorConfig))); + + try { + factory.create("_id", pipelineConfig, processorRegistry); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("processor [simple] doesn't support one or more provided configuration parameters [[foo]]")); + } + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java index f681600db72..7a0119ce10b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java @@ -29,6 +29,8 @@ import java.io.ByteArrayInputStream; import java.nio.file.Files; import java.nio.file.Path; import 
java.util.Collections; +import java.util.HashMap; +import java.util.Map; public class GeoProcessorFactoryTests extends ESTestCase { @@ -46,22 +48,48 @@ public class GeoProcessorFactoryTests extends ESTestCase { public void testBuild_defaults() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); factory.setConfigDirectory(configDir); - GeoIpProcessor processor = (GeoIpProcessor) factory.create(Collections.emptyMap()); - assertThat(processor.dbReader.getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); + + Map config = new HashMap<>(); + config.put("ip_field", "_field"); + + GeoIpProcessor processor = (GeoIpProcessor) factory.create(config); + assertThat(processor.getIpField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("geoip")); + assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); + } + + public void testBuild_targetField() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); + factory.setConfigDirectory(configDir); + Map config = new HashMap<>(); + config.put("ip_field", "_field"); + config.put("target_field", "_field"); + GeoIpProcessor processor = (GeoIpProcessor) factory.create(config); + assertThat(processor.getIpField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("_field")); } public void testBuild_dbFile() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); factory.setConfigDirectory(configDir); - GeoIpProcessor processor = (GeoIpProcessor) factory.create(Collections.singletonMap("database_file", "GeoLite2-Country.mmdb")); - assertThat(processor.dbReader.getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); + Map config = new HashMap<>(); + config.put("ip_field", "_field"); + config.put("database_file", "GeoLite2-Country.mmdb"); + GeoIpProcessor processor = (GeoIpProcessor) factory.create(config); + assertThat(processor.getIpField(), equalTo("_field")); + 
assertThat(processor.getTargetField(), equalTo("geoip")); + assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); } public void testBuild_nonExistingDbFile() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); factory.setConfigDirectory(configDir); + + Map config = new HashMap<>(); + config.put("ip_field", "_field"); + config.put("database_file", "does-not-exist.mmdb"); try { - factory.create(Collections.singletonMap("database_file", "does-not-exist.mmdb")); + factory.create(config); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), startsWith("database file [does-not-exist.mmdb] doesn't exist in")); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java new file mode 100644 index 00000000000..25a8321575d --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.grok; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class GrokProcessorFactoryTests extends ESTestCase { + + private Path configDir; + + @Before + public void prepareConfigDirectory() throws Exception { + this.configDir = createTempDir(); + Path grokDir = configDir.resolve("ingest").resolve("grok"); + Path patternsDir = grokDir.resolve("patterns"); + Files.createDirectories(patternsDir); + } + + public void testBuild() throws Exception { + GrokProcessor.Factory factory = new GrokProcessor.Factory(); + factory.setConfigDirectory(configDir); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("pattern", "(?\\w+)"); + GrokProcessor processor = (GrokProcessor) factory.create(config); + assertThat(processor.getMatchField(), equalTo("_field")); + assertThat(processor.getGrok(), notNullValue()); + } + +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index 56e12cdd59b..4c982ddb914 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -55,3 +55,40 @@ catch: missing ingest.get_pipeline: ids: "my_pipeline" + +--- +"Test invalid config": + - do: + cluster.health: + wait_for_status: green + + - do: + catch: param + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "geoip" : { + } + } + ] + } + + - do: + catch: param + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "geoip" : { + "ip_field" : 1234 + } + } + ] + } 
From 7798c6cd495c7cc760258a1047058b9816eb5736 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 6 Nov 2015 12:05:30 +0100 Subject: [PATCH 030/347] unified date parser tests in a single test class, added more tests for date processor --- plugins/ingest/build.gradle | 1 + .../ingest/processor/date/DateParser.java | 12 +- .../processor/date/DateParserFactory.java | 48 +++++++ .../ingest/processor/date/DateProcessor.java | 77 +++++----- .../processor/date/ISO8601DateParser.java | 11 +- .../processor/date/JodaPatternDateParser.java | 23 --- .../processor/date/TAI64NDateParser.java | 13 +- .../ingest/processor/date/UnixDateParser.java | 9 +- .../processor/date/UnixMsDateParser.java | 7 +- .../processor/date/DateParserTests.java | 75 ++++++++++ .../processor/date/DateProcessorTests.java | 131 ++++++++++++++++++ .../date/ISO8601DateParserTests.java | 40 ------ .../date/JodaPatternDateParserTests.java | 43 ------ .../processor/date/TAI64NDateParserTests.java | 36 ----- .../processor/date/UnixDateParserTests.java | 33 ----- .../processor/date/UnixMsDateParserTests.java | 33 ----- 16 files changed, 312 insertions(+), 280 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java delete mode 100644 
plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 4aca00bcb9f..90da4cdfcd7 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -28,6 +28,7 @@ dependencies { // we don't use Maxmind's http service: exclude group: 'com.google.http-client', module: 'google-http-client' } + compile 'joda-time:joda-time:2.8.2' testCompile 'org.elasticsearch:geolite2-databases:20151029' testCompile 'org.elasticsearch:securemock:1.1' } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java index 587bfc40053..c062dbdac37 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java @@ -21,9 +21,15 @@ package org.elasticsearch.ingest.processor.date; import org.joda.time.DateTime; +/** + * Parser for dates provided as strings. Parses into a joda {@link DateTime} object. + * We use our own joda wrapper as we support some formats that are not supported directly by joda. 
+ * + */ public interface DateParser { - public long parseMillis(String date); - - public DateTime parseDateTime(String date); + /** + * Parser the date provided as a string argument into a joda {@link DateTime} object + */ + DateTime parseDateTime(String date); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java new file mode 100644 index 00000000000..c0041c4ca56 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java @@ -0,0 +1,48 @@ +/* + * Licensed to ElasticSearch and Shay Banon under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. ElasticSearch licenses this + * file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTimeZone; + +import java.util.Locale; + +public class DateParserFactory { + + public static final String ISO8601 = "ISO8601"; + public static final String UNIX = "UNIX"; + public static final String UNIX_MS = "UNIX_MS"; + public static final String TAI64N = "TAI64N"; + + public static DateParser createDateParser(String format, DateTimeZone timezone, Locale locale) { + switch(format) { + case ISO8601: + // TODO(talevy): fallback solution for almost ISO8601 + return new ISO8601DateParser(timezone); + case UNIX: + return new UnixDateParser(timezone); + case UNIX_MS: + return new UnixMsDateParser(timezone); + case TAI64N: + return new TAI64NDateParser(timezone); + default: + return new JodaPatternDateParser(format, timezone, locale); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 8538ee62f9d..d8ea69424c8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -21,80 +21,75 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.processor.Processor; +import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.stream.Collectors; public final class DateProcessor implements Processor { public static final String TYPE = "date"; - public static final String DEFAULT_TARGET_FIELD = "@timestamp"; + static final String DEFAULT_TARGET_FIELD = "@timestamp"; private final DateTimeZone timezone; private final Locale locale; private final String matchField; private final String targetField; - private final List 
matchFormats; - private final List parserList; + private final List dateParsers; - public DateProcessor(String timezone, String locale, String matchField, List matchFormats, String targetField) { - this.timezone = (timezone == null) ? DateTimeZone.UTC : DateTimeZone.forID(timezone); - this.locale = Locale.forLanguageTag(locale); + DateProcessor(DateTimeZone timezone, Locale locale, String matchField, List matchFormats, String targetField) { + this.timezone = timezone; + this.locale = locale; this.matchField = matchField; - this.matchFormats = matchFormats; - this.parserList = matchFormats.stream().map(elt -> getParser(elt)).collect(Collectors.toList()); - this.targetField = (targetField == null) ? DEFAULT_TARGET_FIELD : targetField; + this.targetField = targetField; + this.dateParsers = new ArrayList<>(); + for (String matchFormat : matchFormats) { + dateParsers.add(DateParserFactory.createDateParser(matchFormat, timezone, locale)); + } } @Override public void execute(Data data) { - String value = (String) data.getProperty(matchField); - // TODO(talevy): handle multiple patterns + String value = data.getProperty(matchField); // TODO(talevy): handle custom timestamp fields - String dateAsISO8601 = parserList.get(0).parseDateTime(value).toString(); - data.addField(targetField, dateAsISO8601); - } - private DateParser getParser(String format) { - if ("ISO8601".equals(format)) { - // TODO(talevy): fallback solution for almost ISO8601 - if (timezone == null) { - return new ISO8601DateParser(); - } else { - return new ISO8601DateParser(timezone); - } - } else if ("UNIX".equals(format)) { - return new UnixDateParser(timezone); - } else if ("UNIX_MS".equals(format)) { - return new UnixMsDateParser(timezone); - } else if ("TAI64N".equals(format)) { - return new TAI64NDateParser(timezone); - } else { - if (timezone != null && locale != null) { - return new JodaPatternDateParser(format, timezone, locale); - } else if (timezone != null) { - return new 
JodaPatternDateParser(format, timezone); - } else if (locale != null) { - return new JodaPatternDateParser(format, locale); - } else { - return new JodaPatternDateParser(format); + DateTime dateTime = null; + Exception lastException = null; + for (DateParser dateParser : dateParsers) { + try { + dateTime = dateParser.parseDateTime(value); + } catch(Exception e) { + //TODO is there a better way other than catching exception? + //try the next parser + lastException = e; } } + + if (dateTime == null) { + throw new IllegalArgumentException("unable to parse date [" + value + "]", lastException); + } + + String dateAsISO8601 = dateTime.toString(); + data.addField(targetField, dateAsISO8601); } public static class Factory implements Processor.Factory { @SuppressWarnings("unchecked") public Processor create(Map config) { - //TODO handle default values - String timezone = (String) config.get("timezone"); - String locale = (String) config.get("locale"); + String timezoneString = (String) config.get("timezone"); + DateTimeZone timezone = (timezoneString == null) ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); + String localeString = (String) config.get("locale"); + Locale locale = localeString == null ? 
Locale.ENGLISH : Locale.forLanguageTag(localeString); String matchField = (String) config.get("match_field"); List matchFormats = (List) config.get("match_formats"); String targetField = (String) config.get("target_field"); + if (targetField == null) { + targetField = DEFAULT_TARGET_FIELD; + } return new DateProcessor(timezone, locale, matchField, matchFormats, targetField); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java index aaab1340f61..f480daba61c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java @@ -29,16 +29,7 @@ public class ISO8601DateParser implements DateParser { private final DateTimeFormatter formatter; public ISO8601DateParser(DateTimeZone timezone) { - formatter = ISODateTimeFormat.dateTimeParser().withZone(timezone); - } - - public ISO8601DateParser() { - formatter = ISODateTimeFormat.dateTimeParser().withOffsetParsed(); - } - - @Override - public long parseMillis(String date) { - return formatter.parseMillis(date); + this.formatter = ISODateTimeFormat.dateTimeParser().withZone(timezone); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java index 03afe781cdd..a17c8f1703a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java @@ -30,35 +30,12 @@ public class JodaPatternDateParser implements DateParser { private final DateTimeFormatter formatter; - public JodaPatternDateParser(String format) { - formatter = 
DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) - .withOffsetParsed(); - } - - public JodaPatternDateParser(String format, DateTimeZone timezone) { - formatter = DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime(timezone)).getYear()) - .withZone(timezone); - } - - public JodaPatternDateParser(String format, Locale locale) { - formatter = DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) - .withLocale(locale); - } - public JodaPatternDateParser(String format, DateTimeZone timezone, Locale locale) { formatter = DateTimeFormat.forPattern(format) .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) .withZone(timezone).withLocale(locale); } - @Override - public long parseMillis(String date) { - return formatter.parseMillis(date); - } - @Override public DateTime parseDateTime(String date) { return formatter.parseDateTime(date); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java index f51269b5b48..5e199595a7a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java @@ -30,9 +30,13 @@ public class TAI64NDateParser implements DateParser { } @Override - public long parseMillis(String date) { + public DateTime parseDateTime(String date) { + return new DateTime(parseMillis(date), timezone); + } + + private static long parseMillis(String date) { if (date.startsWith("@")) { - date = date.substring(1); + date = date.substring(1); } long base = Long.parseLong(date.substring(1, 16), 16); // 1356138046000 @@ -40,9 +44,4 @@ public class TAI64NDateParser implements DateParser { return ((base * 1000) - 10000) + (rest/1000000); } - - @Override - public DateTime parseDateTime(String 
date) { - return new DateTime(parseMillis(date), timezone); - } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java index 1fa9c836a2d..83b4168b6cc 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java @@ -29,13 +29,12 @@ public class UnixDateParser implements DateParser { this.timezone = timezone; } - @Override - public long parseMillis(String date) { - return (long) (Float.parseFloat(date) * 1000); - } - @Override public DateTime parseDateTime(String date) { return new DateTime(parseMillis(date), timezone); } + + private static long parseMillis(String date) { + return (long) (Float.parseFloat(date) * 1000); + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java index 640fb52fe32..8e5e5b167b5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java @@ -29,13 +29,8 @@ public class UnixMsDateParser implements DateParser { this.timezone = timezone; } - @Override - public long parseMillis(String date) { - return Long.parseLong(date); - } - @Override public DateTime parseDateTime(String date) { - return new DateTime(parseMillis(date), timezone); + return new DateTime(Long.parseLong(date), timezone); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java new file mode 100644 index 00000000000..19c0e8dba8e --- /dev/null +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.Locale; + +import static org.hamcrest.core.IsEqual.equalTo; + +public class DateParserTests extends ESTestCase { + + public void testJodaPatternParse() { + JodaPatternDateParser parser = new JodaPatternDateParser("MMM dd HH:mm:ss Z", + DateTimeZone.forOffsetHours(-8), Locale.ENGLISH); + + assertThat(Instant.ofEpochMilli(parser.parseDateTime("Nov 24 01:29:01 -0800").getMillis()) + .atZone(ZoneId.of("GMT-8")) + .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), + equalTo("11 24 01:29:01")); + } + + public void testParseUnixMs() { + UnixMsDateParser parser = new UnixMsDateParser(DateTimeZone.UTC); + assertThat(parser.parseDateTime("1000500").getMillis(), equalTo(1000500L)); + } + + public void testUnixParse() { + UnixDateParser parser = new UnixDateParser(DateTimeZone.UTC); + 
assertThat(parser.parseDateTime("1000.5").getMillis(), equalTo(1000500L)); + } + + public void testParseISO8601() { + ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); + assertThat(parser.parseDateTime("2001-01-01T00:00:00-0800").getMillis(), equalTo(978336000000L)); + } + + public void testParseISO8601Failure() { + ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); + try { + parser.parseDateTime("2001-01-0:00-0800"); + fail("parse should have failed"); + } catch(IllegalArgumentException e) { + //all good + } + } + + public void testTAI64NParse() { + TAI64NDateParser parser = new TAI64NDateParser(DateTimeZone.forOffsetHours(2)); + String input = "4000000050d506482dbdf024"; + String expected = "2012-12-22T03:00:46.767+02:00"; + assertThat(parser.parseDateTime((randomBoolean() ? "@" : "") + input).toString(), equalTo(expected)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java new file mode 100644 index 00000000000..f129e437f75 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -0,0 +1,131 @@ +/* + * Licensed to ElasticSearch and Shay Banon under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. ElasticSearch licenses this + * file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.util.*; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; + +public class DateProcessorTests extends ESTestCase { + + public void testJodaPattern() { + DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + "date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date"); + Map document = new HashMap<>(); + document.put("date_as_string", "2010 12 06 11:05:15"); + Data data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T11:05:15.000+02:00")); + } + + public void testJodaPatternMultipleFormats() { + List matchFormats = new ArrayList<>(); + matchFormats.add("yyyy dd MM"); + matchFormats.add("dd/MM/yyyy"); + matchFormats.add("dd-MM-yyyy"); + DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + "date_as_string", matchFormats, "date_as_date"); + + Map document = new HashMap<>(); + document.put("date_as_string", "2010 12 06"); + Data data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + + document = new HashMap<>(); + document.put("date_as_string", "12/06/2010"); + data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + + document = new HashMap<>(); + document.put("date_as_string", "12-06-2010"); + data = new Data("index", "type", "id", 
document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + + document = new HashMap<>(); + document.put("date_as_string", "2010"); + data = new Data("index", "type", "id", document); + try { + dateProcessor.execute(data); + fail("processor should have failed due to not supported date format"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("unable to parse date [2010]")); + } + } + + public void testJodaPatternLocale() { + DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ITALIAN, + "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); + Map document = new HashMap<>(); + document.put("date_as_string", "2010 12 giugno"); + Data data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + } + + public void testJodaPatternDefaultYear() { + DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + "date_as_string", Collections.singletonList("dd/MM"), "date_as_date"); + Map document = new HashMap<>(); + document.put("date_as_string", "12/06"); + Data data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); + } + + public void testTAI64N() { + DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forOffsetHours(2), randomLocale(random()), + "date_as_string", Collections.singletonList(DateParserFactory.TAI64N), "date_as_date"); + Map document = new HashMap<>(); + String dateAsString = (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024"; + document.put("date_as_string", dateAsString); + Data data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("2012-12-22T03:00:46.767+02:00")); + } + + public void testUnixMs() { + DateProcessor dateProcessor = new DateProcessor(DateTimeZone.UTC, randomLocale(random()), + "date_as_string", Collections.singletonList(DateParserFactory.UNIX_MS), "date_as_date"); + Map document = new HashMap<>(); + document.put("date_as_string", "1000500"); + Data data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("1970-01-01T00:16:40.500Z")); + } + + public void testUnix() { + DateProcessor dateProcessor = new DateProcessor(DateTimeZone.UTC, randomLocale(random()), + "date_as_string", Collections.singletonList(DateParserFactory.UNIX), "date_as_date"); + Map document = new HashMap<>(); + document.put("date_as_string", "1000.5"); + Data data = new Data("index", "type", "id", document); + dateProcessor.execute(data); + assertThat(data.getProperty("date_as_date"), equalTo("1970-01-01T00:16:40.500Z")); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java deleted file mode 100644 index a726fa43b8e..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/ISO8601DateParserTests.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; -import org.junit.Test; - -import static org.hamcrest.core.IsEqual.equalTo; - -public class ISO8601DateParserTests extends ESTestCase { - - public void testParseUTC() { - ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); - assertThat(parser.parseMillis("2001-01-01T00:00:00-0800"), equalTo(978336000000L)); - } - - @Test(expected=IllegalArgumentException.class) - public void testParseFailure() { - ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); - parser.parseMillis("2001-01-0:00-0800"); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java deleted file mode 100644 index 268222feb35..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParserTests.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; - -import java.time.Instant; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.util.Locale; - -import static org.hamcrest.core.IsEqual.equalTo; - -public class JodaPatternDateParserTests extends ESTestCase { - - public void testParse() { - JodaPatternDateParser parser = new JodaPatternDateParser("MMM dd HH:mm:ss Z", - DateTimeZone.forOffsetHours(-8), Locale.ENGLISH); - - assertThat(Instant.ofEpochMilli(parser.parseMillis("Nov 24 01:29:01 -0800")) - .atZone(ZoneId.of("GMT-8")) - .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), - equalTo("11 24 01:29:01")); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java deleted file mode 100644 index d0007cfa5d7..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/TAI64NDateParserTests.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; - -import static org.hamcrest.core.IsEqual.equalTo; - -public class TAI64NDateParserTests extends ESTestCase { - - public void testParse() { - TAI64NDateParser parser = new TAI64NDateParser(DateTimeZone.forOffsetHours(2)); - String input = "4000000050d506482dbdf024"; - String expected = "2012-12-22T03:00:46.767+02:00"; - assertThat(parser.parseDateTime("@" + input).toString(), equalTo(expected)); - assertThat(parser.parseDateTime(input).toString(), equalTo(expected)); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java deleted file mode 100644 index 68636d88811..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixDateParserTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; - -import static org.hamcrest.core.IsEqual.equalTo; - -public class UnixDateParserTests extends ESTestCase { - - public void testParse() { - UnixDateParser parser = new UnixDateParser(DateTimeZone.UTC); - assertThat(parser.parseMillis("1000.5"), equalTo(1000500L)); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java deleted file mode 100644 index db2fa506e5e..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/UnixMsDateParserTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; - -import static org.hamcrest.core.IsEqual.equalTo; - -public class UnixMsDateParserTests extends ESTestCase { - - public void testParse() { - UnixMsDateParser parser = new UnixMsDateParser(DateTimeZone.UTC); - assertThat(parser.parseMillis("1000500"), equalTo(1000500L)); - } -} From 588de6ccffb34dcd4a1952020b809c1ec1fd5bf7 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 6 Nov 2015 12:53:20 +0100 Subject: [PATCH 031/347] Add generic type to Processor.Factory and rename Geo*Tests to GeoIp*Tests --- .../elasticsearch/ingest/processor/Processor.java | 5 +++-- .../ingest/processor/geoip/GeoIpProcessor.java | 9 ++++----- .../ingest/processor/grok/GrokProcessor.java | 4 ++-- .../ingest/processor/simple/SimpleProcessor.java | 5 ++--- ...yTests.java => GeoIpProcessorFactoryTests.java} | 14 +++++++------- ...rocessorTests.java => GeoIpProcessorTests.java} | 2 +- .../processor/grok/GrokProcessorFactoryTests.java | 2 +- 7 files changed, 20 insertions(+), 21 deletions(-) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/{GeoProcessorFactoryTests.java => GeoIpProcessorFactoryTests.java} (91%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/{GeoProcessorTests.java => GeoIpProcessorTests.java} (98%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 9916e24a587..a023b78ade4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -41,14 +41,15 @@ public interface Processor { /** * A factory that knows how to construct a processor based 
on a map of maps. */ - interface Factory extends Closeable { + interface Factory

extends Closeable { /** * Creates a processor based on the specified map of maps config */ - Processor create(Map config) throws IOException; + P create(Map config) throws IOException; /** + * Sets the configuration directory when needed to read additional config files */ default void setConfigDirectory(Path configDirectory) { } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 77d9833d0ab..020b88a23e0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -27,14 +27,13 @@ import com.maxmind.geoip2.record.*; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.Data; -import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; +import java.io.IOException; import java.io.InputStream; +import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.file.Files; -import java.io.IOException; -import java.net.InetAddress; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.security.AccessController; @@ -159,12 +158,12 @@ public final class GeoIpProcessor implements Processor { return geoData; } - public static class Factory implements Processor.Factory { + public static class Factory implements Processor.Factory { private Path geoIpConfigDirectory; private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); - public Processor create(Map config) throws IOException { + public GeoIpProcessor create(Map config) throws IOException { String ipField = readStringProperty(config, "ip_field", null); String targetField = readStringProperty(config, "target_field", "geoip"); String 
databaseFile = readStringProperty(config, "database_file", "GeoLite2-City.mmdb"); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 7d7af7ecaa4..9f7a7c78c07 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -64,10 +64,10 @@ public final class GrokProcessor implements Processor { return grok; } - public static class Factory implements Processor.Factory { + public static class Factory implements Processor.Factory { private Path grokConfigDirectory; - public Processor create(Map config) throws IOException { + public GrokProcessor create(Map config) throws IOException { String matchField = ConfigurationUtils.readStringProperty(config, "field", null); String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern", null); Map patternBank = new HashMap<>(); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java index 4ae28989cd5..0bfd07de462 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.Map; public final class SimpleProcessor implements Processor { @@ -53,9 +52,9 @@ public final class SimpleProcessor implements Processor { } } - public static class Factory implements Processor.Factory { + public static class Factory implements Processor.Factory { - public 
Processor create(Map config) { + public SimpleProcessor create(Map config) { String path = ConfigurationUtils.readStringProperty(config, "path", null); String expectedValue = ConfigurationUtils.readStringProperty(config, "expected_value", null); String addField = ConfigurationUtils.readStringProperty(config, "add_field", null); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java similarity index 91% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java index 7a0119ce10b..ad270d18e96 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java @@ -23,16 +23,16 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.junit.Before; -import static org.hamcrest.Matchers.*; - import java.io.ByteArrayInputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collections; import java.util.HashMap; import java.util.Map; -public class GeoProcessorFactoryTests extends ESTestCase { +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; + +public class GeoIpProcessorFactoryTests extends ESTestCase { private Path configDir; @@ -52,7 +52,7 @@ public class GeoProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("ip_field", "_field"); - GeoIpProcessor processor = (GeoIpProcessor) factory.create(config); + GeoIpProcessor processor = factory.create(config); assertThat(processor.getIpField(), equalTo("_field")); assertThat(processor.getTargetField(), 
equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); @@ -64,7 +64,7 @@ public class GeoProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("ip_field", "_field"); config.put("target_field", "_field"); - GeoIpProcessor processor = (GeoIpProcessor) factory.create(config); + GeoIpProcessor processor = factory.create(config); assertThat(processor.getIpField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); } @@ -75,7 +75,7 @@ public class GeoProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("ip_field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); - GeoIpProcessor processor = (GeoIpProcessor) factory.create(config); + GeoIpProcessor processor = factory.create(config); assertThat(processor.getIpField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java index 43b293501ae..735776ec43d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java @@ -29,7 +29,7 @@ import java.util.Map; import static org.hamcrest.Matchers.equalTo; -public class GeoProcessorTests extends ESTestCase { +public class GeoIpProcessorTests extends ESTestCase { public void testCity() throws Exception { InputStream database = 
GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java index 25a8321575d..9291c1bb04a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java @@ -49,7 +49,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("pattern", "(?\\w+)"); - GrokProcessor processor = (GrokProcessor) factory.create(config); + GrokProcessor processor = factory.create(config); assertThat(processor.getMatchField(), equalTo("_field")); assertThat(processor.getGrok(), notNullValue()); } From 682876f7d7978926f87dcb9331a4cf019fe1a653 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 6 Nov 2015 15:44:20 +0100 Subject: [PATCH 032/347] added factory tests --- .../ingest/processor/ConfigurationUtils.java | 56 ++++++- .../ingest/processor/date/DateProcessor.java | 50 ++++-- .../processor/geoip/GeoIpProcessor.java | 2 +- .../ingest/processor/grok/GrokProcessor.java | 4 +- .../processor/simple/SimpleProcessor.java | 6 +- .../date/DateProcessorFactoryTests.java | 142 ++++++++++++++++++ .../geoip/GeoIpProcessorFactoryTests.java | 2 - 7 files changed, 232 insertions(+), 30 deletions(-) create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java index 7e4de927f74..182ff61c421 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import java.util.List; import java.util.Map; public final class ConfigurationUtils { @@ -26,6 +27,26 @@ public final class ConfigurationUtils { private ConfigurationUtils() { } + /** + * Returns and removes the specified optional property from the specified configuration map. + * + * If the property value isn't of type string a {@link IllegalArgumentException} is thrown. + */ + public static String readOptionalStringProperty(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + return readString(propertyName, value); + } + + /** + * Returns and removes the specified property from the specified configuration map. + * + * If the property value isn't of type string an {@link IllegalArgumentException} is thrown. + * If the property is missing an {@link IllegalArgumentException} is thrown + */ + public static String readStringProperty(Map configuration, String propertyName) { + return readStringProperty(configuration, propertyName, null); + } + /** * Returns and removes the specified property from the specified configuration map. 
* @@ -39,13 +60,36 @@ public final class ConfigurationUtils { } else if (value == null) { throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); } - - if (value instanceof String) { - return (String) value; - } else { - throw new IllegalArgumentException("property [" + propertyName + "] isn't a string, but of type [" + value.getClass() + "]"); - } + return readString(propertyName, value); } + private static String readString(String propertyName, Object value) { + if (value == null) { + return null; + } + if (value instanceof String) { + return (String) value; + } + throw new IllegalArgumentException("property [" + propertyName + "] isn't a string, but of type [" + value.getClass().getName() + "]"); + } + /** + * Returns and removes the specified property of type list from the specified configuration map. + * + * If the property value isn't of type list an {@link IllegalArgumentException} is thrown. + * If the property is missing an {@link IllegalArgumentException} is thrown + */ + public static List readStringList(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + if (value == null) { + throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); + } + if (value instanceof List) { + @SuppressWarnings("unchecked") + List stringList = (List) value; + return stringList; + } else { + throw new IllegalArgumentException("property [" + propertyName + "] isn't a list, but of type [" + value.getClass().getName() + "]"); + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index d8ea69424c8..4c61cc8ee7a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -20,9 +20,11 @@ 
package org.elasticsearch.ingest.processor.date; import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import org.joda.time.format.ISODateTimeFormat; import java.util.ArrayList; import java.util.List; @@ -38,6 +40,7 @@ public final class DateProcessor implements Processor { private final Locale locale; private final String matchField; private final String targetField; + private final List matchFormats; private final List dateParsers; DateProcessor(DateTimeZone timezone, Locale locale, String matchField, List matchFormats, String targetField) { @@ -45,6 +48,7 @@ public final class DateProcessor implements Processor { this.locale = locale; this.matchField = matchField; this.targetField = targetField; + this.matchFormats = matchFormats; this.dateParsers = new ArrayList<>(); for (String matchFormat : matchFormats) { dateParsers.add(DateParserFactory.createDateParser(matchFormat, timezone, locale)); @@ -62,8 +66,7 @@ public final class DateProcessor implements Processor { try { dateTime = dateParser.parseDateTime(value); } catch(Exception e) { - //TODO is there a better way other than catching exception? 
- //try the next parser + //try the next parser and keep track of the last exception lastException = e; } } @@ -72,26 +75,41 @@ public final class DateProcessor implements Processor { throw new IllegalArgumentException("unable to parse date [" + value + "]", lastException); } - String dateAsISO8601 = dateTime.toString(); - data.addField(targetField, dateAsISO8601); + data.addField(targetField, ISODateTimeFormat.dateTime().print(dateTime)); } - public static class Factory implements Processor.Factory { + DateTimeZone getTimezone() { + return timezone; + } + + Locale getLocale() { + return locale; + } + + String getMatchField() { + return matchField; + } + + String getTargetField() { + return targetField; + } + + List getMatchFormats() { + return matchFormats; + } + + public static class Factory implements Processor.Factory { @SuppressWarnings("unchecked") - public Processor create(Map config) { - String timezoneString = (String) config.get("timezone"); - DateTimeZone timezone = (timezoneString == null) ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); - String localeString = (String) config.get("locale"); + public DateProcessor create(Map config) { + String matchField = ConfigurationUtils.readStringProperty(config, "match_field"); + String targetField = ConfigurationUtils.readStringProperty(config, "target_field", DEFAULT_TARGET_FIELD); + String timezoneString = ConfigurationUtils.readOptionalStringProperty(config, "timezone"); + DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); + String localeString = ConfigurationUtils.readOptionalStringProperty(config, "locale"); Locale locale = localeString == null ? 
Locale.ENGLISH : Locale.forLanguageTag(localeString); - String matchField = (String) config.get("match_field"); - List matchFormats = (List) config.get("match_formats"); - String targetField = (String) config.get("target_field"); - if (targetField == null) { - targetField = DEFAULT_TARGET_FIELD; - } + List matchFormats = ConfigurationUtils.readStringList(config, "match_formats"); return new DateProcessor(timezone, locale, matchField, matchFormats, targetField); } } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 020b88a23e0..e0ecd61370e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -164,7 +164,7 @@ public final class GeoIpProcessor implements Processor { private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); public GeoIpProcessor create(Map config) throws IOException { - String ipField = readStringProperty(config, "ip_field", null); + String ipField = readStringProperty(config, "ip_field"); String targetField = readStringProperty(config, "target_field", "geoip"); String databaseFile = readStringProperty(config, "database_file", "GeoLite2-City.mmdb"); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 9f7a7c78c07..bdba25c7c78 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -68,8 +68,8 @@ public final class GrokProcessor implements Processor { private Path grokConfigDirectory; public GrokProcessor create(Map config) throws IOException { 
- String matchField = ConfigurationUtils.readStringProperty(config, "field", null); - String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern", null); + String matchField = ConfigurationUtils.readStringProperty(config, "field"); + String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern"); Map patternBank = new HashMap<>(); Path patternsDirectory = grokConfigDirectory.resolve("patterns"); try (DirectoryStream stream = Files.newDirectoryStream(patternsDirectory)) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java index 0bfd07de462..d8dfc230a7b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java @@ -55,10 +55,10 @@ public final class SimpleProcessor implements Processor { public static class Factory implements Processor.Factory { public SimpleProcessor create(Map config) { - String path = ConfigurationUtils.readStringProperty(config, "path", null); - String expectedValue = ConfigurationUtils.readStringProperty(config, "expected_value", null); + String path = ConfigurationUtils.readStringProperty(config, "path"); + String expectedValue = ConfigurationUtils.readStringProperty(config, "expected_value"); String addField = ConfigurationUtils.readStringProperty(config, "add_field", null); - String addFieldValue = ConfigurationUtils.readStringProperty(config, "add_field_value", null); + String addFieldValue = ConfigurationUtils.readStringProperty(config, "add_field_value"); return new SimpleProcessor(path, expectedValue, addField, addFieldValue); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java new file mode 100644 index 00000000000..e637bf19012 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java @@ -0,0 +1,142 @@ +/* + * Licensed to ElasticSearch and Shay Banon under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. ElasticSearch licenses this + * file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; + +import java.util.*; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateProcessorFactoryTests extends ESTestCase { + + public void testBuildDefaults() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); + + DateProcessor processor = factory.create(config); + assertThat(processor.getMatchField(), equalTo(sourceField)); + assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD)); + assertThat(processor.getMatchFormats(), equalTo(Collections.singletonList("dd/MM/yyyyy"))); + assertThat(processor.getLocale(), equalTo(Locale.ENGLISH)); + assertThat(processor.getTimezone(), equalTo(DateTimeZone.UTC)); + } + + public void testMatchFieldIsMandatory() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String targetField = randomAsciiOfLengthBetween(1, 10); + config.put("target_field", targetField); + config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); + + try { + factory.create(config); + fail("processor creation should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("required property [match_field] is missing")); + } + } + + public void testMatchFormatsIsMandatory() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + String targetField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("target_field", targetField); + + 
try { + factory.create(config); + fail("processor creation should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("required property [match_formats] is missing")); + } + } + + public void testParseLocale() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); + Locale locale = randomLocale(random()); + config.put("locale", locale.toLanguageTag()); + + DateProcessor processor = factory.create(config); + assertThat(processor.getLocale(), equalTo(locale)); + } + + public void testParseTimezone() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); + DateTimeZone timeZone = DateTimeZone.forTimeZone(randomTimeZone(random())); + config.put("timezone", timeZone.getID()); + + DateProcessor processor = factory.create(config); + assertThat(processor.getTimezone(), equalTo(timeZone)); + } + + public void testParseMatchFormats() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); + + DateProcessor processor = factory.create(config); + assertThat(processor.getMatchFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"))); + } + + public void testParseMatchFormatsFailure() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = 
randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("match_formats", "dd/MM/yyyy"); + + try { + factory.create(config); + fail("processor creation should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("property [match_formats] isn't a list, but of type [java.lang.String]")); + } + } + + public void testParseTargetField() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + String targetField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("target_field", targetField); + config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); + + DateProcessor processor = factory.create(config); + assertThat(processor.getTargetField(), equalTo(targetField)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java index ad270d18e96..236e91818fb 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java @@ -94,6 +94,4 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(e.getMessage(), startsWith("database file [does-not-exist.mmdb] doesn't exist in")); } } - - } From d318990339e438e25d3318291ea41d7ada96548b Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 6 Nov 2015 16:10:22 +0100 Subject: [PATCH 033/347] added REST test --- .../elasticsearch/ingest/DatePipelineIT.java | 101 ------------------ .../test/ingest/60_date_processor.yaml | 49 +++++++++ 2 files changed, 49 insertions(+), 101 deletions(-) delete mode 100644 
plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_date_processor.yaml diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java deleted file mode 100644 index 370dece2a44..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DatePipelineIT.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.plugin.ingest.IngestPlugin; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineResponse; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; - -import java.util.Collection; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.*; - -public class DatePipelineIT extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return pluginList(IngestPlugin.class); - } - - @Override - protected Collection> transportClientPlugins() { - return nodePlugins(); - } - - public void testBasics() throws Exception { - new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) - .setId("_id") - .setSource(jsonBuilder().startObject() - .field("description", "my_pipeline") - .startArray("processors") - .startObject() - .startObject("date") - .field("timezone", "UTC") - .field("locale", "en") - .field("match_field", "my_date_field") - .startArray("match_formats") - .value("yyyy MMM dd HH:mm:ss Z") - .endArray() - .endObject() - .endObject() - .endArray() - .endObject().bytes()) - .get(); - assertBusy(new Runnable() { - @Override - public void run() { - GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); - 
assertThat(response.isFound(), is(true)); - assertThat(response.pipelines().get("_id"), notNullValue()); - } - }); - - createIndex("test"); - client().prepareIndex("test", "type", "1").setSource("my_date_field", "2015 Nov 24 01:29:01 -0800") - .putHeader("ingest", "_id") - .get(); - - assertBusy(new Runnable() { - @Override - public void run() { - Map doc = client().prepareGet("test", "type", "1") - .get().getSourceAsMap(); - assertThat(doc.get("@timestamp"), equalTo("2015-11-24T09:29:01.000Z")); - } - }); - } - - @Override - protected boolean enableMockModules() { - return false; - } -} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_date_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_date_processor.yaml new file mode 100644 index 00000000000..f2c6d2c0cce --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_date_processor.yaml @@ -0,0 +1,49 @@ +--- +"Test date processor": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "date" : { + "match_field" : "date_source_field", + "target_field" : "date_target_field", + "match_formats" : ["dd/MM/yyyy"], + "timezone" : "Europe/Amsterdam" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {date_source_field: "12/06/2010"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.date_source_field: "12/06/2010" } + - match: { _source.date_target_field: "2010-06-12T00:00:00.000+02:00" } + From a84ce07a90a706afc3199a8cd4291f14d8144143 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 6 Nov 2015 17:48:44 +0100 
Subject: [PATCH 034/347] added docs --- docs/plugins/ingest.asciidoc | 43 ++++++++++++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 43daebc4ece..b3cd98051cf 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -159,7 +159,7 @@ is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too [options="header"] |====== | Name | Required | Default | Description -| `ip_field` | yes | - | The field to get the ip address from for the geographical lookip. +| `ip_field` | yes | - | The field to get the ip address from for the geographical lookup. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. | `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. |====== @@ -171,7 +171,7 @@ and `location`. If the GeoLite2 Country database is used then the following fields will be added under the `target_field`: `ip`, `country_iso_code`, `country_name` and `continent_name`. -An example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field`: +An example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field: [source,js] -------------------------------------------------- @@ -205,6 +205,45 @@ An example that uses the default country database and add the geographical infor } -------------------------------------------------- +==== Date processor + +The date processor is used for parsing dates from fields, and then using that date or timestamp as the timestamp for that document. 
+The date processor adds by default the parsed date as a new field called `@timestamp`, configurable by setting the `target_field` +configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used +sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. + +[[date-options]] +.Date options +[options="header"] +|====== +| Name | Required | Default | Description +| `match_field` | yes | - | The field to get the date from. +| `target_field` | no | @timestamp | The field that will hold the parsed date. +| `match_formats` | yes | - | Array of the expected date formats. Can be a joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, TAI64N. +| `timezone` | no | UTC | The timezone to use when parsing the date. +| `locale` | no | ENGLISH | The locale to use when parsing the date, relevant when parsing month names or week days. +|====== + +An example that adds the parsed date to the `timestamp` field based on the `initial_date` field: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "date" : { + "match_field" : "initial_date", + "target_field" : "timestamp", + "match_formats" : ["dd/MM/yyyy hh:mm:ss"], + "timezone" : "Europe/Amsterdam" + } + } + ] +} +-------------------------------------------------- + + === Put pipeline API The put pipeline api adds pipelines and updates existing pipelines in the cluster. From 1dfe6f6dcf6e0c111e89b16b233cd6639963c2d7 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 6 Nov 2015 19:21:30 +0100 Subject: [PATCH 035/347] make sure headers etc. 
are passed over to inner index and delete requests in put/delete pipeline --- .../transport/delete/DeletePipelineTransportAction.java | 7 +------ .../ingest/transport/put/PutPipelineTransportAction.java | 2 +- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java index b57a3db00c9..c8b60a0252f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java @@ -26,17 +26,12 @@ import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - public class DeletePipelineTransportAction extends HandledTransportAction { private final TransportDeleteAction deleteAction; @@ -49,7 +44,7 @@ public class DeletePipelineTransportAction extends HandledTransportAction listener) { - DeleteRequest deleteRequest = new DeleteRequest(); + DeleteRequest deleteRequest = new DeleteRequest(request); deleteRequest.index(PipelineStore.INDEX); deleteRequest.type(PipelineStore.TYPE); deleteRequest.id(request.id()); diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java index 542c0842627..b6c59ff8ed8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java @@ -59,7 +59,7 @@ public class PutPipelineTransportAction extends HandledTransportAction Date: Fri, 6 Nov 2015 18:09:55 +0700 Subject: [PATCH 036/347] geoip: don't store geoinfo if nothing can be resolved --- plugins/ingest/build.gradle | 2 +- .../processor/geoip/GeoIpProcessor.java | 57 ++++++++++++------- .../processor/geoip/GeoIpProcessorTests.java | 13 +++++ 3 files changed, 52 insertions(+), 20 deletions(-) diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 4aca00bcb9f..ea0fd5de81c 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -48,7 +48,7 @@ task copyDefaultGeoIp2DatabaseFiles(type: Copy) { project.bundlePlugin.dependsOn(copyDefaultGeoIp2DatabaseFiles) -compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" +compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked,-serial" compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" bundlePlugin { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 020b88a23e0..2ad63359357 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -20,10 +20,12 @@ package org.elasticsearch.ingest.processor.geoip; import com.maxmind.geoip2.DatabaseReader; +import 
com.maxmind.geoip2.exception.AddressNotFoundException; import com.maxmind.geoip2.exception.GeoIp2Exception; import com.maxmind.geoip2.model.CityResponse; import com.maxmind.geoip2.model.CountryResponse; import com.maxmind.geoip2.record.*; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.Data; @@ -38,6 +40,7 @@ import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -67,13 +70,21 @@ public final class GeoIpProcessor implements Processor { throw new RuntimeException(e); } - final Map geoData; + Map geoData; switch (dbReader.getMetadata().getDatabaseType()) { case "GeoLite2-City": - geoData = retrieveCityGeoData(ipAddress); + try { + geoData = retrieveCityGeoData(ipAddress); + } catch (AddressNotFoundRuntimeException e) { + geoData = Collections.emptyMap(); + } break; case "GeoLite2-Country": - geoData = retrieveCountryGeoData(ipAddress); + try { + geoData = retrieveCountryGeoData(ipAddress); + } catch (AddressNotFoundRuntimeException e) { + geoData = Collections.emptyMap(); + } break; default: throw new IllegalStateException("Unsupported database type [" + dbReader.getMetadata().getDatabaseType() + "]"); @@ -98,14 +109,13 @@ public final class GeoIpProcessor implements Processor { if (sm != null) { sm.checkPermission(new SpecialPermission()); } - CityResponse response = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public CityResponse run() { - try { - return dbReader.city(ipAddress); - } catch (IOException | GeoIp2Exception e) { - throw new RuntimeException(e); - } + CityResponse response = AccessController.doPrivileged((PrivilegedAction) () -> { + try { + return dbReader.city(ipAddress); + } catch (AddressNotFoundException e) { + throw new 
AddressNotFoundRuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); } }); @@ -136,14 +146,13 @@ public final class GeoIpProcessor implements Processor { if (sm != null) { sm.checkPermission(new SpecialPermission()); } - CountryResponse response = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public CountryResponse run() { - try { - return dbReader.country(ipAddress); - } catch (IOException | GeoIp2Exception e) { - throw new RuntimeException(e); - } + CountryResponse response = AccessController.doPrivileged((PrivilegedAction) () -> { + try { + return dbReader.country(ipAddress); + } catch (AddressNotFoundException e) { + throw new AddressNotFoundRuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); } }); @@ -190,4 +199,14 @@ public final class GeoIpProcessor implements Processor { } } + // Geoip2's AddressNotFoundException is checked and due to the fact that we need run their code + // inside a PrivilegedAction code block, we are forced to catch any checked exception and rethrow + // it with an unchecked exception. 
+ private final static class AddressNotFoundRuntimeException extends RuntimeException { + + public AddressNotFoundRuntimeException(Throwable cause) { + super(cause); + } + } + } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java index 735776ec43d..caeb3df24e9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java @@ -77,4 +77,17 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("continent_name"), equalTo("Europe")); } + public void testAddressIsNotInTheDatabase() throws Exception { + InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); + GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field"); + + Map document = new HashMap<>(); + document.put("source_field", "202.45.11.11"); + Data data = new Data("_index", "_type", "_id", document); + processor.execute(data); + @SuppressWarnings("unchecked") + Map geoData = (Map) data.getDocument().get("target_field"); + assertThat(geoData.size(), equalTo(0)); + } + } From 8fc5a3d0321ab38a6a4efbce7b1c00f9ddbba34c Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 22 Oct 2015 17:47:08 +0300 Subject: [PATCH 037/347] introduce mutate processor. 
fix forbiddenapis update clean up and add rest test update mutate factory to use configuration utilities compile gsub pattern cleanup, update parseBooleans, null tests --- docs/plugins/ingest.asciidoc | 153 +++++++- .../java/org/elasticsearch/ingest/Data.java | 30 ++ .../ingest/processor/ConfigurationUtils.java | 78 +++++ .../processor/mutate/MutateProcessor.java | 326 ++++++++++++++++++ .../plugin/ingest/IngestModule.java | 2 + .../org/elasticsearch/ingest/DataTests.java | 16 + .../processor/ConfigurationUtilsTests.java | 65 ++++ .../mutate/MutateProcessorFactoryTests.java | 81 +++++ .../mutate/MutateProcessorTests.java | 193 +++++++++++ .../test/ingest/60_mutate_processor.yaml | 50 +++ 10 files changed, 993 insertions(+), 1 deletion(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index b3cd98051cf..e002dd52bc8 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -1,7 +1,158 @@ [[ingest]] == Ingest Plugin -TODO +=== Processors + +==== Mutate Processor + +The Mutate Processor applies functions on the structure of a document. The processor comes with a few +functions to help achieve this. + +The following are the supported configuration actions and how to use them. + +===== Convert +Convert a field's value to a different type, like turning a string to an integer. +If the field value is an array, all members will be converted. 
+
+The supported types include: `integer`, `float`, `string`, and `boolean`.
+
+`boolean` will set a field to "true" if its string value does not match any of the following: "false", "0", "off", "no".
+
+[source,js]
+--------------------------------------------------
+{
+  "mutate": {
+    "convert": {
+      "field1": "integer",
+      "field2": "float"
+    }
+  }
+}
+--------------------------------------------------
+
+===== Gsub
+Convert a string field by applying a regular expression and a replacement.
+If the field is not a string, no action will be taken.
+
+This configuration takes an array consisting of two elements per field/substitution. The first element is the
+pattern to be replaced, and the second is the replacement string.
+
+
+[source,js]
+--------------------------------------------------
+{
+  "mutate": {
+    "gsub": {
+      "field1": ["\\.", "-"]
+    }
+  }
+}
+--------------------------------------------------
+
+===== Join
+Join an array with a separator character. Does nothing on non-array fields.
+
+[source,js]
+--------------------------------------------------
+{
+  "mutate": {
+    "join": {
+      "joined_array_field": "other_array_field"
+    }
+  }
+}
+--------------------------------------------------
+
+===== Lowercase
+Convert a string to its lowercase equivalent.
+
+[source,js]
+--------------------------------------------------
+{
+  "mutate": {
+    "lowercase": ["foo", "bar"]
+  }
+}
+--------------------------------------------------
+
+===== Remove
+Remove one or more fields.
+
+[source,js]
+--------------------------------------------------
+{
+  "mutate": {
+    "remove": ["foo", "bar"]
+  }
+}
+--------------------------------------------------
+
+===== Rename
+Renames one or more fields.
+
+[source,js]
+--------------------------------------------------
+{
+  "mutate": {
+    "rename": {
+      "foo": "update_foo",
+      "bar": "new_bar"
+    }
+  }
+}
+--------------------------------------------------
+
+===== Split
+Split a field to an array using a separator character.
Only works on string fields. + +[source,js] +-------------------------------------------------- +{ + "mutate": { + "split": { + "message": "," + } + } +} +-------------------------------------------------- + +===== Strip +Strip whitespace from field. NOTE: this only works on leading and trailing whitespace. + +[source,js] +-------------------------------------------------- +{ + "mutate": { + "strip": ["foo", "bar"] + } +} +-------------------------------------------------- + +===== Update +Update an existing field with a new value. If the field does not exist, then no action will be taken. + +[source,js] +-------------------------------------------------- +{ + "mutate": { + "update": { + "field": 582.1 + } + } +} +-------------------------------------------------- + +===== Uppercase +Convert a string to its uppercase equivalent. + +[source,js] +-------------------------------------------------- +{ + "mutate": { + "uppercase": ["foo", "bar"] + } +} +-------------------------------------------------- === Processors diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 66b81e8fe83..879acfcbf8e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -54,6 +54,36 @@ public final class Data { return (T) XContentMapValues.extractValue(path, document); } + public boolean containsProperty(String path) { + boolean containsProperty = false; + String[] pathElements = Strings.splitStringToArray(path, '.'); + if (pathElements.length == 0) { + return false; + } + + Map inner = document; + + for (int i = 0; i < pathElements.length; i++) { + if (inner == null) { + containsProperty = false; + break; + } + if (i == pathElements.length - 1) { + containsProperty = inner.containsKey(pathElements[i]); + break; + } + + Object obj = inner.get(pathElements[i]); + if (obj instanceof Map) { + inner = (Map) 
obj; + } else { + inner = null; + } + } + + return containsProperty; + } + /** * add `value` to path in document. If path does not exist, * nested hashmaps will be put in as parent key values until diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java index 182ff61c421..94fb6d14587 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -92,4 +92,82 @@ public final class ConfigurationUtils { throw new IllegalArgumentException("property [" + propertyName + "] isn't a list, but of type [" + value.getClass().getName() + "]"); } } + + /** + * Returns and removes the specified property of type list from the specified configuration map. + * + * If the property value isn't of type list an {@link IllegalArgumentException} is thrown. + */ + public static List readOptionalStringList(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + if (value == null) { + return null; + } + if (value instanceof List) { + @SuppressWarnings("unchecked") + List stringList = (List) value; + return stringList; + } else { + throw new IllegalArgumentException("property [" + propertyName + "] isn't a list, but of type [" + value.getClass().getName() + "]"); + } + } + + /** + * Returns and removes the specified property of type map from the specified configuration map. + * + * If the property value isn't of type map an {@link IllegalArgumentException} is thrown. 
+ */ + public static Map> readOptionalStringListMap(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + if (value == null) { + return null; + } + if (value instanceof Map) { + @SuppressWarnings("unchecked") + Map> stringList = (Map>) value; + return stringList; + } else { + throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); + } + } + + /** + * Returns and removes the specified property of type map from the specified configuration map. + * + * If the property value isn't of type map an {@link IllegalArgumentException} is thrown. + */ + public static Map readOptionalStringMap(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + + if (value == null) { + return null; + } + + if (value instanceof Map) { + Map map = (Map) value; + return map; + } else { + throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); + } + } + + /** + * Returns and removes the specified property of type map from the specified configuration map. + * + * If the property value isn't of type map an {@link IllegalArgumentException} is thrown. 
+ */ + public static Map readOptionalObjectMap(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + + if (value == null) { + return null; + } + + if (value instanceof Map) { + Map map = (Map) value; + return map; + } else { + throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java new file mode 100644 index 00000000000..ab85a9dbd04 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -0,0 +1,326 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.mutate; + +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +public final class MutateProcessor implements Processor { + + public static final String TYPE = "mutate"; + + private final Map update; + private final Map rename; + private final Map convert; + private final Map split; + private final Map> gsub; + private final Map join; + private final List remove; + private final List trim; + private final List uppercase; + private final List lowercase; + + public MutateProcessor(Map update, + Map rename, + Map convert, + Map split, + Map> gsub, + Map join, + List remove, + List trim, + List uppercase, + List lowercase) { + this.update = update; + this.rename = rename; + this.convert = convert; + this.split = split; + this.gsub = gsub; + this.join = join; + this.remove = remove; + this.trim = trim; + this.uppercase = uppercase; + this.lowercase = lowercase; + } + + public Map getUpdate() { + return update; + } + + public Map getRename() { + return rename; + } + + public Map getConvert() { + return convert; + } + + public Map getSplit() { + return split; + } + + public Map> getGsub() { + return gsub; + } + + public Map getJoin() { + return join; + } + + public List getRemove() { + return remove; + } + + public List getTrim() { + return trim; + } + + public List getUppercase() { + return uppercase; + } + + public List getLowercase() { + return lowercase; + } + + @Override + public void execute(Data data) { + if (update != null) { + doUpdate(data); + } + if (rename != null) { + doRename(data); + } + if (convert != null) { + doConvert(data); + } + if (split != null) { + 
doSplit(data); + } + if (gsub != null) { + doGsub(data); + } + if (join != null) { + doJoin(data); + } + if (remove != null) { + doRemove(data); + } + if (trim != null) { + doTrim(data); + } + if (uppercase != null) { + doUppercase(data); + } + if (lowercase != null) { + doLowercase(data); + } + } + + private void doUpdate(Data data) { + for(Map.Entry entry : update.entrySet()) { + data.addField(entry.getKey(), entry.getValue()); + } + } + + private void doRename(Data data) { + for(Map.Entry entry : rename.entrySet()) { + if (data.containsProperty(entry.getKey())) { + Object oldVal = data.getProperty(entry.getKey()); + data.getDocument().remove(entry.getKey()); + data.addField(entry.getValue(), oldVal); + } + } + } + + private Object parseValueAsType(Object oldVal, String toType) { + switch (toType) { + case "integer": + oldVal = Integer.parseInt(oldVal.toString()); + break; + case "float": + oldVal = Float.parseFloat(oldVal.toString()); + break; + case "string": + oldVal = oldVal.toString(); + break; + case "boolean": + // TODO(talevy): Booleans#parseBoolean depends on Elasticsearch, should be moved into dedicated library. 
+ oldVal = Booleans.parseBoolean(oldVal.toString(), false); + } + + return oldVal; + } + + @SuppressWarnings("unchecked") + private void doConvert(Data data) { + for(Map.Entry entry : convert.entrySet()) { + String toType = entry.getValue(); + + Object oldVal = data.getProperty(entry.getKey()); + Object newVal; + + if (oldVal instanceof List) { + newVal = new ArrayList<>(); + for (Object e : ((List) oldVal)) { + ((List) newVal).add(parseValueAsType(e, toType)); + } + } else { + if (oldVal == null) { + throw new IllegalArgumentException("Field \"" + entry.getKey() + "\" is null, cannot be converted to a/an " + toType); + } + newVal = parseValueAsType(oldVal, toType); + } + + data.addField(entry.getKey(), newVal); + } + } + + private void doSplit(Data data) { + for(Map.Entry entry : split.entrySet()) { + Object oldVal = data.getProperty(entry.getKey()); + if (oldVal instanceof String) { + data.addField(entry.getKey(), Arrays.asList(((String) oldVal).split(entry.getValue()))); + } else { + throw new IllegalArgumentException("Cannot split a field that is not a String type"); + } + } + } + + private void doGsub(Data data) { + for (Map.Entry> entry : gsub.entrySet()) { + String fieldName = entry.getKey(); + Tuple matchAndReplace = entry.getValue(); + String oldVal = data.getProperty(fieldName); + if (oldVal == null) { + throw new IllegalArgumentException("Field \"" + fieldName + "\" is null, cannot match pattern."); + } + Matcher matcher = matchAndReplace.v1().matcher(oldVal); + String newVal = matcher.replaceAll(matchAndReplace.v2()); + data.addField(entry.getKey(), newVal); + } + } + + @SuppressWarnings("unchecked") + private void doJoin(Data data) { + for(Map.Entry entry : join.entrySet()) { + Object oldVal = data.getProperty(entry.getKey()); + if (oldVal instanceof List) { + String joined = (String) ((List) oldVal) + .stream() + .map(Object::toString) + .collect(Collectors.joining(entry.getValue())); + + data.addField(entry.getKey(), joined); + } else { + throw new 
IllegalArgumentException("Cannot join field:" + entry.getKey() + " with type: " + oldVal.getClass()); + } + } + } + + private void doRemove(Data data) { + for(String field : remove) { + data.getDocument().remove(field); + } + } + + private void doTrim(Data data) { + for(String field : trim) { + Object val = data.getProperty(field); + if (val instanceof String) { + data.addField(field, ((String) val).trim()); + } else { + throw new IllegalArgumentException("Cannot trim field:" + field + " with type: " + val.getClass()); + } + } + } + + private void doUppercase(Data data) { + for(String field : uppercase) { + Object val = data.getProperty(field); + if (val instanceof String) { + data.addField(field, ((String) val).toUpperCase(Locale.ROOT)); + } else { + throw new IllegalArgumentException("Cannot uppercase field:" + field + " with type: " + val.getClass()); + } + } + } + + private void doLowercase(Data data) { + for(String field : lowercase) { + Object val = data.getProperty(field); + if (val instanceof String) { + data.addField(field, ((String) val).toLowerCase(Locale.ROOT)); + } else { + throw new IllegalArgumentException("Cannot lowercase field:" + field + " with type: " + val.getClass()); + } + } + } + + public static final class Factory implements Processor.Factory { + @Override + public MutateProcessor create(Map config) throws IOException { + Map update = ConfigurationUtils.readOptionalObjectMap(config, "update"); + Map rename = ConfigurationUtils.readOptionalStringMap(config, "rename"); + Map convert = ConfigurationUtils.readOptionalStringMap(config, "convert"); + Map split = ConfigurationUtils.readOptionalStringMap(config, "split"); + Map> gsubConfig = ConfigurationUtils.readOptionalStringListMap(config, "gsub"); + Map join = ConfigurationUtils.readOptionalStringMap(config, "join"); + List remove = ConfigurationUtils.readOptionalStringList(config, "remove"); + List trim = ConfigurationUtils.readOptionalStringList(config, "trim"); + List uppercase = 
ConfigurationUtils.readOptionalStringList(config, "uppercase"); + List lowercase = ConfigurationUtils.readOptionalStringList(config, "lowercase"); + + // pre-compile regex patterns + Map> gsub = null; + if (gsubConfig != null) { + gsub = new HashMap<>(); + for (Map.Entry> entry : gsubConfig.entrySet()) { + List searchAndReplace = entry.getValue(); + if (searchAndReplace.size() != 2) { + throw new IllegalArgumentException("Invalid search and replace values (" + Arrays.toString(searchAndReplace.toArray()) + ") for field: " + entry.getKey()); + } + Pattern searchPattern = Pattern.compile(searchAndReplace.get(0)); + gsub.put(entry.getKey(), new Tuple<>(searchPattern, searchAndReplace.get(1))); + } + } + + return new MutateProcessor( + (update == null) ? null : Collections.unmodifiableMap(update), + (rename == null) ? null : Collections.unmodifiableMap(rename), + (convert == null) ? null : Collections.unmodifiableMap(convert), + (split == null) ? null : Collections.unmodifiableMap(split), + (gsub == null) ? null : Collections.unmodifiableMap(gsub), + (join == null) ? null : Collections.unmodifiableMap(join), + (remove == null) ? null : Collections.unmodifiableList(remove), + (trim == null) ? null : Collections.unmodifiableList(trim), + (uppercase == null) ? null : Collections.unmodifiableList(uppercase), + (lowercase == null) ? 
null : Collections.unmodifiableList(lowercase)); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index d7a52d0315c..675d38f5df6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -25,6 +25,7 @@ import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; +import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.ingest.processor.simple.SimpleProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; @@ -46,6 +47,7 @@ public class IngestModule extends AbstractModule { addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); addProcessor(DateProcessor.TYPE, new DateProcessor.Factory()); + addProcessor(MutateProcessor.TYPE, new MutateProcessor.Factory()); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Factory.class); for (Map.Entry entry : processors.entrySet()) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index d048620cea3..e6cae34035e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -49,6 +49,22 @@ public class DataTests extends ESTestCase { assertThat(data.getProperty("fizz.buzz"), equalTo("hello world")); } + public void testContainsProperty() { + assertTrue(data.containsProperty("fizz")); + } + + public void testContainsProperty_Nested() { + 
assertTrue(data.containsProperty("fizz.buzz")); + } + + public void testContainsProperty_NotFound() { + assertFalse(data.containsProperty("doesnotexist")); + } + + public void testContainsProperty_NestedNotFound() { + assertFalse(data.containsProperty("fizz.doesnotexist")); + } + public void testSimpleAddField() { data.addField("new_field", "foo"); assertThat(data.getDocument().get("new_field"), equalTo("foo")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java new file mode 100644 index 00000000000..274a952f935 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.*; + +import static org.hamcrest.Matchers.*; + + +public class ConfigurationUtilsTests extends ESTestCase { + private Map config; + + @Before + public void setConfig() { + config = new HashMap<>(); + config.put("foo", "bar"); + config.put("arr", Arrays.asList("1", "2", "3")); + List list = new ArrayList<>(); + list.add(2); + config.put("int", list); + config.put("ip", "127.0.0.1"); + Map fizz = new HashMap<>(); + fizz.put("buzz", "hello world"); + config.put("fizz", fizz); + } + + public void testReadStringProperty() { + String val = ConfigurationUtils.readStringProperty(config, "foo"); + assertThat(val, equalTo("bar")); + } + + public void testReadStringProperty_InvalidType() { + try { + ConfigurationUtils.readStringProperty(config, "arr"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("property [arr] isn't a string, but of type [java.util.Arrays$ArrayList]")); + } + } + + // TODO(talevy): Issue with generics. This test should fail, "int" is of type List + public void testOptional_InvalidType() { + List val = ConfigurationUtils.readStringList(config, "int"); + assertThat(val, equalTo(Arrays.asList(2))); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java new file mode 100644 index 00000000000..ccc3e223adf --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.mutate; + +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class MutateProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + Map update = new HashMap<>(); + update.put("foo", 123); + config.put("update", update); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), equalTo(update)); + } + + public void testCreateGsubPattern() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + Map> gsub = new HashMap<>(); + gsub.put("foo", Arrays.asList("\\s.*e\\s", "")); + config.put("gsub", gsub); + + Map> compiledGsub = new HashMap<>(); + Pattern searchPattern = Pattern.compile("\\s.*e\\s"); + compiledGsub.put("foo", new Tuple<>(searchPattern, "")); + + MutateProcessor processor = factory.create(config); + for (Map.Entry> entry : compiledGsub.entrySet()) { + Tuple actualSearchAndReplace = 
processor.getGsub().get(entry.getKey()); + assertThat(actualSearchAndReplace, notNullValue()); + assertThat(actualSearchAndReplace.v1().pattern(), equalTo(entry.getValue().v1().pattern())); + assertThat(actualSearchAndReplace.v2(), equalTo(entry.getValue().v2())); + } + } + + public void testCreateGsubPattern_InvalidFormat() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + Map> gsub = new HashMap<>(); + gsub.put("foo", Arrays.asList("only_one")); + config.put("gsub", gsub); + + try { + factory.create(config); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Invalid search and replace values ([only_one]) for field: foo")); + } + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java new file mode 100644 index 00000000000..55066c4e2e7 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java @@ -0,0 +1,193 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.mutate; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + + +public class MutateProcessorTests extends ESTestCase { + private static final MutateProcessor.Factory FACTORY = new MutateProcessor.Factory(); + private Data data; + private Map config; + + @Before + public void setData() { + Map document = new HashMap<>(); + document.put("foo", "bar"); + document.put("alpha", "aBcD"); + document.put("num", "64"); + document.put("to_strip", " clean "); + document.put("arr", Arrays.asList("1", "2", "3")); + document.put("ip", "127.0.0.1"); + Map fizz = new HashMap<>(); + fizz.put("buzz", "hello world"); + document.put("fizz", fizz); + + data = new Data("index", "type", "id", document); + config = new HashMap<>(); + } + + public void testUpdate() throws IOException { + Map update = new HashMap<>(); + update.put("foo", 123); + config.put("update", update); + + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("foo"), equalTo(123)); + } + + public void testRename() throws IOException { + Map rename = new HashMap<>(); + rename.put("foo", "bar"); + config.put("rename", rename); + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("bar"), equalTo("bar")); + assertThat(data.containsProperty("foo"), is(false)); + } + + public void testConvert() throws IOException { + Map convert = new HashMap<>(); + convert.put("num", "integer"); + config.put("convert", convert); + + Processor processor = FACTORY.create(config); + processor.execute(data); + 
assertThat(data.getProperty("num"), equalTo(64)); + } + + public void testConvert_NullField() throws IOException { + Map convert = new HashMap<>(); + convert.put("null", "integer"); + config.put("convert", convert); + + Processor processor = FACTORY.create(config); + try { + processor.execute(data); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Field \"null\" is null, cannot be converted to a/an integer")); + } + } + + public void testConvert_List() throws IOException { + Map convert = new HashMap<>(); + convert.put("arr", "integer"); + config.put("convert", convert); + + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("arr"), equalTo(Arrays.asList(1, 2, 3))); + } + + public void testSplit() throws IOException { + HashMap split = new HashMap<>(); + split.put("ip", "\\."); + config.put("split", split); + + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("ip"), equalTo(Arrays.asList("127", "0", "0", "1"))); + } + + public void testGsub() throws IOException { + HashMap> gsub = new HashMap<>(); + gsub.put("ip", Arrays.asList("\\.", "-")); + config.put("gsub", gsub); + + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("ip"), equalTo("127-0-0-1")); + } + + public void testGsub_NullValue() throws IOException { + HashMap> gsub = new HashMap<>(); + gsub.put("null_field", Arrays.asList("\\.", "-")); + config.put("gsub", gsub); + + Processor processor = FACTORY.create(config); + try { + processor.execute(data); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Field \"null_field\" is null, cannot match pattern.")); + } + } + + public void testJoin() throws IOException { + HashMap join = new HashMap<>(); + join.put("arr", "-"); + config.put("join", join); + + Processor processor = FACTORY.create(config); + processor.execute(data); + 
assertThat(data.getProperty("arr"), equalTo("1-2-3")); + } + + public void testRemove() throws IOException { + List remove = Arrays.asList("foo", "ip"); + config.put("remove", remove); + + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("foo"), nullValue()); + assertThat(data.getProperty("ip"), nullValue()); + } + + public void testTrim() throws IOException { + List trim = Arrays.asList("to_strip", "foo"); + config.put("trim", trim); + + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("foo"), equalTo("bar")); + assertThat(data.getProperty("to_strip"), equalTo("clean")); + } + + public void testUppercase() throws IOException { + List uppercase = Arrays.asList("foo"); + config.put("uppercase", uppercase); + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("foo"), equalTo("BAR")); + } + + public void testLowercase() throws IOException { + List lowercase = Arrays.asList("alpha"); + config.put("lowercase", lowercase); + Processor processor = FACTORY.create(config); + processor.execute(data); + assertThat(data.getProperty("alpha"), equalTo("abcd")); + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml new file mode 100644 index 00000000000..4ab6ba652ca --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml @@ -0,0 +1,50 @@ +--- +"Test mutate processor": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "mutate" : { + "rename" : { + "field1": "foo" + }, + "update" : { + "field2": "bar" + } + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in 
the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field1: "val"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.foo: "val" } + - match: { _source.field2: "bar" } From e9b72f5394c7fe632d7c8c320d250dfc783a8d14 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 9 Nov 2015 19:14:06 -0800 Subject: [PATCH 038/347] remove SimpleProcessor --- .../processor/simple/SimpleProcessor.java | 67 ------------------- .../plugin/ingest/IngestModule.java | 2 - .../ingest/PipelineFactoryTests.java | 27 +++----- .../plugin/ingest/PipelineStoreTests.java | 5 +- .../transport/IngestActionFilterTests.java | 14 +++- .../rest-api-spec/test/ingest/20_crud.yaml | 9 ++- .../test/ingest/30_simple_processor.yaml | 63 ----------------- 7 files changed, 28 insertions(+), 159 deletions(-) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java delete mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java deleted file mode 100644 index d8dfc230a7b..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/simple/SimpleProcessor.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.simple; - -import org.elasticsearch.ingest.Data; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; - -import java.util.Map; - -public final class SimpleProcessor implements Processor { - - public static final String TYPE = "simple"; - - private final String path; - private final String expectedValue; - - private final String addField; - private final String addFieldValue; - - public SimpleProcessor(String path, String expectedValue, String addField, String addFieldValue) { - this.path = path; - this.expectedValue = expectedValue; - this.addField = addField; - this.addFieldValue = addFieldValue; - } - - @Override - public void execute(Data data) { - Object value = data.getProperty(path); - if (value != null) { - if (value.toString().equals(this.expectedValue)) { - data.addField(addField, addFieldValue); - } - } - } - - public static class Factory implements Processor.Factory { - - public SimpleProcessor create(Map config) { - String path = ConfigurationUtils.readStringProperty(config, "path"); - String expectedValue = ConfigurationUtils.readStringProperty(config, "expected_value"); - String addField = ConfigurationUtils.readStringProperty(config, "add_field", null); - String addFieldValue = ConfigurationUtils.readStringProperty(config, "add_field_value"); - return new SimpleProcessor(path, expectedValue, addField, addFieldValue); - } - - } - -} diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 675d38f5df6..94dfb4fb690 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -26,7 +26,6 @@ import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; import org.elasticsearch.ingest.processor.mutate.MutateProcessor; -import org.elasticsearch.ingest.processor.simple.SimpleProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import java.util.HashMap; @@ -43,7 +42,6 @@ public class IngestModule extends AbstractModule { binder().bind(PipelineStore.class).asEagerSingleton(); binder().bind(PipelineStoreClient.class).asEagerSingleton(); - addProcessor(SimpleProcessor.TYPE, new SimpleProcessor.Factory()); addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); addProcessor(DateProcessor.TYPE, new DateProcessor.Factory()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java index 4bc63dcbc24..bc1c9eb0fe5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java @@ -20,11 +20,10 @@ package org.elasticsearch.ingest; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.simple.SimpleProcessor; +import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.test.ESTestCase; -import org.junit.Test; -import java.io.IOException; +import java.util.Arrays; import 
java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -37,43 +36,37 @@ public class PipelineFactoryTests extends ESTestCase { public void testCreate() throws Exception { Pipeline.Factory factory = new Pipeline.Factory(); Map processorRegistry = new HashMap<>(); - processorRegistry.put("simple", new SimpleProcessor.Factory()); + processorRegistry.put("mutate", new MutateProcessor.Factory()); Map processorConfig = new HashMap<>(); - processorConfig.put("path", "_path"); - processorConfig.put("expected_value", "_expected_value"); - processorConfig.put("add_field", "_add_field"); - processorConfig.put("add_field_value", "_add_field_value"); + processorConfig.put("uppercase", Arrays.asList("field1")); Map pipelineConfig = new HashMap<>(); pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("simple", processorConfig))); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig))); Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); assertThat(pipeline.getProcessors().size(), equalTo(1)); - assertThat(pipeline.getProcessors().get(0), instanceOf(SimpleProcessor.class)); + assertThat(pipeline.getProcessors().get(0), instanceOf(MutateProcessor.class)); } public void testCreate_unusedProcessorOptions() throws Exception { Pipeline.Factory factory = new Pipeline.Factory(); Map processorRegistry = new HashMap<>(); - processorRegistry.put("simple", new SimpleProcessor.Factory()); + processorRegistry.put("mutate", new MutateProcessor.Factory()); Map processorConfig = new HashMap<>(); - processorConfig.put("path", "_path"); - processorConfig.put("expected_value", "_expected_value"); - processorConfig.put("add_field", "_add_field"); - processorConfig.put("add_field_value", "_add_field_value"); 
+ processorConfig.put("uppercase", Arrays.asList("field1")); processorConfig.put("foo", "bar"); Map pipelineConfig = new HashMap<>(); pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("simple", processorConfig))); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig))); try { factory.create("_id", pipelineConfig, processorRegistry); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("processor [simple] doesn't support one or more provided configuration parameters [[foo]]")); + assertThat(e.getMessage(), equalTo("processor [mutate] doesn't support one or more provided configuration parameters [[foo]]")); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 46271a1c4bb..5b7e02bec28 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.env.Environment; -import org.elasticsearch.ingest.processor.simple.SimpleProcessor; +import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.test.ESTestCase; @@ -32,7 +32,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -57,7 +56,7 @@ public class PipelineStoreTests extends ESTestCase { ClusterService 
clusterService = mock(ClusterService.class); client = mock(PipelineStoreClient.class); Environment environment = mock(Environment.class); - store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.singletonMap(SimpleProcessor.TYPE, new SimpleProcessor.Factory())); + store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.singletonMap(MutateProcessor.TYPE, new MutateProcessor.Factory())); store.start(); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index a4c71a6a729..c0ff79b056f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -29,7 +29,8 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.processor.simple.SimpleProcessor; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; import org.elasticsearch.plugin.ingest.PipelineStore; @@ -40,6 +41,8 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; @@ -164,7 +167,14 @@ public class IngestActionFilterTests extends ESTestCase { .build() ); PipelineStore store = mock(PipelineStore.class); - when(store.get("_id")).thenReturn(new Pipeline("_id", 
"_description", Arrays.asList(new SimpleProcessor("field1", "value1", "field2", "value2")))); + + Map mutateConfig = new HashMap<>(); + Map update = new HashMap<>(); + update.put("field2", "value2"); + mutateConfig.put("update", update); + + Processor mutateProcessor = (new MutateProcessor.Factory()).create(mutateConfig); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(mutateProcessor))); executionService = new PipelineExecutionService(store, threadPool); filter = new IngestActionFilter(Settings.EMPTY, executionService); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index 4c982ddb914..82177c615ab 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -12,11 +12,10 @@ "description": "_description", "processors": [ { - "simple" : { - "path" : "field1", - "expected_value" : "_value", - "add_field" : "field2", - "add_field_value" : "_value" + "mutate" : { + "update" : { + "field2": "_value" + } } } ] diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml deleted file mode 100644 index 5fa10e10122..00000000000 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_simple_processor.yaml +++ /dev/null @@ -1,63 +0,0 @@ ---- -"Test simple processor": - - do: - cluster.health: - wait_for_status: green - - - do: - ingest.put_pipeline: - id: "my_pipeline" - body: > - { - "description": "_description", - "processors": [ - { - "simple" : { - "path" : "field1", - "expected_value" : "_value", - "add_field" : "field2", - "add_field_value" : "_value" - } - } - ] - } - - match: { _id: "my_pipeline" } - - # Simulate a Thread.sleep(), because pipeline are updated in the background - 
- do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - - do: - ingest.index: - index: test - type: test - id: 1 - pipeline_id: "my_pipeline" - body: {field1: "_value"} - - - do: - get: - index: test - type: test - id: 1 - - match: { _source.field1: "_value" } - - match: { _source.field2: "_value" } - - - do: - ingest.bulk: - pipeline_id: "my_pipeline" - body: - - '{ "index": { "_index": "test", "_type": "test", "_id": "2" } }' - - '{ "field1": "_value" }' - - - do: - get: - index: test - type: test - id: 2 - - match: { _source.field1: "_value" } - - match: { _source.field2: "_value" } From 4da05168f488c9e06085d0b2c3031db228606346 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 10 Nov 2015 09:17:28 +0700 Subject: [PATCH 039/347] geoip: renamed `ip_field` option to `source_field`, because it can hold a ip or hostname. geoip: add a `fields` option to control what fields are added by geoip processor geoip: instead of adding all fields, only `country_code`, `city_name`, `location`, `continent_name` and `region_name` fields are added. --- docs/plugins/ingest.asciidoc | 23 +-- .../processor/geoip/GeoIpProcessor.java | 135 ++++++++++++++---- .../geoip/GeoIpProcessorFactoryTests.java | 63 ++++++-- .../processor/geoip/GeoIpProcessorTests.java | 7 +- .../test/ingest/50_geoip_processor.yaml | 60 +++++++- 5 files changed, 230 insertions(+), 58 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index e002dd52bc8..d94d6932d45 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -309,18 +309,19 @@ is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too .Geoip options [options="header"] |====== -| Name | Required | Default | Description -| `ip_field` | yes | - | The field to get the ip address from for the geographical lookup. 
-| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. -| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. +| Name | Required | Default | Description +| `source_field` | yes | - | The field to get the ip address or hostname from for the geographical lookup. +| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. +| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. +| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] <1> | Controls what properties are added to the `target_field` based on the geoip lookup. |====== -If the GeoLite2 City database is used then the following fields will be added under the `target_field`: `ip`, +<1> Depends on what is available in `database_field`: +* If the GeoLite2 City database is used then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` -and `location`. - -If the GeoLite2 Country database is used then the following fields will be added under the `target_field`: `ip`, -`country_iso_code`, `country_name` and `continent_name`. +and `location`. The fields actually added depend on what has been found and which fields were configured in `fields`. +* If the GeoLite2 Country database is used then the following fields may be added under the `target_field`: `ip`, +`country_iso_code`, `country_name` and `continent_name`.The fields actually added depend on what has been found and which fields were configured in `fields`. 
An example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field: @@ -331,7 +332,7 @@ An example that uses the default city database and adds the geographical informa "processors" : [ { "geoip" : { - "ip_field" : "ip" + "source_field" : "ip" } } ] @@ -347,7 +348,7 @@ An example that uses the default country database and add the geographical infor "processors" : [ { "geoip" : { - "ip_field" : "ip", + "source_field" : "ip", "target_field" : "geo", "database_file" : "GeoLite2-Country.mmdb" } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index b25245c83b5..ae63efd05cb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -21,11 +21,9 @@ package org.elasticsearch.ingest.processor.geoip; import com.maxmind.geoip2.DatabaseReader; import com.maxmind.geoip2.exception.AddressNotFoundException; -import com.maxmind.geoip2.exception.GeoIp2Exception; import com.maxmind.geoip2.model.CityResponse; import com.maxmind.geoip2.model.CountryResponse; import com.maxmind.geoip2.record.*; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.Data; @@ -40,29 +38,30 @@ import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; +import java.util.*; +import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringList; import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringProperty; public final class GeoIpProcessor 
implements Processor { public static final String TYPE = "geoip"; - private final String ipField; + private final String sourceField; private final String targetField; private final DatabaseReader dbReader; + private final Set fields; - GeoIpProcessor(String ipField, DatabaseReader dbReader, String targetField) throws IOException { - this.ipField = ipField; + GeoIpProcessor(String sourceField, DatabaseReader dbReader, String targetField, Set fields) throws IOException { + this.sourceField = sourceField; this.targetField = targetField; this.dbReader = dbReader; + this.fields = fields; } @Override public void execute(Data data) { - String ip = data.getProperty(ipField); + String ip = data.getProperty(sourceField); final InetAddress ipAddress; try { ipAddress = InetAddress.getByName(ip); @@ -92,8 +91,8 @@ public final class GeoIpProcessor implements Processor { data.addField(targetField, geoData); } - String getIpField() { - return ipField; + String getSourceField() { + return sourceField; } String getTargetField() { @@ -104,6 +103,10 @@ public final class GeoIpProcessor implements Processor { return dbReader; } + Set getFields() { + return fields; + } + private Map retrieveCityGeoData(InetAddress ipAddress) { SecurityManager sm = System.getSecurityManager(); if (sm != null) { @@ -125,18 +128,42 @@ public final class GeoIpProcessor implements Processor { Continent continent = response.getContinent(); Subdivision subdivision = response.getMostSpecificSubdivision(); - Map geoData = new HashMap(); - geoData.put("ip", NetworkAddress.formatAddress(ipAddress)); - geoData.put("country_iso_code", country.getIsoCode()); - geoData.put("country_name", country.getName()); - geoData.put("continent_name", continent.getName()); - geoData.put("region_name", subdivision.getName()); - geoData.put("city_name", city.getName()); - geoData.put("timezone", location.getTimeZone()); - geoData.put("latitude", location.getLatitude()); - geoData.put("longitude", location.getLongitude()); - if 
(location.getLatitude() != null && location.getLongitude() != null) { - geoData.put("location", new double[]{location.getLongitude(), location.getLatitude()}); + Map geoData = new HashMap<>(); + for (Field field : fields) { + switch (field) { + case IP: + geoData.put("ip", NetworkAddress.formatAddress(ipAddress)); + break; + case COUNTRY_ISO_CODE: + geoData.put("country_iso_code", country.getIsoCode()); + break; + case COUNTRY_NAME: + geoData.put("country_name", country.getName()); + break; + case CONTINENT_NAME: + geoData.put("continent_name", continent.getName()); + break; + case REGION_NAME: + geoData.put("region_name", subdivision.getName()); + break; + case CITY_NAME: + geoData.put("city_name", city.getName()); + break; + case TIMEZONE: + geoData.put("timezone", location.getTimeZone()); + break; + case LATITUDE: + geoData.put("latitude", location.getLatitude()); + break; + case LONGITUDE: + geoData.put("longitude", location.getLongitude()); + break; + case LOCATION: + if (location.getLatitude() != null && location.getLongitude() != null) { + geoData.put("location", new double[]{location.getLongitude(), location.getLatitude()}); + } + break; + } } return geoData; } @@ -159,29 +186,59 @@ public final class GeoIpProcessor implements Processor { Country country = response.getCountry(); Continent continent = response.getContinent(); - Map geoData = new HashMap(); - geoData.put("ip", NetworkAddress.formatAddress(ipAddress)); - geoData.put("country_iso_code", country.getIsoCode()); - geoData.put("country_name", country.getName()); - geoData.put("continent_name", continent.getName()); + Map geoData = new HashMap<>(); + for (Field field : fields) { + switch (field) { + case IP: + geoData.put("ip", NetworkAddress.formatAddress(ipAddress)); + break; + case COUNTRY_ISO_CODE: + geoData.put("country_iso_code", country.getIsoCode()); + break; + case COUNTRY_NAME: + geoData.put("country_name", country.getName()); + break; + case CONTINENT_NAME: + geoData.put("continent_name", 
continent.getName()); + break; + } + } return geoData; } public static class Factory implements Processor.Factory { + static final Set DEFAULT_FIELDS = EnumSet.of( + Field.CONTINENT_NAME, Field.COUNTRY_ISO_CODE, Field.REGION_NAME, Field.CITY_NAME, Field.LOCATION + ); + private Path geoIpConfigDirectory; private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); public GeoIpProcessor create(Map config) throws IOException { - String ipField = readStringProperty(config, "ip_field"); + String ipField = readStringProperty(config, "source_field"); String targetField = readStringProperty(config, "target_field", "geoip"); String databaseFile = readStringProperty(config, "database_file", "GeoLite2-City.mmdb"); + final Set fields; + if (config.containsKey("fields")) { + fields = EnumSet.noneOf(Field.class); + List fieldNames = readStringList(config, "fields"); + for (String fieldName : fieldNames) { + try { + fields.add(Field.parse(fieldName)); + } catch (Exception e) { + throw new IllegalArgumentException("illegal field option [" + fieldName +"]. 
valid values are [" + Arrays.toString(Field.values()) +"]", e); + } + } + } else { + fields = DEFAULT_FIELDS; + } Path databasePath = geoIpConfigDirectory.resolve(databaseFile); if (Files.exists(databasePath) && Files.isRegularFile(databasePath)) { try (InputStream database = Files.newInputStream(databasePath, StandardOpenOption.READ)) { DatabaseReader databaseReader = databaseReaderService.getOrCreateDatabaseReader(databaseFile, database); - return new GeoIpProcessor(ipField, databaseReader, targetField); + return new GeoIpProcessor(ipField, databaseReader, targetField, fields); } } else { throw new IllegalArgumentException("database file [" + databaseFile + "] doesn't exist in [" + geoIpConfigDirectory + "]"); @@ -209,4 +266,22 @@ public final class GeoIpProcessor implements Processor { } } + public enum Field { + + IP, + COUNTRY_ISO_CODE, + COUNTRY_NAME, + CONTINENT_NAME, + REGION_NAME, + CITY_NAME, + TIMEZONE, + LATITUDE, + LONGITUDE, + LOCATION; + + public static Field parse(String value) { + return valueOf(value.toUpperCase(Locale.ROOT)); + } + } + } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java index 236e91818fb..010b0ede5c1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java @@ -24,12 +24,13 @@ import org.elasticsearch.test.StreamsUtils; import org.junit.Before; import java.io.ByteArrayInputStream; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; -import java.util.HashMap; -import java.util.Map; +import java.util.*; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; import static org.hamcrest.Matchers.startsWith; public class 
GeoIpProcessorFactoryTests extends ESTestCase { @@ -50,22 +51,23 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { factory.setConfigDirectory(configDir); Map config = new HashMap<>(); - config.put("ip_field", "_field"); + config.put("source_field", "_field"); GeoIpProcessor processor = factory.create(config); - assertThat(processor.getIpField(), equalTo("_field")); + assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); + assertThat(processor.getFields(), sameInstance(GeoIpProcessor.Factory.DEFAULT_FIELDS)); } public void testBuild_targetField() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); factory.setConfigDirectory(configDir); Map config = new HashMap<>(); - config.put("ip_field", "_field"); + config.put("source_field", "_field"); config.put("target_field", "_field"); GeoIpProcessor processor = factory.create(config); - assertThat(processor.getIpField(), equalTo("_field")); + assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); } @@ -73,10 +75,10 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); factory.setConfigDirectory(configDir); Map config = new HashMap<>(); - config.put("ip_field", "_field"); + config.put("source_field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); GeoIpProcessor processor = factory.create(config); - assertThat(processor.getIpField(), equalTo("_field")); + assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); } @@ -86,7 +88,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { 
factory.setConfigDirectory(configDir); Map config = new HashMap<>(); - config.put("ip_field", "_field"); + config.put("source_field", "_field"); config.put("database_file", "does-not-exist.mmdb"); try { factory.create(config); @@ -94,4 +96,47 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(e.getMessage(), startsWith("database file [does-not-exist.mmdb] doesn't exist in")); } } + + public void testBuild_fields() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); + factory.setConfigDirectory(configDir); + + Set fields = EnumSet.noneOf(GeoIpProcessor.Field.class); + List fieldNames = new ArrayList<>(); + int numFields = scaledRandomIntBetween(1, GeoIpProcessor.Field.values().length); + for (int i = 0; i < numFields; i++) { + GeoIpProcessor.Field field = GeoIpProcessor.Field.values()[i]; + fields.add(field); + fieldNames.add(field.name().toLowerCase(Locale.ROOT)); + } + Map config = new HashMap<>(); + config.put("source_field", "_field"); + config.put("fields", fieldNames); + GeoIpProcessor processor = factory.create(config); + assertThat(processor.getSourceField(), equalTo("_field")); + assertThat(processor.getFields(), equalTo(fields)); + } + + public void testBuild_illegalFieldOption() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); + factory.setConfigDirectory(configDir); + + Map config = new HashMap<>(); + config.put("source_field", "_field"); + config.put("fields", Collections.singletonList("invalid")); + try { + factory.create(config); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("illegal field option [invalid]. 
valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]")); + } + + config = new HashMap<>(); + config.put("source_field", "_field"); + config.put("fields", "invalid"); + try { + factory.create(config); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("property [fields] isn't a list, but of type [java.lang.String]")); + } + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java index caeb3df24e9..c93b4b78f1a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.ingest.Data; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; +import java.util.EnumSet; import java.util.HashMap; import java.util.Map; @@ -33,7 +34,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCity() throws Exception { InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); - GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field"); + GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); @@ -59,7 +60,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCountry() throws Exception { InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-Country.mmdb"); - GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field"); 
+ GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); @@ -79,7 +80,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testAddressIsNotInTheDatabase() throws Exception { InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); - GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field"); + GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); Map document = new HashMap<>(); document.put("source_field", "202.45.11.11"); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml index 05912572562..3c0efc19c78 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml @@ -13,7 +13,59 @@ "processors": [ { "geoip" : { - "ip_field" : "field1" + "source_field" : "field1" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field1: "128.101.101.101"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "128.101.101.101" } + - length: { _source.geoip: 5 } + - match: { _source.geoip.city_name: "Minneapolis" } + - match: { _source.geoip.country_iso_code: "US" } + - match: { 
_source.geoip.location: [-93.2166, 44.9759] } + - match: { _source.geoip.region_name: "Minnesota" } + - match: { _source.geoip.continent_name: "North America" } + +--- +"Test geoip processor with fields": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "geoip" : { + "source_field" : "field1", + "fields" : ["city_name", "country_iso_code", "ip", "latitude", "longitude", "location", "timezone", "country_name", "region_name", "continent_name"] } } ] @@ -69,7 +121,7 @@ "processors": [ { "geoip" : { - "ip_field" : "field1", + "source_field" : "field1", "database_file" : "GeoLite2-Country.mmdb" } } @@ -99,8 +151,6 @@ type: test id: 1 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 4 } + - length: { _source.geoip: 2 } - match: { _source.geoip.country_iso_code: "US" } - - match: { _source.geoip.ip: "128.101.101.101" } - - match: { _source.geoip.country_name: "United States" } - match: { _source.geoip.continent_name: "North America" } From 347b8e600e52e3a1010f77bbfcd631dfcf15cd50 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 11 Nov 2015 14:39:48 +0700 Subject: [PATCH 040/347] specify all the dependencies of dependencies, because transitive dependencies have been disabled --- plugins/ingest/build.gradle | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 02ec152f979..2d43139a7bb 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -24,10 +24,15 @@ esplugin { dependencies { compile 'org.jruby.joni:joni:2.1.6' - compile (group: 'com.maxmind.geoip2', name: 'geoip2', version: '2.3.1') { - // we don't use Maxmind's http service: - exclude group: 'com.google.http-client', module: 'google-http-client' - } + // joni dependencies: + compile 'org.jruby.jcodings:jcodings:1.0.12' + + compile 
('com.maxmind.geoip2:geoip2:2.3.1') + // geoip2 dependencies: + compile('com.fasterxml.jackson.core:jackson-annotations:2.5.0') + compile('com.fasterxml.jackson.core:jackson-databind:2.5.3') + compile('com.maxmind.db:maxmind-db:1.0.0') + compile 'joda-time:joda-time:2.8.2' testCompile 'org.elasticsearch:geolite2-databases:20151029' testCompile 'org.elasticsearch:securemock:1.1' From 2bde3848259160d5d4a653b10032a61646c2f6db Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 11 Nov 2015 14:41:29 +0700 Subject: [PATCH 041/347] renamed yaml tests --- .../rest-api-spec/test/ingest/{40_grok.yaml => 30_grok.yaml} | 0 .../ingest/{50_geoip_processor.yaml => 40_geoip_processor.yaml} | 0 .../ingest/{60_date_processor.yaml => 50_date_processor.yaml} | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/{40_grok.yaml => 30_grok.yaml} (100%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/{50_geoip_processor.yaml => 40_geoip_processor.yaml} (100%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/{60_date_processor.yaml => 50_date_processor.yaml} (100%) diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_grok.yaml rename to plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_geoip_processor.yaml rename to plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml diff --git 
a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_date_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_date_processor.yaml rename to plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml From 2b31f4fff7ef8e79e398a82981de485e78d22bc0 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 10 Nov 2015 17:37:28 +0100 Subject: [PATCH 042/347] Mutate processor improvements Remove code duplications from ConfigurationUtils Make sure that the mutate processor doesn't use Tuple as that would require to depend on core. Also make sure that the MutateProcessor tests don't end up testing the factory as well. Make processor getters package private as they are only needed in tests. Add new tests to MutateProcessorFactoryTests --- .../ingest/processor/ConfigurationUtils.java | 56 +---- .../processor/mutate/GsubExpression.java | 50 +++++ .../processor/mutate/MutateProcessor.java | 72 +++---- .../mutate/MutateProcessorFactoryTests.java | 191 ++++++++++++++++-- .../mutate/MutateProcessorTests.java | 89 ++++---- 5 files changed, 292 insertions(+), 166 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java index 94fb6d14587..e3f77b4a141 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -84,13 +84,7 @@ public final class ConfigurationUtils { if (value == null) { throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); } - if (value instanceof List) { - 
@SuppressWarnings("unchecked") - List stringList = (List) value; - return stringList; - } else { - throw new IllegalArgumentException("property [" + propertyName + "] isn't a list, but of type [" + value.getClass().getName() + "]"); - } + return readStringList(propertyName, value); } /** @@ -103,6 +97,10 @@ public final class ConfigurationUtils { if (value == null) { return null; } + return readStringList(propertyName, value); + } + + private static List readStringList(String propertyName, Object value) { if (value instanceof List) { @SuppressWarnings("unchecked") List stringList = (List) value; @@ -117,54 +115,14 @@ public final class ConfigurationUtils { * * If the property value isn't of type map an {@link IllegalArgumentException} is thrown. */ - public static Map> readOptionalStringListMap(Map configuration, String propertyName) { + public static Map readOptionalMap(Map configuration, String propertyName) { Object value = configuration.remove(propertyName); if (value == null) { return null; } if (value instanceof Map) { @SuppressWarnings("unchecked") - Map> stringList = (Map>) value; - return stringList; - } else { - throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); - } - } - - /** - * Returns and removes the specified property of type map from the specified configuration map. - * - * If the property value isn't of type map an {@link IllegalArgumentException} is thrown. - */ - public static Map readOptionalStringMap(Map configuration, String propertyName) { - Object value = configuration.remove(propertyName); - - if (value == null) { - return null; - } - - if (value instanceof Map) { - Map map = (Map) value; - return map; - } else { - throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); - } - } - - /** - * Returns and removes the specified property of type map from the specified configuration map. 
- * - * If the property value isn't of type map an {@link IllegalArgumentException} is thrown. - */ - public static Map readOptionalObjectMap(Map configuration, String propertyName) { - Object value = configuration.remove(propertyName); - - if (value == null) { - return null; - } - - if (value instanceof Map) { - Map map = (Map) value; + Map map = (Map) value; return map; } else { throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java new file mode 100644 index 00000000000..c62577d1cc8 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.mutate; + +import java.util.regex.Pattern; + +/** + * Represents a gsub expression containing the field name, the pattern to look for and its string replacement. 
+ */ +public class GsubExpression { + + private final String fieldName; + private final Pattern pattern; + private final String replacement; + + public GsubExpression(String fieldName, Pattern pattern, String replacement) { + this.fieldName = fieldName; + this.pattern = pattern; + this.replacement = replacement; + } + + public String getFieldName() { + return fieldName; + } + + public Pattern getPattern() { + return pattern; + } + + public String getReplacement() { + return replacement; + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index ab85a9dbd04..d09454b1016 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.processor.mutate; import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; @@ -39,23 +38,16 @@ public final class MutateProcessor implements Processor { private final Map rename; private final Map convert; private final Map split; - private final Map> gsub; + private final List gsub; private final Map join; private final List remove; private final List trim; private final List uppercase; private final List lowercase; - public MutateProcessor(Map update, - Map rename, - Map convert, - Map split, - Map> gsub, - Map join, - List remove, - List trim, - List uppercase, - List lowercase) { + public MutateProcessor(Map update, Map rename, Map convert, + Map split, List gsub, Map join, + List remove, List trim, List uppercase, List lowercase) { this.update = update; this.rename = rename; this.convert = convert; @@ -68,43 
+60,43 @@ public final class MutateProcessor implements Processor { this.lowercase = lowercase; } - public Map getUpdate() { + Map getUpdate() { return update; } - public Map getRename() { + Map getRename() { return rename; } - public Map getConvert() { + Map getConvert() { return convert; } - public Map getSplit() { + Map getSplit() { return split; } - public Map> getGsub() { + List getGsub() { return gsub; } - public Map getJoin() { + Map getJoin() { return join; } - public List getRemove() { + List getRemove() { return remove; } - public List getTrim() { + List getTrim() { return trim; } - public List getUppercase() { + List getUppercase() { return uppercase; } - public List getLowercase() { + List getLowercase() { return lowercase; } @@ -213,16 +205,14 @@ public final class MutateProcessor implements Processor { } private void doGsub(Data data) { - for (Map.Entry> entry : gsub.entrySet()) { - String fieldName = entry.getKey(); - Tuple matchAndReplace = entry.getValue(); - String oldVal = data.getProperty(fieldName); + for (GsubExpression gsubExpression : gsub) { + String oldVal = data.getProperty(gsubExpression.getFieldName()); if (oldVal == null) { - throw new IllegalArgumentException("Field \"" + fieldName + "\" is null, cannot match pattern."); + throw new IllegalArgumentException("Field \"" + gsubExpression.getFieldName() + "\" is null, cannot match pattern."); } - Matcher matcher = matchAndReplace.v1().matcher(oldVal); - String newVal = matcher.replaceAll(matchAndReplace.v2()); - data.addField(entry.getKey(), newVal); + Matcher matcher = gsubExpression.getPattern().matcher(oldVal); + String newVal = matcher.replaceAll(gsubExpression.getReplacement()); + data.addField(gsubExpression.getFieldName(), newVal); } } @@ -285,28 +275,28 @@ public final class MutateProcessor implements Processor { public static final class Factory implements Processor.Factory { @Override public MutateProcessor create(Map config) throws IOException { - Map update = 
ConfigurationUtils.readOptionalObjectMap(config, "update"); - Map rename = ConfigurationUtils.readOptionalStringMap(config, "rename"); - Map convert = ConfigurationUtils.readOptionalStringMap(config, "convert"); - Map split = ConfigurationUtils.readOptionalStringMap(config, "split"); - Map> gsubConfig = ConfigurationUtils.readOptionalStringListMap(config, "gsub"); - Map join = ConfigurationUtils.readOptionalStringMap(config, "join"); + Map update = ConfigurationUtils.readOptionalMap(config, "update"); + Map rename = ConfigurationUtils.readOptionalMap(config, "rename"); + Map convert = ConfigurationUtils.readOptionalMap(config, "convert"); + Map split = ConfigurationUtils.readOptionalMap(config, "split"); + Map> gsubConfig = ConfigurationUtils.readOptionalMap(config, "gsub"); + Map join = ConfigurationUtils.readOptionalMap(config, "join"); List remove = ConfigurationUtils.readOptionalStringList(config, "remove"); List trim = ConfigurationUtils.readOptionalStringList(config, "trim"); List uppercase = ConfigurationUtils.readOptionalStringList(config, "uppercase"); List lowercase = ConfigurationUtils.readOptionalStringList(config, "lowercase"); // pre-compile regex patterns - Map> gsub = null; + List gsubExpressions = null; if (gsubConfig != null) { - gsub = new HashMap<>(); + gsubExpressions = new ArrayList<>(); for (Map.Entry> entry : gsubConfig.entrySet()) { List searchAndReplace = entry.getValue(); if (searchAndReplace.size() != 2) { - throw new IllegalArgumentException("Invalid search and replace values (" + Arrays.toString(searchAndReplace.toArray()) + ") for field: " + entry.getKey()); + throw new IllegalArgumentException("Invalid search and replace values " + searchAndReplace + " for field: " + entry.getKey()); } Pattern searchPattern = Pattern.compile(searchAndReplace.get(0)); - gsub.put(entry.getKey(), new Tuple<>(searchPattern, searchAndReplace.get(1))); + gsubExpressions.add(new GsubExpression(entry.getKey(), searchPattern, searchAndReplace.get(1))); } } @@ 
-315,7 +305,7 @@ public final class MutateProcessor implements Processor { (rename == null) ? null : Collections.unmodifiableMap(rename), (convert == null) ? null : Collections.unmodifiableMap(convert), (split == null) ? null : Collections.unmodifiableMap(split), - (gsub == null) ? null : Collections.unmodifiableMap(gsub), + (gsubExpressions == null) ? null : Collections.unmodifiableList(gsubExpressions), (join == null) ? null : Collections.unmodifiableMap(join), (remove == null) ? null : Collections.unmodifiableList(remove), (trim == null) ? null : Collections.unmodifiableList(trim), diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java index ccc3e223adf..017d0465563 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java @@ -19,30 +19,183 @@ package org.elasticsearch.ingest.processor.mutate; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.regex.Pattern; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class MutateProcessorFactoryTests extends ESTestCase { - public void testCreate() throws Exception { + public void testCreateUpdate() throws Exception { MutateProcessor.Factory factory = new MutateProcessor.Factory(); Map config = new HashMap<>(); Map update = new HashMap<>(); update.put("foo", 123); config.put("update", update); MutateProcessor processor = factory.create(config); + assertThat(processor.getRename(), nullValue()); + 
assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getTrim(), nullValue()); assertThat(processor.getUpdate(), equalTo(update)); } + public void testCreateRename() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + Map rename = new HashMap<>(); + rename.put("foo", "bar"); + config.put("rename", rename); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), nullValue()); + assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getTrim(), nullValue()); + assertThat(processor.getRename(), equalTo(rename)); + } + + public void testCreateRemove() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + List remove = Collections.singletonList("foo"); + config.put("remove", remove); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getTrim(), nullValue()); + assertThat(processor.getRename(), nullValue()); + 
assertThat(processor.getRemove(), equalTo(remove)); + } + + public void testCreateConvert() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + Map convert = new HashMap<>(); + convert.put("foo", "integer"); + config.put("convert", convert); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getTrim(), nullValue()); + assertThat(processor.getRename(), nullValue()); + assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getConvert(), equalTo(convert)); + } + + public void testCreateJoin() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + Map join = new HashMap<>(); + join.put("foo", "bar"); + config.put("join", join); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getTrim(), nullValue()); + assertThat(processor.getRename(), nullValue()); + assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getJoin(), equalTo(join)); + } + + public void testCreateSplit() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + Map split = new HashMap<>(); + split.put("foo", "bar"); + config.put("split", split); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), 
nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getTrim(), nullValue()); + assertThat(processor.getRename(), nullValue()); + assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getSplit(), equalTo(split)); + } + + public void testCreateLowercase() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + List lowercase = Collections.singletonList("foo"); + config.put("lowercase", lowercase); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getTrim(), nullValue()); + assertThat(processor.getRename(), nullValue()); + assertThat(processor.getLowercase(), equalTo(lowercase)); + } + + public void testCreateUppercase() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + List uppercase = Collections.singletonList("foo"); + config.put("uppercase", uppercase); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getTrim(), nullValue()); + 
assertThat(processor.getRename(), nullValue()); + assertThat(processor.getUppercase(), equalTo(uppercase)); + } + + public void testCreateTrim() throws Exception { + MutateProcessor.Factory factory = new MutateProcessor.Factory(); + Map config = new HashMap<>(); + List trim = Collections.singletonList("foo"); + config.put("trim", trim); + MutateProcessor processor = factory.create(config); + assertThat(processor.getUpdate(), nullValue()); + assertThat(processor.getGsub(), nullValue()); + assertThat(processor.getConvert(), nullValue()); + assertThat(processor.getJoin(), nullValue()); + assertThat(processor.getRemove(), nullValue()); + assertThat(processor.getUppercase(), nullValue()); + assertThat(processor.getSplit(), nullValue()); + assertThat(processor.getLowercase(), nullValue()); + assertThat(processor.getRename(), nullValue()); + assertThat(processor.getTrim(), equalTo(trim)); + } + public void testCreateGsubPattern() throws Exception { MutateProcessor.Factory factory = new MutateProcessor.Factory(); Map config = new HashMap<>(); @@ -50,32 +203,26 @@ public class MutateProcessorFactoryTests extends ESTestCase { gsub.put("foo", Arrays.asList("\\s.*e\\s", "")); config.put("gsub", gsub); - Map> compiledGsub = new HashMap<>(); - Pattern searchPattern = Pattern.compile("\\s.*e\\s"); - compiledGsub.put("foo", new Tuple<>(searchPattern, "")); - MutateProcessor processor = factory.create(config); - for (Map.Entry> entry : compiledGsub.entrySet()) { - Tuple actualSearchAndReplace = processor.getGsub().get(entry.getKey()); - assertThat(actualSearchAndReplace, notNullValue()); - assertThat(actualSearchAndReplace.v1().pattern(), equalTo(entry.getValue().v1().pattern())); - assertThat(actualSearchAndReplace.v2(), equalTo(entry.getValue().v2())); - } + assertThat(processor.getGsub().size(), equalTo(1)); + GsubExpression gsubExpression = processor.getGsub().get(0); + assertThat(gsubExpression.getFieldName(), equalTo("foo")); + 
assertThat(gsubExpression.getPattern().pattern(), equalTo(Pattern.compile("\\s.*e\\s").pattern())); + assertThat(gsubExpression.getReplacement(), equalTo("")); } - public void testCreateGsubPattern_InvalidFormat() throws Exception { + public void testCreateGsubPatternInvalidFormat() throws Exception { MutateProcessor.Factory factory = new MutateProcessor.Factory(); Map config = new HashMap<>(); Map> gsub = new HashMap<>(); - gsub.put("foo", Arrays.asList("only_one")); + gsub.put("foo", Collections.singletonList("only_one")); config.put("gsub", gsub); try { factory.create(config); - fail(); + fail("processor creation should have failed"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Invalid search and replace values ([only_one]) for field: foo")); + assertThat(e.getMessage(), equalTo("Invalid search and replace values [only_one] for field: foo")); } } - } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java index 55066c4e2e7..f5420eb30ee 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java @@ -25,10 +25,8 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; +import java.util.regex.Pattern; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -36,9 +34,7 @@ import static org.hamcrest.Matchers.nullValue; public class MutateProcessorTests extends ESTestCase { - private static final MutateProcessor.Factory FACTORY = new MutateProcessor.Factory(); private Data data; - private Map config; @Before public void setData() { @@ -54,25 
+50,23 @@ public class MutateProcessorTests extends ESTestCase { document.put("fizz", fizz); data = new Data("index", "type", "id", document); - config = new HashMap<>(); } public void testUpdate() throws IOException { Map update = new HashMap<>(); update.put("foo", 123); - config.put("update", update); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("foo"), equalTo(123)); } public void testRename() throws IOException { Map rename = new HashMap<>(); rename.put("foo", "bar"); - config.put("rename", rename); - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, rename, null, null, null, null, null, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("bar"), equalTo("bar")); assertThat(data.containsProperty("foo"), is(false)); } @@ -80,66 +74,56 @@ public class MutateProcessorTests extends ESTestCase { public void testConvert() throws IOException { Map convert = new HashMap<>(); convert.put("num", "integer"); - config.put("convert", convert); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("num"), equalTo(64)); } - public void testConvert_NullField() throws IOException { + public void testConvertNullField() throws IOException { Map convert = new HashMap<>(); convert.put("null", "integer"); - config.put("convert", convert); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); try { processor.execute(data); - fail(); + 
fail("processor execute should have failed"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Field \"null\" is null, cannot be converted to a/an integer")); } } - public void testConvert_List() throws IOException { + public void testConvertList() throws IOException { Map convert = new HashMap<>(); convert.put("arr", "integer"); - config.put("convert", convert); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("arr"), equalTo(Arrays.asList(1, 2, 3))); } public void testSplit() throws IOException { - HashMap split = new HashMap<>(); + Map split = new HashMap<>(); split.put("ip", "\\."); - config.put("split", split); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("ip"), equalTo(Arrays.asList("127", "0", "0", "1"))); } public void testGsub() throws IOException { - HashMap> gsub = new HashMap<>(); - gsub.put("ip", Arrays.asList("\\.", "-")); - config.put("gsub", gsub); - - Processor processor = FACTORY.create(config); + List gsubExpressions = Collections.singletonList(new GsubExpression("ip", Pattern.compile("\\."), "-")); + Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("ip"), equalTo("127-0-0-1")); } public void testGsub_NullValue() throws IOException { - HashMap> gsub = new HashMap<>(); - gsub.put("null_field", Arrays.asList("\\.", "-")); - config.put("gsub", gsub); - - Processor processor = FACTORY.create(config); + List gsubExpressions = 
Collections.singletonList(new GsubExpression("null_field", Pattern.compile("\\."), "-")); + Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); try { processor.execute(data); - fail(); + fail("processor execution should have failed"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Field \"null_field\" is null, cannot match pattern.")); } @@ -148,46 +132,43 @@ public class MutateProcessorTests extends ESTestCase { public void testJoin() throws IOException { HashMap join = new HashMap<>(); join.put("arr", "-"); - config.put("join", join); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, null, null, null, null, join, null, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("arr"), equalTo("1-2-3")); } public void testRemove() throws IOException { List remove = Arrays.asList("foo", "ip"); - config.put("remove", remove); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, null, null, null, null, null, remove, null, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(5)); assertThat(data.getProperty("foo"), nullValue()); assertThat(data.getProperty("ip"), nullValue()); } public void testTrim() throws IOException { List trim = Arrays.asList("to_strip", "foo"); - config.put("trim", trim); - - Processor processor = FACTORY.create(config); + Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("foo"), equalTo("bar")); assertThat(data.getProperty("to_strip"), equalTo("clean")); } public void testUppercase() throws IOException { - List uppercase = Arrays.asList("foo"); - config.put("uppercase", uppercase); - Processor processor = 
FACTORY.create(config); + List uppercase = Collections.singletonList("foo"); + Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("foo"), equalTo("BAR")); } public void testLowercase() throws IOException { - List lowercase = Arrays.asList("alpha"); - config.put("lowercase", lowercase); - Processor processor = FACTORY.create(config); + List lowercase = Collections.singletonList("alpha"); + Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); processor.execute(data); + assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("alpha"), equalTo("abcd")); } } From c12c9e6e29db2ab364106e642c8f1875679b728c Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 11 Nov 2015 10:17:32 +0100 Subject: [PATCH 043/347] add equals and hashcode to GsubExpression --- .../processor/mutate/GsubExpression.java | 26 ++++++++++++++++--- .../mutate/MutateProcessorFactoryTests.java | 5 +--- 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java index c62577d1cc8..402061b18ac 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor.mutate; +import java.util.Objects; import java.util.regex.Pattern; /** @@ -31,9 +32,9 @@ public class GsubExpression { private final String replacement; public GsubExpression(String fieldName, Pattern pattern, String replacement) { - this.fieldName = fieldName; - this.pattern = pattern; - this.replacement = replacement; + this.fieldName = 
Objects.requireNonNull(fieldName); + this.pattern = Objects.requireNonNull(pattern); + this.replacement = Objects.requireNonNull(replacement); } public String getFieldName() { @@ -47,4 +48,23 @@ public class GsubExpression { public String getReplacement() { return replacement; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GsubExpression that = (GsubExpression) o; + return Objects.equals(fieldName, that.fieldName) && + Objects.equals(pattern.pattern(), that.pattern.pattern()) && + Objects.equals(replacement, that.replacement); + } + + @Override + public int hashCode() { + return Objects.hash(fieldName, pattern, replacement); + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java index 017d0465563..3ebd98c10c3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java @@ -205,10 +205,7 @@ public class MutateProcessorFactoryTests extends ESTestCase { MutateProcessor processor = factory.create(config); assertThat(processor.getGsub().size(), equalTo(1)); - GsubExpression gsubExpression = processor.getGsub().get(0); - assertThat(gsubExpression.getFieldName(), equalTo("foo")); - assertThat(gsubExpression.getPattern().pattern(), equalTo(Pattern.compile("\\s.*e\\s").pattern())); - assertThat(gsubExpression.getReplacement(), equalTo("")); + assertThat(processor.getGsub().get(0), equalTo(new GsubExpression("foo", Pattern.compile("\\s.*e\\s"), ""))); } public void testCreateGsubPatternInvalidFormat() throws Exception { From bce7f6c7ad2b22b56a5a7b2d2e8dbf0516311c4e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 4 Nov 2015 
19:48:14 -0800 Subject: [PATCH 044/347] Add simulate endpoint --- .../java/org/elasticsearch/ingest/Data.java | 24 +++- .../plugin/ingest/IngestModule.java | 1 + .../plugin/ingest/IngestPlugin.java | 8 +- .../ingest/PipelineExecutionService.java | 25 +++- .../plugin/ingest/PipelineStore.java | 4 + .../rest/RestSimulatePipelineAction.java | 51 +++++++ .../simulate/SimulatePipelineAction.java | 43 ++++++ .../simulate/SimulatePipelineRequest.java | 75 ++++++++++ .../SimulatePipelineRequestBuilder.java | 42 ++++++ .../SimulatePipelineRequestPayload.java | 89 ++++++++++++ .../simulate/SimulatePipelineResponse.java | 96 +++++++++++++ .../SimulatePipelineTransportAction.java | 101 ++++++++++++++ .../simulate/SimulatedItemResponse.java | 129 ++++++++++++++++++ .../elasticsearch/ingest/IngestClientIT.java | 64 ++++++++- .../rest-api-spec/api/ingest.simulate.json | 23 ++++ .../test/ingest/80_simulate.yaml | 51 +++++++ 16 files changed, 814 insertions(+), 12 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java create mode 100644 
plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 879acfcbf8e..407c1b894f1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -23,10 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.support.XContentMapValues; import java.lang.reflect.Array; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** * Represents the data and meta data (like id and type) of a single document that is going to be indexed. 
@@ -129,4 +126,23 @@ public final class Data { public boolean isModified() { return modified; } + + @Override + public boolean equals(Object obj) { + if (obj == this) { return true; } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + Data other = (Data) obj; + return Objects.equals(document, other.document) && + Objects.equals(index, other.index) && + Objects.equals(type, other.type) && + Objects.equals(id, other.id); + } + + @Override + public int hashCode() { + return Objects.hash(index, type, id, document); + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 94dfb4fb690..15c2b7769fb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -39,6 +39,7 @@ public class IngestModule extends AbstractModule { protected void configure() { binder().bind(IngestRestFilter.class).asEagerSingleton(); binder().bind(PipelineExecutionService.class).asEagerSingleton(); + // TODO(talevy): write it! 
binder().bind(PipelineSimulateService.class).asEagerSingleton(); binder().bind(PipelineStore.class).asEagerSingleton(); binder().bind(PipelineStoreClient.class).asEagerSingleton(); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index c5efe74db26..34411fd60e7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -20,7 +20,6 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.ActionModule; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; @@ -30,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.rest.RestDeletePipelineAction; import org.elasticsearch.plugin.ingest.rest.RestGetPipelineAction; import org.elasticsearch.plugin.ingest.rest.RestPutPipelineAction; +import org.elasticsearch.plugin.ingest.rest.RestSimulatePipelineAction; import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineTransportAction; @@ -37,11 +37,11 @@ import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineTransportAction; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineTransportAction; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineTransportAction; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestModule; -import 
java.security.AccessController; -import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -105,11 +105,13 @@ public class IngestPlugin extends Plugin { module.registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); module.registerAction(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); module.registerAction(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class); + module.registerAction(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class); } public void onModule(RestModule restModule) { restModule.addRestAction(RestPutPipelineAction.class); restModule.addRestAction(RestGetPipelineAction.class); restModule.addRestAction(RestDeletePipelineAction.class); + restModule.addRestAction(RestSimulatePipelineAction.class); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 18d656813ec..c6314ae6a6c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -25,8 +25,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Map; + public class PipelineExecutionService { static final String THREAD_POOL_NAME = IngestPlugin.NAME; @@ -40,13 +43,29 @@ public class PipelineExecutionService { this.threadPool = threadPool; } - public void execute(Data data, String pipelineId, Listener listener) { + public Pipeline getPipeline(String pipelineId) { Pipeline pipeline = store.get(pipelineId); + if (pipeline 
== null) { - listener.failed(new IllegalArgumentException(LoggerMessageFormat.format("pipeline with id [{}] does not exist", pipelineId))); - return; + throw new IllegalArgumentException(LoggerMessageFormat.format("pipeline with id [{}] does not exist", pipelineId)); } + return pipeline; + } + + public Map getProcessorFactoryRegistry() { + return store.getProcessorFactoryRegistry(); + } + + public void execute(Data data, String pipelineId, Listener listener) { + try { + execute(data, getPipeline(pipelineId), listener); + } catch (IllegalArgumentException e) { + listener.failed(e); + } + } + + public void execute(Data data, Pipeline pipeline, Listener listener) { threadPool.executor(THREAD_POOL_NAME).execute(new Runnable() { @Override public void run() { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 8fc0e4d2d2c..3bbddb1b842 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -95,6 +95,10 @@ public class PipelineStore extends AbstractLifecycleComponent { } } + public Map getProcessorFactoryRegistry() { + return processorFactoryRegistry; + } + public List getReference(String... ids) { List result = new ArrayList<>(ids.length); for (String id : ids) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java new file mode 100644 index 00000000000..983f43d6a1a --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.rest; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; + +public class RestSimulatePipelineAction extends BaseRestHandler { + + @Inject + public RestSimulatePipelineAction(Settings settings, RestController controller, Client client) { + super(settings, controller, client); + controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); + controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); + } + + @Override + protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { + SimulatePipelineRequest request = new SimulatePipelineRequest(); + request.id(restRequest.param("id")); + if 
(restRequest.hasContent()) { + request.source(restRequest.content()); + } + client.execute(SimulatePipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineAction.java new file mode 100644 index 00000000000..7c671a442f6 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineAction.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +public class SimulatePipelineAction extends Action { + + public static final SimulatePipelineAction INSTANCE = new SimulatePipelineAction(); + public static final String NAME = "cluster:admin/ingest/pipeline/simulate"; + + public SimulatePipelineAction() { + super(NAME); + } + + @Override + public SimulatePipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new SimulatePipelineRequestBuilder(client, this); + } + + @Override + public SimulatePipelineResponse newResponse() { + return new SimulatePipelineResponse(); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java new file mode 100644 index 00000000000..2bf7c01bf24 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class SimulatePipelineRequest extends ActionRequest { + + private String id; + private BytesReference source; + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (source == null) { + validationException = addValidationError("source is missing", validationException); + } + return validationException; + } + + public String id() { + return id; + } + + public void id(String id) { + this.id = id; + } + + public BytesReference source() { + return source; + } + + public void source(BytesReference source) { + this.source = source; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + source = in.readBytesReference(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeBytesReference(source); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java new file mode 100644 index 00000000000..8f446b75238 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.bytes.BytesReference; + +public class SimulatePipelineRequestBuilder extends ActionRequestBuilder { + + public SimulatePipelineRequestBuilder(ElasticsearchClient client, SimulatePipelineAction action) { + super(client, action, new SimulatePipelineRequest()); + } + + public SimulatePipelineRequestBuilder setId(String id) { + request.id(id); + return this; + } + + public SimulatePipelineRequestBuilder setSource(BytesReference source) { + request.source(source); + return this; + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java new file mode 100644 index 00000000000..b376c67d006 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.plugin.ingest.PipelineExecutionService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class SimulatePipelineRequestPayload { + + private final List documents; + private final Pipeline pipeline; + + public SimulatePipelineRequestPayload(Pipeline pipeline, List documents) { + this.pipeline = pipeline; + this.documents = Collections.unmodifiableList(documents); + } + + public String pipelineId() { + return pipeline.getId(); + } + + public Pipeline pipeline() { + return pipeline; + } + + + public Data getDocument(int i) { + return documents.get(i); + } + + public int size() { + return documents.size(); + } + + public static class Factory { + + public SimulatePipelineRequestPayload create(String pipelineId, Map config, PipelineExecutionService executionService) throws IOException { + Pipeline pipeline; + // if pipeline `id` passed to request, fetch pipeline from store. 
+ if (pipelineId != null) { + pipeline = executionService.getPipeline(pipelineId); + } else { + Map pipelineConfig = (Map) config.get("pipeline"); + pipeline = (new Pipeline.Factory()).create("_pipeline_id", pipelineConfig, executionService.getProcessorFactoryRegistry()); + } + + // distribute docs by shard key to SimulateShardPipelineResponse + List> docs = (List>) config.get("docs"); + + List dataList = new ArrayList<>(); + + for (int i = 0; i < docs.size(); i++) { + Map dataMap = docs.get(i); + Map document = (Map) dataMap.get("_source"); + Data data = new Data(ConfigurationUtils.readStringProperty(dataMap, "_index", null), + ConfigurationUtils.readStringProperty(dataMap, "_type", null), + ConfigurationUtils.readStringProperty(dataMap, "_id", null), + document); + dataList.add(data); + } + + return new SimulatePipelineRequestPayload(pipeline, dataList); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java new file mode 100644 index 00000000000..92e936dd245 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +public class SimulatePipelineResponse extends ActionResponse implements StatusToXContent { + + private String pipelineId; + private SimulatedItemResponse[] responses; + + public String pipelineId() { + return pipelineId; + } + + public SimulatePipelineResponse pipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public SimulatePipelineResponse responses(SimulatedItemResponse[] responses) { + this.responses = responses; + return this; + } + + public SimulatedItemResponse[] responses() { + return responses; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(pipelineId); + out.writeVInt(responses.length); + for (SimulatedItemResponse response : responses) { + response.writeTo(out); + } + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.pipelineId = in.readString(); + int responsesLength = in.readVInt(); + responses = 
new SimulatedItemResponse[responsesLength]; + for (int i = 0; i < responsesLength; i++) { + SimulatedItemResponse response = new SimulatedItemResponse(); + response.readFrom(in); + responses[i] = response; + } + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("docs"); + for (SimulatedItemResponse response : responses) { + builder.value(response); + } + builder.endArray(); + + return builder; + } + + @Override + public RestStatus status() { + return RestStatus.OK; + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java new file mode 100644 index 00000000000..04c26d3c98f --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.plugin.ingest.PipelineExecutionService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +public class SimulatePipelineTransportAction extends HandledTransportAction { + + private final PipelineExecutionService executionService; + + @Inject + public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineExecutionService executionService) { + super(settings, SimulatePipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, SimulatePipelineRequest::new); + this.executionService = executionService; + } + + @Override + protected void doExecute(SimulatePipelineRequest request, ActionListener listener) { + Map source = XContentHelper.convertToMap(request.source(), false).v2(); + + SimulatePipelineRequestPayload payload; + SimulatePipelineRequestPayload.Factory factory = new SimulatePipelineRequestPayload.Factory(); + try { + payload = factory.create(request.id(), source, executionService); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + final AtomicArray responses = new AtomicArray<>(payload.size()); + final AtomicInteger 
counter = new AtomicInteger(payload.size()); + + for (int i = 0; i < payload.size(); i++) { + final int index = i; + + executionService.execute(payload.getDocument(index), payload.pipeline(), new PipelineExecutionService.Listener() { + @Override + public void executed(Data data) { + responses.set(index, new SimulatedItemResponse(data)); + + if (counter.decrementAndGet() == 0) { + finishHim(); + } + } + + @Override + public void failed(Exception e) { + logger.error("failed to execute pipeline [{}]", e, payload.pipelineId()); + responses.set(index, new SimulatedItemResponse(e)); + + if (counter.decrementAndGet() == 0) { + finishHim(); + } + } + + public void finishHim() { + SimulatedItemResponse[] responseArray = new SimulatedItemResponse[responses.length()]; + responses.toArray(responseArray); + + SimulatePipelineResponse response = new SimulatePipelineResponse() + .pipelineId(payload.pipelineId()) + .responses(responseArray); + + listener.onResponse(response); + } + }); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java new file mode 100644 index 00000000000..a41661c0685 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java @@ -0,0 +1,129 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class SimulatedItemResponse implements Streamable, StatusToXContent { + + private Data data; + private Throwable failure; + + public SimulatedItemResponse() { + + } + + public SimulatedItemResponse(Data data) { + this.data = data; + } + + public SimulatedItemResponse(Throwable failure) { + this.failure = failure; + } + + public boolean failed() { + return this.failure != null; + } + + public Data getData() { + return data; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + boolean failed = in.readBoolean(); + + if (failed) { + this.failure = in.readThrowable(); + // TODO(talevy): check out mget for throwable limitations + } else { + String index = in.readString(); + String type = in.readString(); + String id = in.readString(); + Map doc = in.readMap(); + this.data = new Data(index, type, id, doc); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(failed()); + + if (failed()) { + 
out.writeThrowable(failure); + } else { + out.writeString(data.getIndex()); + out.writeString(data.getType()); + out.writeString(data.getId()); + out.writeMap(data.getDocument()); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.ERROR, failed()); + if (failed()) { + builder.field(Fields.FAILURE, failure.toString()); + } else { + builder.field(Fields.MODIFIED, data.isModified()); + builder.field(Fields.DOCUMENT, data.getDocument()); + } + builder.endObject(); + return builder; + } + + @Override + public RestStatus status() { + return null; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { return true; } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SimulatedItemResponse other = (SimulatedItemResponse) obj; + return Objects.equals(data, other.data) && Objects.equals(failure, other.failure); + } + + @Override + public int hashCode() { + return Objects.hash(data, failure); + } + + static final class Fields { + static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); + static final XContentBuilderString ERROR = new XContentBuilderString("error"); + static final XContentBuilderString FAILURE = new XContentBuilderString("failure"); + static final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index fe860b742d6..f055d92f2d2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -30,10 +30,14 @@ import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; import 
org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequestBuilder; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatedItemResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import java.util.Collection; -import java.util.Map; + +import java.util.*; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -55,6 +59,62 @@ public class IngestClientIT extends ESIntegTestCase { } + public void testSimulate() throws Exception { + new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) + .setId("_id") + .setSource(jsonBuilder().startObject() + .field("description", "my_pipeline") + .startArray("processors") + .startObject() + .startObject("grok") + .field("field", "field1") + .field("pattern", "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>") + .endObject() + .endObject() + .endArray() + .endObject().bytes()) + .get(); + assertBusy(new Runnable() { + @Override + public void run() { + GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(response.isFound(), is(true)); + assertThat(response.pipelines().get("_id"), notNullValue()); + } + }); + + SimulatePipelineResponse response = new SimulatePipelineRequestBuilder(client(), SimulatePipelineAction.INSTANCE) + .setId("_id") + .setSource(jsonBuilder().startObject() + .startArray("docs") + .startObject() + .field("_index", "index") + .field("_type", "type") + .field("_id", "id") + .startObject("_source") + .field("foo", "bar") 
+ .endObject() + .endObject() + .endArray() + .endObject().bytes()) + .get(); + + Map expectedDoc = new HashMap<>(); + expectedDoc.put("foo", "bar"); + Data expectedData = new Data("index", "type", "id", expectedDoc); + SimulatedItemResponse expectedResponse = new SimulatedItemResponse(expectedData); + SimulatedItemResponse[] expectedResponses = new SimulatedItemResponse[] { expectedResponse }; + + assertThat(response.responses().length, equalTo(1)); + assertThat(response.responses()[0].getData().getIndex(), equalTo(expectedResponse.getData().getIndex())); + assertThat(response.responses()[0].getData(), equalTo(expectedResponse.getData())); + assertThat(response.responses()[0], equalTo(expectedResponse)); + assertThat(response.responses(), equalTo(expectedResponses)); + assertThat(response.pipelineId(), equalTo("_id")); + } + public void test() throws Exception { new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) .setId("_id") diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json new file mode 100644 index 00000000000..33007e3be87 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json @@ -0,0 +1,23 @@ +{ + "ingest.simulate": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html", + "methods": [ "GET", "POST" ], + "url": { + "path": "/_ingest/pipeline/_simulate", + "paths": [ "/_ingest/pipeline/_simulate", "/_ingest/pipeline/{id}/_simulate" ], + "parts": { + "id": { + "type" : "string", + "description" : "Pipeline ID", + "required" : false + } + }, + "params": { + } + }, + "body": { + "description" : "The simulate definition", + "required" : true + } + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml new file mode 100644 index 
00000000000..eb68a439bcb --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -0,0 +1,51 @@ +--- +"Test simulate with stored ingest pipeline": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "simple" : { + "path" : "field1", + "expected_value" : "_value", + "add_field" : "field2", + "add_field_value" : "_value" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.simulate: + id: "my_pipeline" + body: > + { + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { docs: 1 } + From 1f29fa4fe94558fc60a36ff7f7e054933ff684ed Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 5 Nov 2015 16:23:30 -0800 Subject: [PATCH 045/347] update rest status --- .../org/elasticsearch/plugin/ingest/IngestModule.java | 1 - .../transport/simulate/SimulatePipelineResponse.java | 8 +++++--- .../ingest/transport/simulate/SimulatedItemResponse.java | 6 +++++- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 15c2b7769fb..94dfb4fb690 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -39,7 +39,6 @@ public class IngestModule extends AbstractModule { protected void configure() { binder().bind(IngestRestFilter.class).asEagerSingleton(); binder().bind(PipelineExecutionService.class).asEagerSingleton(); - // TODO(talevy): write it! 
binder().bind(PipelineSimulateService.class).asEagerSingleton(); binder().bind(PipelineStore.class).asEagerSingleton(); binder().bind(PipelineStoreClient.class).asEagerSingleton(); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java index 92e936dd245..d152424abe5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java @@ -23,10 +23,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.ingest.Data; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -91,6 +88,11 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo @Override public RestStatus status() { + for (SimulatedItemResponse response : responses) { + if (response.failed()) { + return response.status(); + } + } return RestStatus.OK; } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java index a41661c0685..5377b8e0ea0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java @@ -102,7 
+102,11 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { @Override public RestStatus status() { - return null; + if (failed()) { + return RestStatus.BAD_REQUEST; + } else { + return RestStatus.OK; + } } @Override From c22c1e0f54d8894934c46ff414635ba80fdb4d8a Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 5 Nov 2015 21:46:48 -0800 Subject: [PATCH 046/347] remove simulate executor service call and move to simple execution --- .../ingest/PipelineExecutionService.java | 4 -- .../SimulatePipelineRequestPayload.java | 24 +++++--- .../simulate/SimulatePipelineResponse.java | 51 ++++++++++++----- .../SimulatePipelineTransportAction.java | 57 ++++--------------- .../elasticsearch/ingest/IngestClientIT.java | 10 +--- .../rest-api-spec/api/ingest.simulate.json | 2 +- 6 files changed, 70 insertions(+), 78 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index c6314ae6a6c..3b2f71aa142 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -53,10 +53,6 @@ public class PipelineExecutionService { return pipeline; } - public Map getProcessorFactoryRegistry() { - return store.getProcessorFactoryRegistry(); - } - public void execute(Data data, String pipelineId, Listener listener) { try { execute(data, getPipeline(pipelineId), listener); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java index b376c67d006..03ab8253583 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java @@ -22,6 +22,7 @@ import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.plugin.ingest.PipelineExecutionService; +import org.elasticsearch.plugin.ingest.PipelineStore; import java.io.IOException; import java.util.ArrayList; @@ -48,24 +49,33 @@ public class SimulatePipelineRequestPayload { } - public Data getDocument(int i) { - return documents.get(i); + public List documents() { + return documents; } - public int size() { - return documents.size(); + public SimulatePipelineResponse execute() { + List responses = new ArrayList<>(); + for (Data data : documents) { + try { + pipeline.execute(data); + responses.add(new SimulatedItemResponse(data)); + } catch (Exception e) { + responses.add(new SimulatedItemResponse(e)); + } + } + return new SimulatePipelineResponse(pipeline.getId(), responses); } public static class Factory { - public SimulatePipelineRequestPayload create(String pipelineId, Map config, PipelineExecutionService executionService) throws IOException { + public SimulatePipelineRequestPayload create(String pipelineId, Map config, PipelineStore pipelineStore) throws IOException { Pipeline pipeline; // if pipeline `id` passed to request, fetch pipeline from store. 
if (pipelineId != null) { - pipeline = executionService.getPipeline(pipelineId); + pipeline = pipelineStore.get(pipelineId); } else { Map pipelineConfig = (Map) config.get("pipeline"); - pipeline = (new Pipeline.Factory()).create("_pipeline_id", pipelineConfig, executionService.getProcessorFactoryRegistry()); + pipeline = (new Pipeline.Factory()).create("_pipeline_id", pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); } // distribute docs by shard key to SimulateShardPipelineResponse diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java index d152424abe5..aeed148d74a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java @@ -27,35 +27,46 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; public class SimulatePipelineResponse extends ActionResponse implements StatusToXContent { private String pipelineId; - private SimulatedItemResponse[] responses; + private List responses; + + public SimulatePipelineResponse() { + + } + + public SimulatePipelineResponse(String pipelineId, List responses) { + this.pipelineId = pipelineId; + this.responses = Collections.unmodifiableList(responses); + } public String pipelineId() { return pipelineId; } - public SimulatePipelineResponse pipelineId(String pipelineId) { + public void pipelineId(String pipelineId) { this.pipelineId = pipelineId; - return this; } - public SimulatePipelineResponse responses(SimulatedItemResponse[] responses) { - this.responses = responses; - return 
this; - } - - public SimulatedItemResponse[] responses() { + public List responses() { return responses; } + public void responses(List responses) { + this.responses = responses; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(pipelineId); - out.writeVInt(responses.length); + out.writeVInt(responses.size()); for (SimulatedItemResponse response : responses) { response.writeTo(out); } @@ -66,11 +77,11 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo super.readFrom(in); this.pipelineId = in.readString(); int responsesLength = in.readVInt(); - responses = new SimulatedItemResponse[responsesLength]; + responses = new ArrayList<>(); for (int i = 0; i < responsesLength; i++) { SimulatedItemResponse response = new SimulatedItemResponse(); response.readFrom(in); - responses[i] = response; + responses.add(response); } } @@ -90,9 +101,23 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo public RestStatus status() { for (SimulatedItemResponse response : responses) { if (response.failed()) { - return response.status(); + return RestStatus.BAD_REQUEST; } } return RestStatus.OK; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SimulatePipelineResponse that = (SimulatePipelineResponse) o; + return Objects.equals(pipelineId, that.pipelineId) && + Objects.equals(responses, that.responses); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId, responses); + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index 04c26d3c98f..c4f3484ca82 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -25,25 +25,22 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.ingest.Data; -import org.elasticsearch.plugin.ingest.PipelineExecutionService; +import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; public class SimulatePipelineTransportAction extends HandledTransportAction { - private final PipelineExecutionService executionService; + private final PipelineStore pipelineStore; @Inject - public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineExecutionService executionService) { + public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStore pipelineStore) { super(settings, SimulatePipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, SimulatePipelineRequest::new); - this.executionService = executionService; + this.pipelineStore = pipelineStore; } @Override @@ -53,49 +50,17 @@ public class SimulatePipelineTransportAction extends HandledTransportAction responses 
= new AtomicArray<>(payload.size()); - final AtomicInteger counter = new AtomicInteger(payload.size()); - - for (int i = 0; i < payload.size(); i++) { - final int index = i; - - executionService.execute(payload.getDocument(index), payload.pipeline(), new PipelineExecutionService.Listener() { - @Override - public void executed(Data data) { - responses.set(index, new SimulatedItemResponse(data)); - - if (counter.decrementAndGet() == 0) { - finishHim(); - } - } - - @Override - public void failed(Exception e) { - logger.error("failed to execute pipeline [{}]", e, payload.pipelineId()); - responses.set(index, new SimulatedItemResponse(e)); - - if (counter.decrementAndGet() == 0) { - finishHim(); - } - } - - public void finishHim() { - SimulatedItemResponse[] responseArray = new SimulatedItemResponse[responses.length()]; - responses.toArray(responseArray); - - SimulatePipelineResponse response = new SimulatePipelineResponse() - .pipelineId(payload.pipelineId()) - .responses(responseArray); - - listener.onResponse(response); - } - }); - } + threadPool.executor(ThreadPool.Names.MANAGEMENT).execute(new Runnable() { + @Override + public void run() { + listener.onResponse(payload.execute()); + } + }); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index f055d92f2d2..d0d11ae60a0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -105,14 +105,10 @@ public class IngestClientIT extends ESIntegTestCase { expectedDoc.put("foo", "bar"); Data expectedData = new Data("index", "type", "id", expectedDoc); SimulatedItemResponse expectedResponse = new SimulatedItemResponse(expectedData); - SimulatedItemResponse[] expectedResponses = new SimulatedItemResponse[] { expectedResponse }; + List expectedResponses = Arrays.asList(expectedResponse); + 
SimulatePipelineResponse expected = new SimulatePipelineResponse("_id", expectedResponses); - assertThat(response.responses().length, equalTo(1)); - assertThat(response.responses()[0].getData().getIndex(), equalTo(expectedResponse.getData().getIndex())); - assertThat(response.responses()[0].getData(), equalTo(expectedResponse.getData())); - assertThat(response.responses()[0], equalTo(expectedResponse)); - assertThat(response.responses(), equalTo(expectedResponses)); - assertThat(response.pipelineId(), equalTo("_id")); + assertThat(response, equalTo(expected)); } public void test() throws Exception { diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json index 33007e3be87..bf08435eb8e 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json @@ -1,7 +1,7 @@ { "ingest.simulate": { "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html", - "methods": [ "GET", "POST" ], + "methods": [ "POST" ], "url": { "path": "/_ingest/pipeline/_simulate", "paths": [ "/_ingest/pipeline/_simulate", "/_ingest/pipeline/{id}/_simulate" ], From b40af1bcfd735df7dd73aaf2d58449b5cabc2a0e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 11 Nov 2015 18:20:40 -0800 Subject: [PATCH 047/347] updates, moar verbose --- .../java/org/elasticsearch/ingest/Data.java | 4 + .../org/elasticsearch/ingest/Pipeline.java | 15 ++ .../ingest/processor/ConfigurationUtils.java | 20 +- .../ingest/processor/Processor.java | 6 + .../ingest/processor/date/DateProcessor.java | 7 +- .../processor/geoip/GeoIpProcessor.java | 9 +- .../ingest/processor/grok/GrokProcessor.java | 5 + .../processor/mutate/MutateProcessor.java | 35 +++- .../plugin/ingest/IngestModule.java | 2 + .../ingest/PipelineExecutionService.java | 23 +-- .../rest/RestSimulatePipelineAction.java | 10 +- 
.../simulate/ParsedSimulateRequest.java | 101 +++++++++++ .../ProcessedData.java} | 44 +++-- .../simulate/SimulateExecutionService.java | 94 ++++++++++ .../simulate/SimulatedItemResponse.java | 171 ++++++++++++++++++ .../simulate/SimulatePipelineRequest.java | 11 ++ .../SimulatePipelineRequestBuilder.java | 5 + .../SimulatePipelineRequestPayload.java | 99 ---------- .../simulate/SimulatePipelineResponse.java | 3 +- .../SimulatePipelineTransportAction.java | 20 +- .../elasticsearch/ingest/IngestClientIT.java | 2 +- .../processor/ConfigurationUtilsTests.java | 2 +- .../ParsedSimulateRequestParserTests.java | 97 ++++++++++ .../SimulateExecutionServiceTests.java | 130 +++++++++++++ .../rest-api-spec/api/ingest.simulate.json | 5 + .../test/ingest/80_simulate.yaml | 149 ++++++++++++++- 26 files changed, 899 insertions(+), 170 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequest.java rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{transport/simulate/SimulatedItemResponse.java => simulate/ProcessedData.java} (75%) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionService.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulatedItemResponse.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestPayload.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequestParserTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionServiceTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 407c1b894f1..dd1bfde7923 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -44,6 +44,10 @@ public final class Data { this.document = document; } + public Data(Data other) { + this(other.index, other.type, other.id, new HashMap<>(other.document)); + } + @SuppressWarnings("unchecked") public T getProperty(String path) { // TODO: we should not rely on any core class, so we should have custom map extract value logic: diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index a719c4ec727..844a6889e74 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -70,6 +70,21 @@ public final class Pipeline { return processors; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Pipeline pipeline = (Pipeline) o; + return Objects.equals(id, pipeline.id) && + Objects.equals(description, pipeline.description) && + Objects.equals(processors, pipeline.processors); + } + + @Override + public int hashCode() { + return Objects.hash(id, description, processors); + } + public final static class Factory { public Pipeline create(String id, Map config, Map processorRegistry) throws IOException { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java index e3f77b4a141..3a8dbbf448d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -77,39 +77,41 @@ public final class ConfigurationUtils { * Returns and removes the specified property of type list from the specified configuration map. 
* * If the property value isn't of type list an {@link IllegalArgumentException} is thrown. - * If the property is missing an {@link IllegalArgumentException} is thrown */ - public static List readStringList(Map configuration, String propertyName) { + public static List readOptionalList(Map configuration, String propertyName) { Object value = configuration.remove(propertyName); if (value == null) { - throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); + return null; } - return readStringList(propertyName, value); + return readList(propertyName, value); } /** * Returns and removes the specified property of type list from the specified configuration map. * * If the property value isn't of type list an {@link IllegalArgumentException} is thrown. + * If the property is missing an {@link IllegalArgumentException} is thrown */ - public static List readOptionalStringList(Map configuration, String propertyName) { + public static List readList(Map configuration, String propertyName) { Object value = configuration.remove(propertyName); if (value == null) { - return null; + throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); } - return readStringList(propertyName, value); + + return readList(propertyName, value); } - private static List readStringList(String propertyName, Object value) { + private static List readList(String propertyName, Object value) { if (value instanceof List) { @SuppressWarnings("unchecked") - List stringList = (List) value; + List stringList = (List) value; return stringList; } else { throw new IllegalArgumentException("property [" + propertyName + "] isn't a list, but of type [" + value.getClass().getName() + "]"); } } + /** * Returns and removes the specified property of type map from the specified configuration map. 
* diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index a023b78ade4..46a9d43e280 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -38,6 +38,11 @@ public interface Processor { */ void execute(Data data); + /** + * Gets the type of a processor + */ + String getType(); + /** * A factory that knows how to construct a processor based on a map of maps. */ @@ -54,6 +59,7 @@ public interface Processor { default void setConfigDirectory(Path configDirectory) { } + @Override default void close() throws IOException { } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 4c61cc8ee7a..d19433b10ae 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -78,6 +78,11 @@ public final class DateProcessor implements Processor { data.addField(targetField, ISODateTimeFormat.dateTime().print(dateTime)); } + @Override + public String getType() { + return TYPE; + } + DateTimeZone getTimezone() { return timezone; } @@ -108,7 +113,7 @@ public final class DateProcessor implements Processor { DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); String localeString = ConfigurationUtils.readOptionalStringProperty(config, "locale"); Locale locale = localeString == null ? 
Locale.ENGLISH : Locale.forLanguageTag(localeString); - List matchFormats = ConfigurationUtils.readStringList(config, "match_formats"); + List matchFormats = ConfigurationUtils.readList(config, "match_formats"); return new DateProcessor(timezone, locale, matchField, matchFormats, targetField); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index ae63efd05cb..a50bde0e6cb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -40,7 +40,7 @@ import java.security.AccessController; import java.security.PrivilegedAction; import java.util.*; -import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringList; +import static org.elasticsearch.ingest.processor.ConfigurationUtils.readList; import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringProperty; public final class GeoIpProcessor implements Processor { @@ -91,6 +91,11 @@ public final class GeoIpProcessor implements Processor { data.addField(targetField, geoData); } + @Override + public String getType() { + return TYPE; + } + String getSourceField() { return sourceField; } @@ -222,7 +227,7 @@ public final class GeoIpProcessor implements Processor { final Set fields; if (config.containsKey("fields")) { fields = EnumSet.noneOf(Field.class); - List fieldNames = readStringList(config, "fields"); + List fieldNames = readList(config, "fields"); for (String fieldName : fieldNames) { try { fields.add(Field.parse(fieldName)); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index bdba25c7c78..c0688c42220 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -56,6 +56,11 @@ public final class GrokProcessor implements Processor { } } + @Override + public String getType() { + return TYPE; + } + String getMatchField() { return matchField; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index d09454b1016..4a950bea083 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -134,6 +134,11 @@ public final class MutateProcessor implements Processor { } } + @Override + public String getType() { + return TYPE; + } + private void doUpdate(Data data) { for(Map.Entry entry : update.entrySet()) { data.addField(entry.getKey(), entry.getValue()); @@ -272,6 +277,28 @@ public final class MutateProcessor implements Processor { } } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MutateProcessor that = (MutateProcessor) o; + return Objects.equals(update, that.update) && + Objects.equals(rename, that.rename) && + Objects.equals(convert, that.convert) && + Objects.equals(split, that.split) && + Objects.equals(gsub, that.gsub) && + Objects.equals(join, that.join) && + Objects.equals(remove, that.remove) && + Objects.equals(trim, that.trim) && + Objects.equals(uppercase, that.uppercase) && + Objects.equals(lowercase, that.lowercase); + } + + @Override + public int hashCode() { + return Objects.hash(update, rename, convert, split, gsub, join, remove, trim, uppercase, lowercase); + } + public static final class Factory implements Processor.Factory { @Override public 
MutateProcessor create(Map config) throws IOException { @@ -281,10 +308,10 @@ public final class MutateProcessor implements Processor { Map split = ConfigurationUtils.readOptionalMap(config, "split"); Map> gsubConfig = ConfigurationUtils.readOptionalMap(config, "gsub"); Map join = ConfigurationUtils.readOptionalMap(config, "join"); - List remove = ConfigurationUtils.readOptionalStringList(config, "remove"); - List trim = ConfigurationUtils.readOptionalStringList(config, "trim"); - List uppercase = ConfigurationUtils.readOptionalStringList(config, "uppercase"); - List lowercase = ConfigurationUtils.readOptionalStringList(config, "lowercase"); + List remove = ConfigurationUtils.readOptionalList(config, "remove"); + List trim = ConfigurationUtils.readOptionalList(config, "trim"); + List uppercase = ConfigurationUtils.readOptionalList(config, "uppercase"); + List lowercase = ConfigurationUtils.readOptionalList(config, "lowercase"); // pre-compile regex patterns List gsubExpressions = null; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 94dfb4fb690..e9f7cdd1f88 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -27,6 +27,7 @@ import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; +import org.elasticsearch.plugin.ingest.simulate.SimulateExecutionService; import java.util.HashMap; import java.util.Map; @@ -41,6 +42,7 @@ public class IngestModule extends AbstractModule { binder().bind(PipelineExecutionService.class).asEagerSingleton(); binder().bind(PipelineStore.class).asEagerSingleton(); 
binder().bind(PipelineStoreClient.class).asEagerSingleton(); + binder().bind(SimulateExecutionService.class).asEagerSingleton(); addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 3b2f71aa142..18d656813ec 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -25,11 +25,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.threadpool.ThreadPool; -import java.util.Map; - public class PipelineExecutionService { static final String THREAD_POOL_NAME = IngestPlugin.NAME; @@ -43,25 +40,13 @@ public class PipelineExecutionService { this.threadPool = threadPool; } - public Pipeline getPipeline(String pipelineId) { - Pipeline pipeline = store.get(pipelineId); - - if (pipeline == null) { - throw new IllegalArgumentException(LoggerMessageFormat.format("pipeline with id [{}] does not exist", pipelineId)); - } - - return pipeline; - } - public void execute(Data data, String pipelineId, Listener listener) { - try { - execute(data, getPipeline(pipelineId), listener); - } catch (IllegalArgumentException e) { - listener.failed(e); + Pipeline pipeline = store.get(pipelineId); + if (pipeline == null) { + listener.failed(new IllegalArgumentException(LoggerMessageFormat.format("pipeline with id [{}] does not exist", pipelineId))); + return; } - } - public void execute(Data data, Pipeline pipeline, Listener listener) { 
threadPool.executor(THREAD_POOL_NAME).execute(new Runnable() { @Override public void run() { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java index 983f43d6a1a..c110da28b99 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; public class RestSimulatePipelineAction extends BaseRestHandler { @@ -37,15 +38,20 @@ public class RestSimulatePipelineAction extends BaseRestHandler { super(settings, controller, client); controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); + // controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this); + // controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate", this); } @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { SimulatePipelineRequest request = new SimulatePipelineRequest(); request.id(restRequest.param("id")); - if (restRequest.hasContent()) { - request.source(restRequest.content()); + request.verbose(restRequest.paramAsBoolean("verbose", false)); + + if (RestActions.hasBodyContent(restRequest)) { + request.source(RestActions.getRestContent(restRequest)); } + client.execute(SimulatePipelineAction.INSTANCE, request, new 
RestStatusToXContentListener<>(channel)); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequest.java new file mode 100644 index 00000000000..d9f2dc4dc0c --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequest.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.plugin.ingest.simulate; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.plugin.ingest.PipelineStore; + +import java.io.IOException; +import java.util.*; + +public class ParsedSimulateRequest { + private final List documents; + private final Pipeline pipeline; + private final boolean verbose; + + ParsedSimulateRequest(Pipeline pipeline, List documents, boolean verbose) { + this.pipeline = pipeline; + this.documents = Collections.unmodifiableList(documents); + this.verbose = verbose; + } + + public Pipeline getPipeline() { + return pipeline; + } + + public List getDocuments() { + return documents; + } + + public boolean isVerbose() { + return verbose; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ParsedSimulateRequest that = (ParsedSimulateRequest) o; + return Objects.equals(verbose, that.verbose) && + Objects.equals(documents, that.documents) && + Objects.equals(pipeline, that.pipeline); + } + + @Override + public int hashCode() { + return Objects.hash(documents, pipeline, verbose); + } + + public static class Parser { + private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory(); + public static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline"; + + public ParsedSimulateRequest parse(String pipelineId, Map config, boolean verbose, PipelineStore pipelineStore) throws IOException { + Pipeline pipeline; + // if pipeline `id` passed to request, fetch pipeline from store. 
+ if (pipelineId != null) { + pipeline = pipelineStore.get(pipelineId); + } else { + Map pipelineConfig = ConfigurationUtils.readOptionalMap(config, "pipeline"); + pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); + } + + List> docs = ConfigurationUtils.readList(config, "docs"); + + List dataList = new ArrayList<>(); + + for (int i = 0; i < docs.size(); i++) { + Map dataMap = docs.get(i); + Map document = ConfigurationUtils.readOptionalMap(dataMap, "_source"); + if (document == null) { + document = Collections.emptyMap(); + } + Data data = new Data(ConfigurationUtils.readOptionalStringProperty(dataMap, "_index"), + ConfigurationUtils.readOptionalStringProperty(dataMap, "_type"), + ConfigurationUtils.readOptionalStringProperty(dataMap, "_id"), + document); + dataList.add(data); + } + + return new ParsedSimulateRequest(pipeline, dataList, verbose); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ProcessedData.java similarity index 75% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ProcessedData.java index 5377b8e0ea0..238a1e79d40 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ProcessedData.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.plugin.ingest.simulate; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,24 +31,26 @@ import java.io.IOException; import java.util.Map; import java.util.Objects; -public class SimulatedItemResponse implements Streamable, StatusToXContent { +public class ProcessedData implements Streamable, StatusToXContent { + private String processorId; private Data data; private Throwable failure; - public SimulatedItemResponse() { + public ProcessedData() { } - public SimulatedItemResponse(Data data) { + public ProcessedData(String processorId, Data data) { + this.processorId = processorId; this.data = data; } - public SimulatedItemResponse(Throwable failure) { + public ProcessedData(Throwable failure) { this.failure = failure; } - public boolean failed() { + public boolean isFailed() { return this.failure != null; } @@ -56,14 +58,18 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { return data; } + public String getProcessorId() { + return processorId; + } + @Override public void readFrom(StreamInput in) throws IOException { - boolean failed = in.readBoolean(); - - if (failed) { + boolean isFailure = in.readBoolean(); + if (isFailure) { this.failure = in.readThrowable(); // TODO(talevy): check out mget for throwable limitations } else { + this.processorId = in.readString(); String index = in.readString(); String type = in.readString(); String id = in.readString(); @@ -74,11 +80,11 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(failed()); - - if (failed()) { + out.writeBoolean(isFailed()); + if (isFailed()) { out.writeThrowable(failure); } else { + out.writeString(processorId); out.writeString(data.getIndex()); out.writeString(data.getType()); out.writeString(data.getId()); 
@@ -89,8 +95,9 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Fields.ERROR, failed()); - if (failed()) { + builder.field(Fields.PROCESSOR_ID, processorId); + builder.field(Fields.ERROR, isFailed()); + if (isFailed()) { builder.field(Fields.FAILURE, failure.toString()); } else { builder.field(Fields.MODIFIED, data.isModified()); @@ -102,7 +109,7 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { @Override public RestStatus status() { - if (failed()) { + if (isFailed()) { return RestStatus.BAD_REQUEST; } else { return RestStatus.OK; @@ -115,17 +122,18 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { if (obj == null || getClass() != obj.getClass()) { return false; } - SimulatedItemResponse other = (SimulatedItemResponse) obj; - return Objects.equals(data, other.data) && Objects.equals(failure, other.failure); + ProcessedData other = (ProcessedData) obj; + return Objects.equals(processorId, other.processorId) && Objects.equals(data, other.data) && Objects.equals(failure, other.failure); } @Override public int hashCode() { - return Objects.hash(data, failure); + return Objects.hash(processorId, data, failure); } static final class Fields { static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); + static final XContentBuilderString PROCESSOR_ID = new XContentBuilderString("processor_id"); static final XContentBuilderString ERROR = new XContentBuilderString("error"); static final XContentBuilderString FAILURE = new XContentBuilderString("failure"); static final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionService.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionService.java new file mode 100644 index 00000000000..d7949532c0b --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionService.java @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.simulate; + +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.List; + +public class SimulateExecutionService { + + static final String THREAD_POOL_NAME = ThreadPool.Names.MANAGEMENT; + + private final ThreadPool threadPool; + + @Inject + public SimulateExecutionService(ThreadPool threadPool) { + this.threadPool = threadPool; + } + + + SimulatedItemResponse executeItem(Pipeline pipeline, Data data, boolean verbose) { + try { + if (verbose) { + return executeVerboseItem(pipeline, data); + } else { + pipeline.execute(data); + return new SimulatedItemResponse(data); + } + } catch (Exception e) { + return new SimulatedItemResponse(e); + } + + } + + SimulatedItemResponse executeVerboseItem(Pipeline pipeline, Data data) { + List processedDataList = new ArrayList<>(); + Data currentData = new Data(data); + for (int i = 0; i < pipeline.getProcessors().size(); i++) { + Processor processor = pipeline.getProcessors().get(i); + String processorId = "processor[" + processor.getType() + "]-" + i; + + processor.execute(currentData); + processedDataList.add(new ProcessedData(processorId, currentData)); + + currentData = new Data(currentData); + } + return new SimulatedItemResponse(processedDataList); + } + + SimulatePipelineResponse execute(ParsedSimulateRequest request) { + List responses = new ArrayList<>(); + for (Data data : request.getDocuments()) { + responses.add(executeItem(request.getPipeline(), data, request.isVerbose())); + } + return new SimulatePipelineResponse(request.getPipeline().getId(), responses); + } + + public void execute(ParsedSimulateRequest request, Listener listener) { + 
threadPool.executor(THREAD_POOL_NAME).execute(new Runnable() { + @Override + public void run() { + SimulatePipelineResponse response = execute(request); + listener.onResponse(response); + } + }); + } + + public interface Listener { + void onResponse(SimulatePipelineResponse response); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulatedItemResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulatedItemResponse.java new file mode 100644 index 00000000000..0fff29ed0c4 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulatedItemResponse.java @@ -0,0 +1,171 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.plugin.ingest.simulate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class SimulatedItemResponse implements Streamable, StatusToXContent { + + private Data data; + private List processedDataList; + private Throwable failure; + + public SimulatedItemResponse() { + + } + + public SimulatedItemResponse(Data data) { + this.data = data; + } + + public SimulatedItemResponse(List processedDataList) { + this.processedDataList = processedDataList; + } + + public SimulatedItemResponse(Throwable failure) { + this.failure = failure; + } + + public boolean isFailed() { + return this.failure != null; + } + + public boolean isVerbose() { + return this.processedDataList != null; + } + + public Data getData() { + return data; + } + + public List getProcessedDataList() { + return processedDataList; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + boolean isFailed = in.readBoolean(); + boolean isVerbose = in.readBoolean(); + if (isFailed) { + this.failure = in.readThrowable(); + // TODO(talevy): check out mget for throwable limitations + } else if (isVerbose) { + int size = in.readVInt(); + processedDataList = new ArrayList<>(); + for (int i = 0; i < size; i++) { + ProcessedData processedData = new ProcessedData(); + processedData.readFrom(in); + processedDataList.add(processedData); + } + } else { + String index = in.readString(); + String type = in.readString(); + String id = in.readString(); + 
Map doc = in.readMap(); + this.data = new Data(index, type, id, doc); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(isFailed()); + out.writeBoolean(isVerbose()); + + if (isFailed()) { + out.writeThrowable(failure); + } else if (isVerbose()) { + out.writeVInt(processedDataList.size()); + for (ProcessedData p : processedDataList) { + p.writeTo(out); + } + } else { + out.writeString(data.getIndex()); + out.writeString(data.getType()); + out.writeString(data.getId()); + out.writeMap(data.getDocument()); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.ERROR, isFailed()); + builder.field(Fields.VERBOSE, isVerbose()); + if (isFailed()) { + builder.field(Fields.FAILURE, failure.toString()); + } else if (isVerbose()) { + builder.startArray(Fields.PROCESSOR_STEPS); + for (ProcessedData processedData : processedDataList) { + builder.value(processedData); + } + builder.endArray(); + } else { + builder.field(Fields.MODIFIED, data.isModified()); + builder.field(Fields.DOCUMENT, data.getDocument()); + } + builder.endObject(); + return builder; + } + + @Override + public RestStatus status() { + if (isFailed()) { + return RestStatus.BAD_REQUEST; + } else { + return RestStatus.OK; + } + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { return true; } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SimulatedItemResponse other = (SimulatedItemResponse) obj; + return Objects.equals(data, other.data) && Objects.equals(processedDataList, other.processedDataList) && Objects.equals(failure, other.failure); + } + + @Override + public int hashCode() { + return Objects.hash(data, processedDataList, failure); + } + + static final class Fields { + static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); + static final XContentBuilderString ERROR = 
new XContentBuilderString("error"); + static final XContentBuilderString VERBOSE = new XContentBuilderString("verbose"); + static final XContentBuilderString FAILURE = new XContentBuilderString("failure"); + static final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); + static final XContentBuilderString PROCESSOR_STEPS = new XContentBuilderString("processor_steps"); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java index 2bf7c01bf24..8c5a5f1ef71 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -32,6 +32,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; public class SimulatePipelineRequest extends ActionRequest { private String id; + private boolean verbose; private BytesReference source; @Override @@ -51,6 +52,14 @@ public class SimulatePipelineRequest extends ActionRequest { this.id = id; } + public boolean verbose() { + return verbose; + } + + public void verbose(boolean verbose) { + this.verbose = verbose; + } + public BytesReference source() { return source; } @@ -63,6 +72,7 @@ public class SimulatePipelineRequest extends ActionRequest { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); id = in.readString(); + verbose = in.readBoolean(); source = in.readBytesReference(); } @@ -70,6 +80,7 @@ public class SimulatePipelineRequest extends ActionRequest { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); + out.writeBoolean(verbose); out.writeBytesReference(source); } } diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java index 8f446b75238..7b140b345f4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java @@ -34,6 +34,11 @@ public class SimulatePipelineRequestBuilder extends ActionRequestBuilder documents; - private final Pipeline pipeline; - - public SimulatePipelineRequestPayload(Pipeline pipeline, List documents) { - this.pipeline = pipeline; - this.documents = Collections.unmodifiableList(documents); - } - - public String pipelineId() { - return pipeline.getId(); - } - - public Pipeline pipeline() { - return pipeline; - } - - - public List documents() { - return documents; - } - - public SimulatePipelineResponse execute() { - List responses = new ArrayList<>(); - for (Data data : documents) { - try { - pipeline.execute(data); - responses.add(new SimulatedItemResponse(data)); - } catch (Exception e) { - responses.add(new SimulatedItemResponse(e)); - } - } - return new SimulatePipelineResponse(pipeline.getId(), responses); - } - - public static class Factory { - - public SimulatePipelineRequestPayload create(String pipelineId, Map config, PipelineStore pipelineStore) throws IOException { - Pipeline pipeline; - // if pipeline `id` passed to request, fetch pipeline from store. 
- if (pipelineId != null) { - pipeline = pipelineStore.get(pipelineId); - } else { - Map pipelineConfig = (Map) config.get("pipeline"); - pipeline = (new Pipeline.Factory()).create("_pipeline_id", pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); - } - - // distribute docs by shard key to SimulateShardPipelineResponse - List> docs = (List>) config.get("docs"); - - List dataList = new ArrayList<>(); - - for (int i = 0; i < docs.size(); i++) { - Map dataMap = docs.get(i); - Map document = (Map) dataMap.get("_source"); - Data data = new Data(ConfigurationUtils.readStringProperty(dataMap, "_index", null), - ConfigurationUtils.readStringProperty(dataMap, "_type", null), - ConfigurationUtils.readStringProperty(dataMap, "_id", null), - document); - dataList.add(data); - } - - return new SimulatePipelineRequestPayload(pipeline, dataList); - } - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java index aeed148d74a..6659635f7b0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.plugin.ingest.simulate.SimulatedItemResponse; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -100,7 +101,7 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo @Override public RestStatus status() { for (SimulatedItemResponse response : responses) { - if (response.failed()) { + if 
(response.isFailed()) { return RestStatus.BAD_REQUEST; } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index c4f3484ca82..f43d0ec03d0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.plugin.ingest.simulate.ParsedSimulateRequest; import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.simulate.SimulateExecutionService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -34,32 +37,33 @@ import java.io.IOException; import java.util.Map; public class SimulatePipelineTransportAction extends HandledTransportAction { - private final PipelineStore pipelineStore; + private final SimulateExecutionService executionService; @Inject - public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStore pipelineStore) { + public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStore pipelineStore, SimulateExecutionService executionService) { super(settings, SimulatePipelineAction.NAME, 
threadPool, transportService, actionFilters, indexNameExpressionResolver, SimulatePipelineRequest::new); this.pipelineStore = pipelineStore; + this.executionService = executionService; } @Override protected void doExecute(SimulatePipelineRequest request, ActionListener listener) { Map source = XContentHelper.convertToMap(request.source(), false).v2(); - SimulatePipelineRequestPayload payload; - SimulatePipelineRequestPayload.Factory factory = new SimulatePipelineRequestPayload.Factory(); + ParsedSimulateRequest payload; + ParsedSimulateRequest.Parser parser = new ParsedSimulateRequest.Parser(); try { - payload = factory.create(request.id(), source, pipelineStore); + payload = parser.parse(request.id(), source, request.verbose(), pipelineStore); } catch (IOException e) { listener.onFailure(e); return; } - threadPool.executor(ThreadPool.Names.MANAGEMENT).execute(new Runnable() { + executionService.execute(payload, new SimulateExecutionService.Listener() { @Override - public void run() { - listener.onResponse(payload.execute()); + public void onResponse(SimulatePipelineResponse response) { + listener.onResponse(response); } }); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index d0d11ae60a0..61fcad9a3c3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -33,7 +33,7 @@ import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatedItemResponse; +import 
org.elasticsearch.plugin.ingest.simulate.SimulatedItemResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java index 274a952f935..38c4e2493a2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java @@ -59,7 +59,7 @@ public class ConfigurationUtilsTests extends ESTestCase { // TODO(talevy): Issue with generics. This test should fail, "int" is of type List public void testOptional_InvalidType() { - List val = ConfigurationUtils.readStringList(config, "int"); + List val = ConfigurationUtils.readList(config, "int"); assertThat(val, equalTo(Arrays.asList(2))); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequestParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequestParserTests.java new file mode 100644 index 00000000000..01f3beee391 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequestParserTests.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.simulate; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.mutate.MutateProcessor; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ParsedSimulateRequestParserTests extends ESTestCase { + private static final ParsedSimulateRequest.Parser PARSER = new ParsedSimulateRequest.Parser(); + + private Map processorRegistry; + private PipelineStore store; + private Processor processor; + private Pipeline pipeline; + private Data data; + + @Before + public void init() throws IOException { + List uppercase = Collections.unmodifiableList(Collections.singletonList("foo")); + processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); + pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Collections.unmodifiableList(Arrays.asList(processor))); + data = new Data("_index", "_type", "_id", Collections.emptyMap()); + processorRegistry = new HashMap<>(); + processorRegistry.put("mutate", new MutateProcessor.Factory()); + store = mock(PipelineStore.class); + when(store.get("_id")).thenReturn(pipeline); + 
when(store.getProcessorFactoryRegistry()).thenReturn(processorRegistry); + } + + public void testParse_UsingPipelineStore() throws Exception { + ParsedSimulateRequest expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); + + Map raw = new HashMap<>(); + List> docs = new ArrayList<>(); + Map doc = new HashMap<>(); + doc.put("_index", "_index"); + doc.put("_type", "_type"); + doc.put("_id", "_id"); + docs.add(doc); + raw.put("docs", docs); + + ParsedSimulateRequest actualRequest = PARSER.parse("_id", raw, false, store); + assertThat(actualRequest, equalTo(expectedRequest)); + } + + public void testParse_ProvidedPipeline() throws Exception { + ParsedSimulateRequest expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); + + Map raw = new HashMap<>(); + List> docs = new ArrayList<>(); + Map doc = new HashMap<>(); + doc.put("_index", "_index"); + doc.put("_type", "_type"); + doc.put("_id", "_id"); + docs.add(doc); + + Map processorConfig = new HashMap<>(); + processorConfig.put("uppercase", Arrays.asList("foo")); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig))); + + raw.put("docs", docs); + raw.put("pipeline", pipelineConfig); + + ParsedSimulateRequest actualRequest = PARSER.parse(null, raw, false, store); + assertThat(actualRequest, equalTo(expectedRequest)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionServiceTests.java new file mode 100644 index 00000000000..f622db68d95 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionServiceTests.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.simulate; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.Arrays; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.*; + +public class SimulateExecutionServiceTests extends ESTestCase { + + private PipelineStore store; + private ThreadPool threadPool; + private SimulateExecutionService executionService; + private Pipeline pipeline; + private Processor processor; + private Data data; + + @Before + public void setup() { + store = mock(PipelineStore.class); + threadPool = new ThreadPool( + Settings.builder() + .put("name", "_name") + .build() + ); + executionService = new SimulateExecutionService(threadPool); + processor = mock(Processor.class); + when(processor.getType()).thenReturn("mock"); + pipeline = new Pipeline("_id", 
"_description", Arrays.asList(processor, processor)); + data = new Data("_index", "_type", "_id", Collections.emptyMap()); + } + + @After + public void destroy() { + threadPool.shutdown(); + } + + public void testExecuteVerboseItem() throws Exception { + SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse( + Arrays.asList(new ProcessedData("processor[mock]-0", data), new ProcessedData("processor[mock]-1", data))); + SimulatedItemResponse actualItemResponse = executionService.executeVerboseItem(pipeline, data); + verify(processor, times(2)).execute(data); + assertThat(actualItemResponse, equalTo(expectedItemResponse)); + } + + public void testExecuteItem_verboseSuccessful() throws Exception { + SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse( + Arrays.asList(new ProcessedData("processor[mock]-0", data), new ProcessedData("processor[mock]-1", data))); + SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data, true); + verify(processor, times(2)).execute(data); + assertThat(actualItemResponse, equalTo(expectedItemResponse)); + } + + public void testExecuteItem_Simple() throws Exception { + SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse(data); + SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data, false); + verify(processor, times(2)).execute(data); + assertThat(actualItemResponse, equalTo(expectedItemResponse)); + } + + public void testExecuteItem_Failure() throws Exception { + Exception e = new RuntimeException("processor failed"); + SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse(e); + doThrow(e).when(processor).execute(data); + SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data, false); + verify(processor, times(1)).execute(data); + assertThat(actualItemResponse, equalTo(expectedItemResponse)); + } + + public void testExecute() throws Exception { + 
SimulateExecutionService.Listener listener = mock(SimulateExecutionService.Listener.class); + SimulatedItemResponse itemResponse = new SimulatedItemResponse(data); + ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); + executionService.execute(request, listener); + SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); + assertBusy(new Runnable() { + @Override + public void run() { + verify(processor, times(2)).execute(data); + verify(listener).onResponse(response); + } + }); + } + + public void testExecute_Verbose() throws Exception { + SimulateExecutionService.Listener listener = mock(SimulateExecutionService.Listener.class); + ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), true); + SimulatedItemResponse itemResponse = new SimulatedItemResponse( + Arrays.asList(new ProcessedData("processor[mock]-0", data), new ProcessedData("processor[mock]-1", data))); + executionService.execute(request, listener); + SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); + assertBusy(new Runnable() { + @Override + public void run() { + verify(processor, times(2)).execute(data); + verify(listener).onResponse(response); + } + }); + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json index bf08435eb8e..bef89fed54d 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json @@ -13,6 +13,11 @@ } }, "params": { + "verbose": { + "type" : "boolean", + "description" : "Verbose mode. 
Display data output for each processor in executed pipeline", + "default" : false + } } }, "body": { diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index eb68a439bcb..7c8764472e0 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -12,11 +12,10 @@ "description": "_description", "processors": [ { - "simple" : { - "path" : "field1", - "expected_value" : "_value", - "add_field" : "field2", - "add_field_value" : "_value" + "mutate" : { + "update" : { + "field2" : "_value" + } } } ] @@ -48,4 +47,144 @@ ] } - length: { docs: 1 } + - is_false: docs.0.error + - is_true: docs.0.modified + - match: { docs.0.foo: "bar" } + - match: { docs.0.field2: "_value" } + +--- +"Test simulate with provided pipeline definition": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.simulate: + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "mutate" : { + "update" : { + "field2" : "_value" + } + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { docs: 1 } + +--- +"Test simulate with verbose flag": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.simulate: + verbose: true + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "mutate" : { + "update" : { + "field2" : "_value" + } + } + }, + { + "mutate" : { + "update" : { + "field3" : "third_val" + } + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { docs: 1 } + - length: { docs.0.processor_steps: 2 } + - match: { docs.0.processor_steps.0.processor_id: "processor[mutate]-0" } + - is_false: 
docs.0.processor_steps.0.error + - is_true: docs.0.processor_steps.0.modified + - length: { docs.0.processor_steps.0.doc: 2 } + - match: { docs.0.processor_steps.0.doc.foo: "bar" } + - match: { docs.0.processor_steps.0.doc.field2: "_value" } + - length: { docs.0.processor_steps.1.doc: 3 } + - match: { docs.0.processor_steps.1.doc.foo: "bar" } + - match: { docs.0.processor_steps.1.doc.field2: "_value" } + - match: { docs.0.processor_steps.1.doc.field3: "third_val" } + +--- +"Test simulate with exception thrown": + - do: + cluster.health: + wait_for_status: green + + - do: + catch: request + ingest.simulate: + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "mutate" : { + "uppercase" : ["foo"] + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "not_foo": "bar" + } + }, + { + "_index": "index", + "_type": "type", + "_id": "id2", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { docs: 2 } + - is_true: docs.0.error + - match: { docs.0.failure: "java.lang.NullPointerException" } + - is_false: docs.1.error + - is_true: docs.1.modified + - match: { docs.1.doc.foo: "BAR" } From 674084973dbdb5147c2d3e448dc1faf2ef861d7e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 11 Nov 2015 21:51:45 -0800 Subject: [PATCH 048/347] moar updates --- .../java/org/elasticsearch/ingest/Data.java | 11 ++- .../plugin/ingest/IngestModule.java | 2 +- .../rest/RestSimulatePipelineAction.java | 6 +- .../simulate/ParsedSimulateRequest.java | 2 +- .../simulate/ProcessorResult.java} | 33 +++---- .../simulate/SimulateExecutionService.java | 31 +++--- .../simulate/SimulatePipelineResponse.java | 1 - .../SimulatePipelineTransportAction.java | 9 +- .../simulate/SimulatedItemResponse.java | 78 ++++++++------- .../elasticsearch/ingest/IngestClientIT.java | 2 +- .../ParsedSimulateRequestParserTests.java | 2 +- .../SimulateExecutionServiceTests.java | 26 ++--- .../rest-api-spec/api/ingest.simulate.json | 4 
+- .../test/ingest/80_simulate.yaml | 94 ++++++++++++++++--- 14 files changed, 182 insertions(+), 119 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{ => transport}/simulate/ParsedSimulateRequest.java (98%) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{simulate/ProcessedData.java => transport/simulate/ProcessorResult.java} (81%) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{ => transport}/simulate/SimulateExecutionService.java (78%) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{ => transport}/simulate/SimulatedItemResponse.java (66%) rename plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/{ => transport}/simulate/ParsedSimulateRequestParserTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/{ => transport}/simulate/SimulateExecutionServiceTests.java (86%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index dd1bfde7923..543fd9c1c4b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -22,7 +22,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import java.lang.reflect.Array; import java.util.*; /** @@ -131,6 +130,16 @@ public final class Data { return modified; } + public Map asMap() { + Map dataMap = new HashMap<>(); + dataMap.put("_index", index); + dataMap.put("_type", type); + dataMap.put("_id", id); + dataMap.put("_source", document); + + return dataMap; + } + @Override public boolean equals(Object obj) { if (obj == this) { return true; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 
e9f7cdd1f88..ae685c75d5a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -27,7 +27,7 @@ import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; -import org.elasticsearch.plugin.ingest.simulate.SimulateExecutionService; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulateExecutionService; import java.util.HashMap; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java index c110da28b99..ac18c2dfeef 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java @@ -36,10 +36,10 @@ public class RestSimulatePipelineAction extends BaseRestHandler { @Inject public RestSimulatePipelineAction(Settings settings, RestController controller, Client client) { super(settings, controller, client); - controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); + controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate/{id}", this); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate/{id}", this); controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); - // controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this); - // controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate", this); + controller.registerHandler(RestRequest.Method.GET, 
"/_ingest/pipeline/_simulate", this); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java similarity index 98% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequest.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java index d9f2dc4dc0c..e39596c777e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.plugin.ingest.simulate; +package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ProcessedData.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ProcessorResult.java similarity index 81% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ProcessedData.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ProcessorResult.java index 238a1e79d40..3f2073e480b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/ProcessedData.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ProcessorResult.java @@ -16,37 +16,38 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.ingest.simulate; +package org.elasticsearch.plugin.ingest.transport.simulate; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.Data; -import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.Map; import java.util.Objects; -public class ProcessedData implements Streamable, StatusToXContent { +public class ProcessorResult implements Streamable, ToXContent { private String processorId; private Data data; private Throwable failure; - public ProcessedData() { + public ProcessorResult() { } - public ProcessedData(String processorId, Data data) { + public ProcessorResult(String processorId, Data data) { this.processorId = processorId; this.data = data; } - public ProcessedData(Throwable failure) { + public ProcessorResult(String processorId, Throwable failure) { + this.processorId = processorId; this.failure = failure; } @@ -67,7 +68,6 @@ public class ProcessedData implements Streamable, StatusToXContent { boolean isFailure = in.readBoolean(); if (isFailure) { this.failure = in.readThrowable(); - // TODO(talevy): check out mget for throwable limitations } else { this.processorId = in.readString(); String index = in.readString(); @@ -98,31 +98,22 @@ public class ProcessedData implements Streamable, StatusToXContent { builder.field(Fields.PROCESSOR_ID, processorId); builder.field(Fields.ERROR, isFailed()); if (isFailed()) { - builder.field(Fields.FAILURE, failure.toString()); + builder.field(Fields.ERROR_MESSAGE, ExceptionsHelper.detailedMessage(failure)); } else { 
builder.field(Fields.MODIFIED, data.isModified()); - builder.field(Fields.DOCUMENT, data.getDocument()); + builder.field(Fields.DOCUMENT, data.asMap()); } builder.endObject(); return builder; } - @Override - public RestStatus status() { - if (isFailed()) { - return RestStatus.BAD_REQUEST; - } else { - return RestStatus.OK; - } - } - @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } - ProcessedData other = (ProcessedData) obj; + ProcessorResult other = (ProcessorResult) obj; return Objects.equals(processorId, other.processorId) && Objects.equals(data, other.data) && Objects.equals(failure, other.failure); } @@ -135,7 +126,7 @@ public class ProcessedData implements Streamable, StatusToXContent { static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); static final XContentBuilderString PROCESSOR_ID = new XContentBuilderString("processor_id"); static final XContentBuilderString ERROR = new XContentBuilderString("error"); - static final XContentBuilderString FAILURE = new XContentBuilderString("failure"); + static final XContentBuilderString ERROR_MESSAGE = new XContentBuilderString("error_message"); static final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java similarity index 78% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionService.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java index d7949532c0b..119bbc91ea8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionService.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java @@ -17,13 +17,12 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.simulate; +package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; @@ -41,14 +40,10 @@ public class SimulateExecutionService { } - SimulatedItemResponse executeItem(Pipeline pipeline, Data data, boolean verbose) { + SimulatedItemResponse executeItem(Pipeline pipeline, Data data) { try { - if (verbose) { - return executeVerboseItem(pipeline, data); - } else { - pipeline.execute(data); - return new SimulatedItemResponse(data); - } + pipeline.execute(data); + return new SimulatedItemResponse(data); } catch (Exception e) { return new SimulatedItemResponse(e); } @@ -56,24 +51,32 @@ public class SimulateExecutionService { } SimulatedItemResponse executeVerboseItem(Pipeline pipeline, Data data) { - List processedDataList = new ArrayList<>(); + List processorResultList = new ArrayList<>(); Data currentData = new Data(data); for (int i = 0; i < pipeline.getProcessors().size(); i++) { Processor processor = pipeline.getProcessors().get(i); String processorId = "processor[" + processor.getType() + "]-" + i; - processor.execute(currentData); - processedDataList.add(new ProcessedData(processorId, currentData)); + try { + processor.execute(currentData); + processorResultList.add(new ProcessorResult(processorId, currentData)); + } catch (Exception e) { + processorResultList.add(new ProcessorResult(processorId, e)); + } currentData = new Data(currentData); } - return new SimulatedItemResponse(processedDataList); + return new 
SimulatedItemResponse(processorResultList); } SimulatePipelineResponse execute(ParsedSimulateRequest request) { List responses = new ArrayList<>(); for (Data data : request.getDocuments()) { - responses.add(executeItem(request.getPipeline(), data, request.isVerbose())); + if (request.isVerbose()) { + responses.add(executeVerboseItem(request.getPipeline(), data)); + } else { + responses.add(executeItem(request.getPipeline(), data)); + } } return new SimulatePipelineResponse(request.getPipeline().getId(), responses); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java index 6659635f7b0..04b988a3ce3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.plugin.ingest.simulate.SimulatedItemResponse; import org.elasticsearch.rest.RestStatus; import java.io.IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index f43d0ec03d0..d52a92715a3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -26,10 +26,7 @@ import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.plugin.ingest.simulate.ParsedSimulateRequest; import org.elasticsearch.plugin.ingest.PipelineStore; -import org.elasticsearch.plugin.ingest.simulate.SimulateExecutionService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -51,16 +48,16 @@ public class SimulatePipelineTransportAction extends HandledTransportAction listener) { Map source = XContentHelper.convertToMap(request.source(), false).v2(); - ParsedSimulateRequest payload; + ParsedSimulateRequest simulateRequest; ParsedSimulateRequest.Parser parser = new ParsedSimulateRequest.Parser(); try { - payload = parser.parse(request.id(), source, request.verbose(), pipelineStore); + simulateRequest = parser.parse(request.id(), source, request.verbose(), pipelineStore); } catch (IOException e) { listener.onFailure(e); return; } - executionService.execute(payload, new SimulateExecutionService.Listener() { + executionService.execute(simulateRequest, new SimulateExecutionService.Listener() { @Override public void onResponse(SimulatePipelineResponse response) { listener.onResponse(response); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulatedItemResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java similarity index 66% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulatedItemResponse.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java index 0fff29ed0c4..d18e16bd56b 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/simulate/SimulatedItemResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java @@ -16,16 +16,16 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.plugin.ingest.simulate; +package org.elasticsearch.plugin.ingest.transport.simulate; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.Data; -import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.ArrayList; @@ -33,10 +33,10 @@ import java.util.List; import java.util.Map; import java.util.Objects; -public class SimulatedItemResponse implements Streamable, StatusToXContent { +public class SimulatedItemResponse implements Streamable, ToXContent { private Data data; - private List processedDataList; + private List processorResultList; private Throwable failure; public SimulatedItemResponse() { @@ -47,8 +47,8 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { this.data = data; } - public SimulatedItemResponse(List processedDataList) { - this.processedDataList = processedDataList; + public SimulatedItemResponse(List processorResultList) { + this.processorResultList = processorResultList; } public SimulatedItemResponse(Throwable failure) { @@ -56,19 +56,29 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { } public boolean isFailed() { - return this.failure != null; + if (failure != null) { + return true; + } else if 
(processorResultList != null) { + for (ProcessorResult result : processorResultList) { + if (result.isFailed()) { + return true; + } + } + } + + return false; } public boolean isVerbose() { - return this.processedDataList != null; + return this.processorResultList != null; } public Data getData() { return data; } - public List getProcessedDataList() { - return processedDataList; + public List getProcessorResultList() { + return processorResultList; } @Override @@ -77,14 +87,13 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { boolean isVerbose = in.readBoolean(); if (isFailed) { this.failure = in.readThrowable(); - // TODO(talevy): check out mget for throwable limitations } else if (isVerbose) { int size = in.readVInt(); - processedDataList = new ArrayList<>(); + processorResultList = new ArrayList<>(); for (int i = 0; i < size; i++) { - ProcessedData processedData = new ProcessedData(); - processedData.readFrom(in); - processedDataList.add(processedData); + ProcessorResult processorResult = new ProcessorResult(); + processorResult.readFrom(in); + processorResultList.add(processorResult); } } else { String index = in.readString(); @@ -100,11 +109,11 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { out.writeBoolean(isFailed()); out.writeBoolean(isVerbose()); - if (isFailed()) { + if (failure != null) { out.writeThrowable(failure); } else if (isVerbose()) { - out.writeVInt(processedDataList.size()); - for (ProcessedData p : processedDataList) { + out.writeVInt(processorResultList.size()); + for (ProcessorResult p : processorResultList) { p.writeTo(out); } } else { @@ -119,32 +128,22 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Fields.ERROR, isFailed()); - builder.field(Fields.VERBOSE, isVerbose()); - if (isFailed()) { - 
builder.field(Fields.FAILURE, failure.toString()); + if (failure != null) { + builder.field(Fields.ERROR_MESSAGE, ExceptionsHelper.detailedMessage(failure)); } else if (isVerbose()) { - builder.startArray(Fields.PROCESSOR_STEPS); - for (ProcessedData processedData : processedDataList) { - builder.value(processedData); + builder.startArray(Fields.PROCESSOR_RESULTS); + for (ProcessorResult processorResult : processorResultList) { + builder.value(processorResult); } builder.endArray(); } else { builder.field(Fields.MODIFIED, data.isModified()); - builder.field(Fields.DOCUMENT, data.getDocument()); + builder.field(Fields.DOCUMENT, data.asMap()); } builder.endObject(); return builder; } - @Override - public RestStatus status() { - if (isFailed()) { - return RestStatus.BAD_REQUEST; - } else { - return RestStatus.OK; - } - } - @Override public boolean equals(Object obj) { if (obj == this) { return true; } @@ -152,20 +151,19 @@ public class SimulatedItemResponse implements Streamable, StatusToXContent { return false; } SimulatedItemResponse other = (SimulatedItemResponse) obj; - return Objects.equals(data, other.data) && Objects.equals(processedDataList, other.processedDataList) && Objects.equals(failure, other.failure); + return Objects.equals(data, other.data) && Objects.equals(processorResultList, other.processorResultList) && Objects.equals(failure, other.failure); } @Override public int hashCode() { - return Objects.hash(data, processedDataList, failure); + return Objects.hash(data, processorResultList, failure); } static final class Fields { static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); static final XContentBuilderString ERROR = new XContentBuilderString("error"); - static final XContentBuilderString VERBOSE = new XContentBuilderString("verbose"); - static final XContentBuilderString FAILURE = new XContentBuilderString("failure"); + static final XContentBuilderString ERROR_MESSAGE = new XContentBuilderString("error_message"); static 
final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); - static final XContentBuilderString PROCESSOR_STEPS = new XContentBuilderString("processor_steps"); + static final XContentBuilderString PROCESSOR_RESULTS = new XContentBuilderString("processor_results"); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 61fcad9a3c3..d0d11ae60a0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -33,7 +33,7 @@ import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; -import org.elasticsearch.plugin.ingest.simulate.SimulatedItemResponse; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatedItemResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequestParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequestParserTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java index 01f3beee391..d88bf7a5467 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/ParsedSimulateRequestParserTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.simulate; +package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java similarity index 86% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionServiceTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index f622db68d95..8ed2cc6abec 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -17,14 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.simulate; +package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.plugin.ingest.PipelineStore; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -67,23 +66,26 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteVerboseItem() throws Exception { SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse( - Arrays.asList(new ProcessedData("processor[mock]-0", data), new ProcessedData("processor[mock]-1", data))); + Arrays.asList(new ProcessorResult("processor[mock]-0", data), new ProcessorResult("processor[mock]-1", data))); SimulatedItemResponse actualItemResponse = executionService.executeVerboseItem(pipeline, data); verify(processor, times(2)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } - public void testExecuteItem_verboseSuccessful() throws Exception { - SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse( - Arrays.asList(new ProcessedData("processor[mock]-0", data), new ProcessedData("processor[mock]-1", data))); - SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data, true); + public void testExecuteItem() throws Exception { + SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse(data); + SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(2)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } - public void testExecuteItem_Simple() throws Exception { - SimulatedItemResponse expectedItemResponse = new 
SimulatedItemResponse(data); - SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data, false); + public void testExecuteVerboseItem_Failure() throws Exception { + Exception e = new RuntimeException("processor failed"); + SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse( + Arrays.asList(new ProcessorResult("processor[mock]-0", e), new ProcessorResult("processor[mock]-1", data)) + ); + doThrow(e).doNothing().when(processor).execute(data); + SimulatedItemResponse actualItemResponse = executionService.executeVerboseItem(pipeline, data); verify(processor, times(2)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } @@ -92,7 +94,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { Exception e = new RuntimeException("processor failed"); SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse(e); doThrow(e).when(processor).execute(data); - SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data, false); + SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(1)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } @@ -116,7 +118,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { SimulateExecutionService.Listener listener = mock(SimulateExecutionService.Listener.class); ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), true); SimulatedItemResponse itemResponse = new SimulatedItemResponse( - Arrays.asList(new ProcessedData("processor[mock]-0", data), new ProcessedData("processor[mock]-1", data))); + Arrays.asList(new ProcessorResult("processor[mock]-0", data), new ProcessorResult("processor[mock]-1", data))); executionService.execute(request, listener); SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); assertBusy(new 
Runnable() { diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json index bef89fed54d..8431b245ef8 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json @@ -1,10 +1,10 @@ { "ingest.simulate": { "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html", - "methods": [ "POST" ], + "methods": [ "GET", "POST" ], "url": { "path": "/_ingest/pipeline/_simulate", - "paths": [ "/_ingest/pipeline/_simulate", "/_ingest/pipeline/{id}/_simulate" ], + "paths": [ "/_ingest/pipeline/_simulate", "/_ingest/pipeline/_simulate/{id}" ], "parts": { "id": { "type" : "string", diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index 7c8764472e0..7596469b7f2 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -49,8 +49,8 @@ - length: { docs: 1 } - is_false: docs.0.error - is_true: docs.0.modified - - match: { docs.0.foo: "bar" } - - match: { docs.0.field2: "_value" } + - match: { docs.0.doc._source.foo: "bar" } + - match: { docs.0.doc._source.field2: "_value" } --- @@ -130,17 +130,17 @@ ] } - length: { docs: 1 } - - length: { docs.0.processor_steps: 2 } - - match: { docs.0.processor_steps.0.processor_id: "processor[mutate]-0" } - - is_false: docs.0.processor_steps.0.error - - is_true: docs.0.processor_steps.0.modified - - length: { docs.0.processor_steps.0.doc: 2 } - - match: { docs.0.processor_steps.0.doc.foo: "bar" } - - match: { docs.0.processor_steps.0.doc.field2: "_value" } - - length: { docs.0.processor_steps.1.doc: 3 } - - match: { docs.0.processor_steps.1.doc.foo: "bar" } - - match: { 
docs.0.processor_steps.1.doc.field2: "_value" } - - match: { docs.0.processor_steps.1.doc.field3: "third_val" } + - length: { docs.0.processor_results: 2 } + - match: { docs.0.processor_results.0.processor_id: "processor[mutate]-0" } + - is_false: docs.0.processor_results.0.error + - is_true: docs.0.processor_results.0.modified + - length: { docs.0.processor_results.0.doc._source: 2 } + - match: { docs.0.processor_results.0.doc._source.foo: "bar" } + - match: { docs.0.processor_results.0.doc._source.field2: "_value" } + - length: { docs.0.processor_results.1.doc._source: 3 } + - match: { docs.0.processor_results.1.doc._source.foo: "bar" } + - match: { docs.0.processor_results.1.doc._source.field2: "_value" } + - match: { docs.0.processor_results.1.doc._source..field3: "third_val" } --- "Test simulate with exception thrown": @@ -184,7 +184,71 @@ } - length: { docs: 2 } - is_true: docs.0.error - - match: { docs.0.failure: "java.lang.NullPointerException" } + - match: { docs.0.error_message: "NullPointerException[null]" } - is_false: docs.1.error - is_true: docs.1.modified - - match: { docs.1.doc.foo: "BAR" } + - match: { docs.1.doc._source.foo: "BAR" } + +--- +"Test verbose simulate with exception thrown": + - do: + cluster.health: + wait_for_status: green + + - do: + catch: request + ingest.simulate: + verbose: true + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "mutate" : { + "convert" : { + "foo": "integer" + } + } + }, + { + "mutate" : { + "uppercase" : ["bar"] + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar", + "bar": "hello" + } + }, + { + "_index": "index", + "_type": "type", + "_id": "id2", + "_source": { + "foo": "5", + "bar": "hello" + } + } + ] + } + - length: { docs: 2 } + - is_true: docs.0.error + - is_false: docs.1.error + - length: { docs.0.processor_results: 2 } + - is_false: docs.1.processor_results.0.error + - match: { 
docs.0.processor_results.0.error_message: "NumberFormatException[For input string: \"bar\"]" } + - is_false: docs.1.processor_results.1.error + - match: { docs.0.processor_results.1.doc._source.foo: "bar" } + - match: { docs.1.processor_results.1.doc._source.bar: "HELLO" } + - match: { docs.1.processor_results.0.doc._source.foo: 5 } + - match: { docs.1.processor_results.0.doc._source.bar: "hello" } + - match: { docs.1.processor_results.1.doc._source.foo: 5 } + - match: { docs.1.processor_results.1.doc._source.bar: "HELLO" } From 5bd4493ea21eead46d34ac64bbeb5860602bbe94 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 12 Nov 2015 15:38:36 +0100 Subject: [PATCH 049/347] use ConfigurationUtils to read string value from config --- .../src/main/java/org/elasticsearch/ingest/Pipeline.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index a719c4ec727..2101158c215 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; import java.io.IOException; @@ -73,7 +74,7 @@ public final class Pipeline { public final static class Factory { public Pipeline create(String id, Map config, Map processorRegistry) throws IOException { - String description = (String) config.get("description"); + String description = ConfigurationUtils.readStringProperty(config, "description"); List processors = new ArrayList<>(); @SuppressWarnings("unchecked") List>> processorConfigs = (List>>) config.get("processors"); From 75371b23813608399fd295357a049f5628a43d41 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 12 Nov 2015 15:45:17 +0100 Subject: [PATCH 050/347] restore initial 
simulate endpoint url, adapt get pipeline param name --- .../plugin/ingest/rest/RestGetPipelineAction.java | 7 ++----- .../plugin/ingest/rest/RestSimulatePipelineAction.java | 4 ++-- .../test/resources/rest-api-spec/api/ingest.simulate.json | 2 +- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java index a58366eedb0..6d444739900 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java @@ -25,27 +25,24 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequest; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; -import org.elasticsearch.rest.action.support.RestToXContentListener; public class RestGetPipelineAction extends BaseRestHandler { @Inject public RestGetPipelineAction(Settings settings, RestController controller, Client client) { super(settings, controller, client); - controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{ids}", this); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this); } @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { GetPipelineRequest request = new 
GetPipelineRequest(); - request.ids(Strings.splitStringByCommaToArray(restRequest.param("ids"))); + request.ids(Strings.splitStringByCommaToArray(restRequest.param("id"))); client.execute(GetPipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java index ac18c2dfeef..ed543b7a1eb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java @@ -36,8 +36,8 @@ public class RestSimulatePipelineAction extends BaseRestHandler { @Inject public RestSimulatePipelineAction(Settings settings, RestController controller, Client client) { super(settings, controller, client); - controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate/{id}", this); - controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate/{id}", this); + controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this); controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate", this); } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json index 8431b245ef8..a4904cef80a 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json @@ -4,7 +4,7 @@ "methods": [ "GET", "POST" ], "url": { "path": "/_ingest/pipeline/_simulate", - "paths": [ 
"/_ingest/pipeline/_simulate", "/_ingest/pipeline/_simulate/{id}" ], + "paths": [ "/_ingest/pipeline/_simulate", "/_ingest/pipeline/{id}/_simulate/" ], "parts": { "id": { "type" : "string", From 979fa816180a91291bc447c61d57a7d55dc7ae32 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 12 Nov 2015 15:45:35 +0100 Subject: [PATCH 051/347] make description optional as part of a Pipeline --- .../ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 7d7959c2921..b98a469d3c3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -89,7 +89,7 @@ public final class Pipeline { public final static class Factory { public Pipeline create(String id, Map config, Map processorRegistry) throws IOException { - String description = ConfigurationUtils.readStringProperty(config, "description"); + String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); List processors = new ArrayList<>(); @SuppressWarnings("unchecked") List>> processorConfigs = (List>>) config.get("processors"); From c4951ef74fa29c8bd47ddb6c076afeac9d974d85 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 12 Nov 2015 17:40:14 +0100 Subject: [PATCH 052/347] update get pipeline param names to id for consistency --- .../resources/rest-api-spec/api/ingest.get_pipeline.json | 6 +++--- .../test/resources/rest-api-spec/test/ingest/20_crud.yaml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json index 246c6535e92..71772a28a76 100644 --- 
a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json @@ -3,10 +3,10 @@ "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest.html", "methods": [ "GET" ], "url": { - "path": "/_ingest/pipeline/{ids}", - "paths": [ "/_ingest/pipeline/{ids}" ], + "path": "/_ingest/pipeline/{id}", + "paths": [ "/_ingest/pipeline/{id}" ], "parts": { - "ids": { + "id": { "type" : "string", "description" : "Comma separated list of pipeline ids. Wildcards supported", "required" : true diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index 82177c615ab..daf13a34c1b 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -32,7 +32,7 @@ - do: ingest.get_pipeline: - ids: "my_pipeline" + id: "my_pipeline" - match: { my_pipeline._source.description: "_description" } - match: { my_pipeline._version: 1 } @@ -53,7 +53,7 @@ - do: catch: missing ingest.get_pipeline: - ids: "my_pipeline" + id: "my_pipeline" --- "Test invalid config": From af1de8e1cc9fc439722d76e3bcfece0305164d33 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 12 Nov 2015 11:37:17 -0800 Subject: [PATCH 053/347] updated with cosmetic changes --- .../ingest/processor/ConfigurationUtils.java | 20 ++++ .../rest/RestSimulatePipelineAction.java | 10 +- .../ingest/transport/TransportData.java | 99 +++++++++++++++++++ .../simulate/ParsedSimulateRequest.java | 54 +++++----- ...ponse.java => SimulateDocumentResult.java} | 65 +++++------- .../simulate/SimulateExecutionService.java | 48 ++++----- .../simulate/SimulatePipelineRequest.java | 12 +-- .../SimulatePipelineRequestBuilder.java | 6 +- .../simulate/SimulatePipelineResponse.java | 42 ++++---- 
.../SimulatePipelineTransportAction.java | 15 ++- ...sult.java => SimulateProcessorResult.java} | 47 ++++----- .../org/elasticsearch/ingest/DataTests.java | 26 +++++ .../elasticsearch/ingest/IngestClientIT.java | 6 +- .../ingest/transport/TransportDataTests.java | 43 ++++++++ .../ParsedSimulateRequestParserTests.java | 22 ++--- .../SimulateExecutionServiceTests.java | 47 ++++----- .../test/ingest/80_simulate.yaml | 23 ++--- 17 files changed, 366 insertions(+), 219 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/{SimulatedItemResponse.java => SimulateDocumentResult.java} (63%) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/{ProcessorResult.java => SimulateProcessorResult.java} (66%) create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java index 3a8dbbf448d..49fd90e7afe 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -111,6 +111,20 @@ public final class ConfigurationUtils { } } + /** + * Returns and removes the specified property of type map from the specified configuration map. + * + * If the property value isn't of type map an {@link IllegalArgumentException} is thrown. 
+ * If the property is missing an {@link IllegalArgumentException} is thrown + */ + public static Map readMap(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + if (value == null) { + throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); + } + + return readMap(propertyName, value); + } /** * Returns and removes the specified property of type map from the specified configuration map. @@ -122,6 +136,11 @@ public final class ConfigurationUtils { if (value == null) { return null; } + + return readMap(propertyName, value); + } + + private static Map readMap(String propertyName, Object value) { if (value instanceof Map) { @SuppressWarnings("unchecked") Map map = (Map) value; @@ -130,4 +149,5 @@ public final class ConfigurationUtils { throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); } } + } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java index ed543b7a1eb..0b86e35b522 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java @@ -29,7 +29,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestStatusToXContentListener; +import org.elasticsearch.rest.action.support.RestToXContentListener; public class RestSimulatePipelineAction extends BaseRestHandler { @@ -45,13 +45,13 @@ public class RestSimulatePipelineAction extends BaseRestHandler { @Override protected void handleRequest(RestRequest restRequest, 
RestChannel channel, Client client) throws Exception { SimulatePipelineRequest request = new SimulatePipelineRequest(); - request.id(restRequest.param("id")); - request.verbose(restRequest.paramAsBoolean("verbose", false)); + request.setId(restRequest.param("id")); + request.setVerbose(restRequest.paramAsBoolean("verbose", false)); if (RestActions.hasBodyContent(restRequest)) { - request.source(RestActions.getRestContent(restRequest)); + request.setSource(RestActions.getRestContent(restRequest)); } - client.execute(SimulatePipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + client.execute(SimulatePipelineAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java new file mode 100644 index 00000000000..b9fa46fe939 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.ingest.Data; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class TransportData implements Streamable, ToXContent { + private Data data; + + public TransportData() { + + } + + public TransportData(Data data) { + this.data = data; + } + + public Data get() { + return data; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + String index = in.readString(); + String type = in.readString(); + String id = in.readString(); + Map doc = in.readMap(); + this.data = new Data(index, type, id, doc); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(data.getIndex()); + out.writeString(data.getType()); + out.writeString(data.getId()); + out.writeMap(data.getDocument()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields.DOCUMENT); + builder.field(Fields.MODIFIED, data.isModified()); + builder.field(Fields.INDEX, data.getIndex()); + builder.field(Fields.TYPE, data.getType()); + builder.field(Fields.ID, data.getId()); + builder.field(Fields.SOURCE, data.getDocument()); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TransportData that = (TransportData) o; + return Objects.equals(data, that.data); + } + + @Override + public int hashCode() { + return Objects.hash(data); + } + + static final class Fields { + 
static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); + static final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); + static final XContentBuilderString INDEX = new XContentBuilderString("_index"); + static final XContentBuilderString TYPE = new XContentBuilderString("_type"); + static final XContentBuilderString ID = new XContentBuilderString("_id"); + static final XContentBuilderString SOURCE = new XContentBuilderString("_source"); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java index e39596c777e..7d02f686469 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java @@ -68,34 +68,44 @@ public class ParsedSimulateRequest { private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory(); public static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline"; - public ParsedSimulateRequest parse(String pipelineId, Map config, boolean verbose, PipelineStore pipelineStore) throws IOException { - Pipeline pipeline; - // if pipeline `id` passed to request, fetch pipeline from store. 
- if (pipelineId != null) { - pipeline = pipelineStore.get(pipelineId); - } else { - Map pipelineConfig = ConfigurationUtils.readOptionalMap(config, "pipeline"); - pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); - } - - List> docs = ConfigurationUtils.readList(config, "docs"); - + private List parseDocs(Map config) { + List> docs = ConfigurationUtils.readList(config, Fields.DOCS); List dataList = new ArrayList<>(); - - for (int i = 0; i < docs.size(); i++) { - Map dataMap = docs.get(i); - Map document = ConfigurationUtils.readOptionalMap(dataMap, "_source"); - if (document == null) { - document = Collections.emptyMap(); - } - Data data = new Data(ConfigurationUtils.readOptionalStringProperty(dataMap, "_index"), - ConfigurationUtils.readOptionalStringProperty(dataMap, "_type"), - ConfigurationUtils.readOptionalStringProperty(dataMap, "_id"), + for (Map dataMap : docs) { + Map document = ConfigurationUtils.readMap(dataMap, Fields.SOURCE); + Data data = new Data(ConfigurationUtils.readStringProperty(dataMap, Fields.INDEX), + ConfigurationUtils.readStringProperty(dataMap, Fields.TYPE), + ConfigurationUtils.readStringProperty(dataMap, Fields.ID), document); dataList.add(data); } + return dataList; + } + public ParsedSimulateRequest parseWithPipelineId(String pipelineId, Map config, boolean verbose, PipelineStore pipelineStore) { + if (pipelineId == null) { + throw new IllegalArgumentException("param [pipeline] is null"); + } + Pipeline pipeline = pipelineStore.get(pipelineId); + List dataList = parseDocs(config); return new ParsedSimulateRequest(pipeline, dataList, verbose); + + } + + public ParsedSimulateRequest parse(Map config, boolean verbose, PipelineStore pipelineStore) throws IOException { + Map pipelineConfig = ConfigurationUtils.readMap(config, Fields.PIPELINE); + Pipeline pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); 
+ List dataList = parseDocs(config); + return new ParsedSimulateRequest(pipeline, dataList, verbose); + } + + static final class Fields { + static final String PIPELINE = "pipeline"; + static final String DOCS = "docs"; + static final String SOURCE = "_source"; + static final String INDEX = "_index"; + static final String TYPE = "_type"; + static final String ID = "_id"; } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java similarity index 63% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java index d18e16bd56b..33ef7745404 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatedItemResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.plugin.ingest.transport.simulate; -import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -26,46 +26,39 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.Data; +import org.elasticsearch.plugin.ingest.transport.TransportData; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.Objects; -public class SimulatedItemResponse implements Streamable, ToXContent { +public class 
SimulateDocumentResult implements Streamable, ToXContent { - private Data data; - private List processorResultList; + private TransportData data; + private List processorResultList; private Throwable failure; - public SimulatedItemResponse() { + public SimulateDocumentResult() { } - public SimulatedItemResponse(Data data) { - this.data = data; + public SimulateDocumentResult(Data data) { + this.data = new TransportData(data); } - public SimulatedItemResponse(List processorResultList) { + public SimulateDocumentResult(List processorResultList) { this.processorResultList = processorResultList; } - public SimulatedItemResponse(Throwable failure) { + public SimulateDocumentResult(Throwable failure) { this.failure = failure; } public boolean isFailed() { if (failure != null) { return true; - } else if (processorResultList != null) { - for (ProcessorResult result : processorResultList) { - if (result.isFailed()) { - return true; - } - } } - return false; } @@ -74,10 +67,10 @@ public class SimulatedItemResponse implements Streamable, ToXContent { } public Data getData() { - return data; + return data.get(); } - public List getProcessorResultList() { + public List getProcessorResultList() { return processorResultList; } @@ -91,16 +84,13 @@ public class SimulatedItemResponse implements Streamable, ToXContent { int size = in.readVInt(); processorResultList = new ArrayList<>(); for (int i = 0; i < size; i++) { - ProcessorResult processorResult = new ProcessorResult(); + SimulateProcessorResult processorResult = new SimulateProcessorResult(); processorResult.readFrom(in); processorResultList.add(processorResult); } } else { - String index = in.readString(); - String type = in.readString(); - String id = in.readString(); - Map doc = in.readMap(); - this.data = new Data(index, type, id, doc); + this.data = new TransportData(); + this.data.readFrom(in); } } @@ -113,32 +103,27 @@ public class SimulatedItemResponse implements Streamable, ToXContent { out.writeThrowable(failure); } 
else if (isVerbose()) { out.writeVInt(processorResultList.size()); - for (ProcessorResult p : processorResultList) { + for (SimulateProcessorResult p : processorResultList) { p.writeTo(out); } } else { - out.writeString(data.getIndex()); - out.writeString(data.getType()); - out.writeString(data.getId()); - out.writeMap(data.getDocument()); + data.writeTo(out); } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Fields.ERROR, isFailed()); - if (failure != null) { - builder.field(Fields.ERROR_MESSAGE, ExceptionsHelper.detailedMessage(failure)); + if (isFailed()) { + ElasticsearchException.renderThrowable(builder, params, failure); } else if (isVerbose()) { builder.startArray(Fields.PROCESSOR_RESULTS); - for (ProcessorResult processorResult : processorResultList) { - builder.value(processorResult); + for (SimulateProcessorResult processorResult : processorResultList) { + processorResult.toXContent(builder, params); } builder.endArray(); } else { - builder.field(Fields.MODIFIED, data.isModified()); - builder.field(Fields.DOCUMENT, data.asMap()); + data.toXContent(builder, params); } builder.endObject(); return builder; @@ -150,7 +135,7 @@ public class SimulatedItemResponse implements Streamable, ToXContent { if (obj == null || getClass() != obj.getClass()) { return false; } - SimulatedItemResponse other = (SimulatedItemResponse) obj; + SimulateDocumentResult other = (SimulateDocumentResult) obj; return Objects.equals(data, other.data) && Objects.equals(processorResultList, other.processorResultList) && Objects.equals(failure, other.failure); } @@ -160,10 +145,6 @@ public class SimulatedItemResponse implements Streamable, ToXContent { } static final class Fields { - static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); - static final XContentBuilderString ERROR = new XContentBuilderString("error"); - static final XContentBuilderString ERROR_MESSAGE = 
new XContentBuilderString("error_message"); - static final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); static final XContentBuilderString PROCESSOR_RESULTS = new XContentBuilderString("processor_results"); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java index 119bbc91ea8..a4518273467 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java @@ -19,6 +19,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; @@ -30,7 +31,7 @@ import java.util.List; public class SimulateExecutionService { - static final String THREAD_POOL_NAME = ThreadPool.Names.MANAGEMENT; + private static final String THREAD_POOL_NAME = ThreadPool.Names.MANAGEMENT; private final ThreadPool threadPool; @@ -40,18 +41,18 @@ public class SimulateExecutionService { } - SimulatedItemResponse executeItem(Pipeline pipeline, Data data) { + SimulateDocumentResult executeItem(Pipeline pipeline, Data data) { try { pipeline.execute(data); - return new SimulatedItemResponse(data); + return new SimulateDocumentResult(data); } catch (Exception e) { - return new SimulatedItemResponse(e); + return new SimulateDocumentResult(e); } } - SimulatedItemResponse executeVerboseItem(Pipeline pipeline, Data data) { - List processorResultList = new ArrayList<>(); + SimulateDocumentResult executeVerboseItem(Pipeline pipeline, Data data) { + List processorResultList = new ArrayList<>(); Data currentData = new Data(data); for (int i = 0; i < 
pipeline.getProcessors().size(); i++) { Processor processor = pipeline.getProcessors().get(i); @@ -59,39 +60,30 @@ public class SimulateExecutionService { try { processor.execute(currentData); - processorResultList.add(new ProcessorResult(processorId, currentData)); + processorResultList.add(new SimulateProcessorResult(processorId, currentData)); } catch (Exception e) { - processorResultList.add(new ProcessorResult(processorId, e)); + processorResultList.add(new SimulateProcessorResult(processorId, e)); } currentData = new Data(currentData); } - return new SimulatedItemResponse(processorResultList); + return new SimulateDocumentResult(processorResultList); } - SimulatePipelineResponse execute(ParsedSimulateRequest request) { - List responses = new ArrayList<>(); - for (Data data : request.getDocuments()) { - if (request.isVerbose()) { - responses.add(executeVerboseItem(request.getPipeline(), data)); - } else { - responses.add(executeItem(request.getPipeline(), data)); - } - } - return new SimulatePipelineResponse(request.getPipeline().getId(), responses); - } - - public void execute(ParsedSimulateRequest request, Listener listener) { + public void execute(ParsedSimulateRequest request, ActionListener listener) { threadPool.executor(THREAD_POOL_NAME).execute(new Runnable() { @Override public void run() { - SimulatePipelineResponse response = execute(request); - listener.onResponse(response); + List responses = new ArrayList<>(); + for (Data data : request.getDocuments()) { + if (request.isVerbose()) { + responses.add(executeVerboseItem(request.getPipeline(), data)); + } else { + responses.add(executeItem(request.getPipeline(), data)); + } + } + listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), responses)); } }); } - - public interface Listener { - void onResponse(SimulatePipelineResponse response); - } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java index 8c5a5f1ef71..4aec025b12d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -44,27 +44,27 @@ public class SimulatePipelineRequest extends ActionRequest { return validationException; } - public String id() { + public String getId() { return id; } - public void id(String id) { + public void setId(String id) { this.id = id; } - public boolean verbose() { + public boolean isVerbose() { return verbose; } - public void verbose(boolean verbose) { + public void setVerbose(boolean verbose) { this.verbose = verbose; } - public BytesReference source() { + public BytesReference getSource() { return source; } - public void source(BytesReference source) { + public void setSource(BytesReference source) { this.source = source; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java index 7b140b345f4..07998291922 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java @@ -30,17 +30,17 @@ public class SimulatePipelineRequestBuilder extends ActionRequestBuilder responses; + private List responses; public SimulatePipelineResponse() { } - public SimulatePipelineResponse(String pipelineId, List responses) { + public SimulatePipelineResponse(String pipelineId, List responses) { this.pipelineId = pipelineId; this.responses = Collections.unmodifiableList(responses); } - public String pipelineId() { + public 
String getPipelineId() { return pipelineId; } - public void pipelineId(String pipelineId) { + public void setPipelineId(String pipelineId) { this.pipelineId = pipelineId; } - public List responses() { + public List getResponses() { return responses; } - public void responses(List responses) { + public void setResponses(List responses) { this.responses = responses; } @@ -67,7 +67,7 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo super.writeTo(out); out.writeString(pipelineId); out.writeVInt(responses.size()); - for (SimulatedItemResponse response : responses) { + for (SimulateDocumentResult response : responses) { response.writeTo(out); } } @@ -79,7 +79,7 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo int responsesLength = in.readVInt(); responses = new ArrayList<>(); for (int i = 0; i < responsesLength; i++) { - SimulatedItemResponse response = new SimulatedItemResponse(); + SimulateDocumentResult response = new SimulateDocumentResult(); response.readFrom(in); responses.add(response); } @@ -88,25 +88,15 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray("docs"); - for (SimulatedItemResponse response : responses) { - builder.value(response); + builder.startArray(Fields.DOCUMENTS); + for (SimulateDocumentResult response : responses) { + response.toXContent(builder, params); } builder.endArray(); return builder; } - @Override - public RestStatus status() { - for (SimulatedItemResponse response : responses) { - if (response.isFailed()) { - return RestStatus.BAD_REQUEST; - } - } - return RestStatus.OK; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -120,4 +110,8 @@ public class SimulatePipelineResponse extends ActionResponse implements StatusTo public int hashCode() { return Objects.hash(pipelineId, 
responses); } + + static final class Fields { + static final XContentBuilderString DOCUMENTS = new XContentBuilderString("docs"); + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index d52a92715a3..305c4122203 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -46,22 +46,21 @@ public class SimulatePipelineTransportAction extends HandledTransportAction listener) { - Map source = XContentHelper.convertToMap(request.source(), false).v2(); + Map source = XContentHelper.convertToMap(request.getSource(), false).v2(); ParsedSimulateRequest simulateRequest; ParsedSimulateRequest.Parser parser = new ParsedSimulateRequest.Parser(); try { - simulateRequest = parser.parse(request.id(), source, request.verbose(), pipelineStore); + if (request.getId() != null) { + simulateRequest = parser.parseWithPipelineId(request.getId(), source, request.isVerbose(), pipelineStore); + } else { + simulateRequest = parser.parse(source, request.isVerbose(), pipelineStore); + } } catch (IOException e) { listener.onFailure(e); return; } - executionService.execute(simulateRequest, new SimulateExecutionService.Listener() { - @Override - public void onResponse(SimulatePipelineResponse response) { - listener.onResponse(response); - } - }); + executionService.execute(simulateRequest, listener); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ProcessorResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java similarity index 66% rename from 
plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ProcessorResult.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java index 3f2073e480b..ca97ec5a604 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ProcessorResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.plugin.ingest.transport.simulate; -import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -26,27 +26,27 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.Data; +import org.elasticsearch.plugin.ingest.transport.TransportData; import java.io.IOException; -import java.util.Map; import java.util.Objects; -public class ProcessorResult implements Streamable, ToXContent { +public class SimulateProcessorResult implements Streamable, ToXContent { private String processorId; - private Data data; + private TransportData data; private Throwable failure; - public ProcessorResult() { + public SimulateProcessorResult() { } - public ProcessorResult(String processorId, Data data) { + public SimulateProcessorResult(String processorId, Data data) { this.processorId = processorId; - this.data = data; + this.data = new TransportData(data); } - public ProcessorResult(String processorId, Throwable failure) { + public SimulateProcessorResult(String processorId, Throwable failure) { this.processorId = processorId; this.failure = failure; } @@ -56,7 +56,7 @@ public class ProcessorResult implements Streamable, 
ToXContent { } public Data getData() { - return data; + return data.get(); } public String getProcessorId() { @@ -69,12 +69,8 @@ public class ProcessorResult implements Streamable, ToXContent { if (isFailure) { this.failure = in.readThrowable(); } else { - this.processorId = in.readString(); - String index = in.readString(); - String type = in.readString(); - String id = in.readString(); - Map doc = in.readMap(); - this.data = new Data(index, type, id, doc); + this.data = new TransportData(); + this.data.readFrom(in); } } @@ -85,10 +81,7 @@ public class ProcessorResult implements Streamable, ToXContent { out.writeThrowable(failure); } else { out.writeString(processorId); - out.writeString(data.getIndex()); - out.writeString(data.getType()); - out.writeString(data.getId()); - out.writeMap(data.getDocument()); + data.writeTo(out); } } @@ -96,12 +89,10 @@ public class ProcessorResult implements Streamable, ToXContent { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Fields.PROCESSOR_ID, processorId); - builder.field(Fields.ERROR, isFailed()); if (isFailed()) { - builder.field(Fields.ERROR_MESSAGE, ExceptionsHelper.detailedMessage(failure)); + ElasticsearchException.renderThrowable(builder, params, failure); } else { - builder.field(Fields.MODIFIED, data.isModified()); - builder.field(Fields.DOCUMENT, data.asMap()); + data.toXContent(builder, params); } builder.endObject(); return builder; @@ -109,11 +100,13 @@ public class ProcessorResult implements Streamable, ToXContent { @Override public boolean equals(Object obj) { - if (obj == this) { return true; } + if (obj == this) { + return true; + } if (obj == null || getClass() != obj.getClass()) { return false; } - ProcessorResult other = (ProcessorResult) obj; + SimulateProcessorResult other = (SimulateProcessorResult) obj; return Objects.equals(processorId, other.processorId) && Objects.equals(data, other.data) && Objects.equals(failure, 
other.failure); } @@ -123,10 +116,6 @@ public class ProcessorResult implements Streamable, ToXContent { } static final class Fields { - static final XContentBuilderString DOCUMENT = new XContentBuilderString("doc"); static final XContentBuilderString PROCESSOR_ID = new XContentBuilderString("processor_id"); - static final XContentBuilderString ERROR = new XContentBuilderString("error"); - static final XContentBuilderString ERROR_MESSAGE = new XContentBuilderString("error_message"); - static final XContentBuilderString MODIFIED = new XContentBuilderString("modified"); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index e6cae34035e..918064a17c7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -84,4 +85,29 @@ public class DataTests extends ESTestCase { data.addField("fizz.new", "bar"); assertThat(data.getProperty("fizz.new"), equalTo("bar")); } + + public void testEquals() { + Data otherData = new Data(data); + assertThat(otherData, equalTo(data)); + } + + public void testNotEqualsDiffIndex() { + Data otherData = new Data(data.getIndex() + "foo", data.getType(), data.getId(), data.getDocument()); + assertThat(otherData, not(equalTo(data))); + } + + public void testNotEqualsDiffType() { + Data otherData = new Data(data.getIndex(), data.getType() + "foo", data.getId(), data.getDocument()); + assertThat(otherData, not(equalTo(data))); + } + + public void testNotEqualsDiffId() { + Data otherData = new Data(data.getIndex(), data.getType(), data.getId() + "foo", data.getDocument()); + assertThat(otherData, not(equalTo(data))); + } + + public void 
testNotEqualsDiffDocument() { + Data otherData = new Data(data.getIndex(), data.getType(), data.getId(), Collections.emptyMap()); + assertThat(otherData, not(equalTo(data))); + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index d0d11ae60a0..97ed3339090 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -33,7 +33,7 @@ import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatedItemResponse; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulateDocumentResult; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -104,8 +104,8 @@ public class IngestClientIT extends ESIntegTestCase { Map expectedDoc = new HashMap<>(); expectedDoc.put("foo", "bar"); Data expectedData = new Data("index", "type", "id", expectedDoc); - SimulatedItemResponse expectedResponse = new SimulatedItemResponse(expectedData); - List expectedResponses = Arrays.asList(expectedResponse); + SimulateDocumentResult expectedResponse = new SimulateDocumentResult(expectedData); + List expectedResponses = Arrays.asList(expectedResponse); SimulatePipelineResponse expected = new SimulatePipelineResponse("_id", expectedResponses); assertThat(response, equalTo(expected)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java new file mode 
100644 index 00000000000..08940ee3ce2 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport; + +import org.elasticsearch.ingest.Data; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class TransportDataTests extends ESTestCase { + + public void testEquals() throws Exception { + Data data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + Data otherData = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + assertThat(data, equalTo(otherData)); + } + + public void testNotEquals() throws Exception { + Data data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + Data otherData = new Data("_index2", "_type", "_id", Collections.emptyMap()); + assertThat(data, not(equalTo(otherData))); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java index d88bf7a5467..03c4c3af7f8 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java @@ -35,28 +35,26 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class ParsedSimulateRequestParserTests extends ESTestCase { - private static final ParsedSimulateRequest.Parser PARSER = new ParsedSimulateRequest.Parser(); - - private Map processorRegistry; private PipelineStore store; - private Processor processor; + private ParsedSimulateRequest.Parser parser; private Pipeline pipeline; private Data data; @Before public void init() throws IOException { + parser = new ParsedSimulateRequest.Parser(); List uppercase = Collections.unmodifiableList(Collections.singletonList("foo")); - processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); + Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Collections.unmodifiableList(Arrays.asList(processor))); - data = new Data("_index", "_type", "_id", Collections.emptyMap()); - processorRegistry = new HashMap<>(); + data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + Map processorRegistry = new HashMap<>(); processorRegistry.put("mutate", new MutateProcessor.Factory()); store = mock(PipelineStore.class); when(store.get("_id")).thenReturn(pipeline); when(store.getProcessorFactoryRegistry()).thenReturn(processorRegistry); } - public void testParse_UsingPipelineStore() throws Exception { + public void testParseUsingPipelineStore() throws Exception { ParsedSimulateRequest 
expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); Map raw = new HashMap<>(); @@ -65,14 +63,15 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { doc.put("_index", "_index"); doc.put("_type", "_type"); doc.put("_id", "_id"); + doc.put("_source", data.getDocument()); docs.add(doc); raw.put("docs", docs); - ParsedSimulateRequest actualRequest = PARSER.parse("_id", raw, false, store); + ParsedSimulateRequest actualRequest = parser.parseWithPipelineId("_id", raw, false, store); assertThat(actualRequest, equalTo(expectedRequest)); } - public void testParse_ProvidedPipeline() throws Exception { + public void testParseWithProvidedPipeline() throws Exception { ParsedSimulateRequest expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); Map raw = new HashMap<>(); @@ -81,6 +80,7 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { doc.put("_index", "_index"); doc.put("_type", "_type"); doc.put("_id", "_id"); + doc.put("_source", data.getDocument()); docs.add(doc); Map processorConfig = new HashMap<>(); @@ -91,7 +91,7 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { raw.put("docs", docs); raw.put("pipeline", pipelineConfig); - ParsedSimulateRequest actualRequest = PARSER.parse(null, raw, false, store); + ParsedSimulateRequest actualRequest = parser.parse(raw, false, store); assertThat(actualRequest, equalTo(expectedRequest)); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index 8ed2cc6abec..cf108b0db84 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -19,11 +19,12 @@ package org.elasticsearch.plugin.ingest.transport.simulate; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.transport.TransportData; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -37,26 +38,28 @@ import static org.mockito.Mockito.*; public class SimulateExecutionServiceTests extends ESTestCase { - private PipelineStore store; private ThreadPool threadPool; private SimulateExecutionService executionService; private Pipeline pipeline; private Processor processor; private Data data; + private TransportData transportData; + private ActionListener listener; @Before public void setup() { - store = mock(PipelineStore.class); threadPool = new ThreadPool( Settings.builder() - .put("name", "_name") + .put("name", getClass().getName()) .build() ); executionService = new SimulateExecutionService(threadPool); processor = mock(Processor.class); when(processor.getType()).thenReturn("mock"); pipeline = new Pipeline("_id", "_description", Arrays.asList(processor, processor)); - data = new Data("_index", "_type", "_id", Collections.emptyMap()); + data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + transportData = new TransportData(data); + listener = mock(ActionListener.class); } @After @@ -65,43 +68,42 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItem() throws Exception { - SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse( - Arrays.asList(new ProcessorResult("processor[mock]-0", data), new 
ProcessorResult("processor[mock]-1", data))); - SimulatedItemResponse actualItemResponse = executionService.executeVerboseItem(pipeline, data); + SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult( + Arrays.asList(new SimulateProcessorResult("processor[mock]-0", data), new SimulateProcessorResult("processor[mock]-1", data))); + SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, data); verify(processor, times(2)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } public void testExecuteItem() throws Exception { - SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse(data); - SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data); + SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult(data); + SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(2)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } - public void testExecuteVerboseItem_Failure() throws Exception { + public void testExecuteVerboseItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse( - Arrays.asList(new ProcessorResult("processor[mock]-0", e), new ProcessorResult("processor[mock]-1", data)) + SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult( + Arrays.asList(new SimulateProcessorResult("processor[mock]-0", e), new SimulateProcessorResult("processor[mock]-1", data)) ); doThrow(e).doNothing().when(processor).execute(data); - SimulatedItemResponse actualItemResponse = executionService.executeVerboseItem(pipeline, data); + SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, data); verify(processor, times(2)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } - 
public void testExecuteItem_Failure() throws Exception { + public void testExecuteItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - SimulatedItemResponse expectedItemResponse = new SimulatedItemResponse(e); + SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult(e); doThrow(e).when(processor).execute(data); - SimulatedItemResponse actualItemResponse = executionService.executeItem(pipeline, data); + SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(1)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); } public void testExecute() throws Exception { - SimulateExecutionService.Listener listener = mock(SimulateExecutionService.Listener.class); - SimulatedItemResponse itemResponse = new SimulatedItemResponse(data); + SimulateDocumentResult itemResponse = new SimulateDocumentResult(data); ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); executionService.execute(request, listener); SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); @@ -114,11 +116,10 @@ public class SimulateExecutionServiceTests extends ESTestCase { }); } - public void testExecute_Verbose() throws Exception { - SimulateExecutionService.Listener listener = mock(SimulateExecutionService.Listener.class); + public void testExecuteWithVerbose() throws Exception { ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), true); - SimulatedItemResponse itemResponse = new SimulatedItemResponse( - Arrays.asList(new ProcessorResult("processor[mock]-0", data), new ProcessorResult("processor[mock]-1", data))); + SimulateDocumentResult itemResponse = new SimulateDocumentResult( + Arrays.asList(new SimulateProcessorResult("processor[mock]-0", data), new SimulateProcessorResult("processor[mock]-1", 
data))); executionService.execute(request, listener); SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); assertBusy(new Runnable() { diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index 7596469b7f2..156ba8cfd67 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -47,8 +47,7 @@ ] } - length: { docs: 1 } - - is_false: docs.0.error - - is_true: docs.0.modified + - is_true: docs.0.doc.modified - match: { docs.0.doc._source.foo: "bar" } - match: { docs.0.doc._source.field2: "_value" } @@ -132,8 +131,7 @@ - length: { docs: 1 } - length: { docs.0.processor_results: 2 } - match: { docs.0.processor_results.0.processor_id: "processor[mutate]-0" } - - is_false: docs.0.processor_results.0.error - - is_true: docs.0.processor_results.0.modified + - is_true: docs.0.processor_results.0.doc.modified - length: { docs.0.processor_results.0.doc._source: 2 } - match: { docs.0.processor_results.0.doc._source.foo: "bar" } - match: { docs.0.processor_results.0.doc._source.field2: "_value" } @@ -149,7 +147,6 @@ wait_for_status: green - do: - catch: request ingest.simulate: body: > { @@ -183,10 +180,8 @@ ] } - length: { docs: 2 } - - is_true: docs.0.error - - match: { docs.0.error_message: "NullPointerException[null]" } - - is_false: docs.1.error - - is_true: docs.1.modified + - match: { docs.0.error.type: "null_pointer_exception" } + - is_true: docs.1.doc.modified - match: { docs.1.doc._source.foo: "BAR" } --- @@ -196,7 +191,6 @@ wait_for_status: green - do: - catch: request ingest.simulate: verbose: true body: > @@ -240,12 +234,11 @@ ] } - length: { docs: 2 } - - is_true: docs.0.error - - is_false: docs.1.error - length: { docs.0.processor_results: 2 } - - is_false: 
docs.1.processor_results.0.error - - match: { docs.0.processor_results.0.error_message: "NumberFormatException[For input string: \"bar\"]" } - - is_false: docs.1.processor_results.1.error + - match: { docs.0.processor_results.0.error.type: "number_format_exception" } + - match: { docs.0.processor_results.1.doc._index: "index" } + - match: { docs.0.processor_results.1.doc._type: "type" } + - match: { docs.0.processor_results.1.doc._id: "id" } - match: { docs.0.processor_results.1.doc._source.foo: "bar" } - match: { docs.1.processor_results.1.doc._source.bar: "HELLO" } - match: { docs.1.processor_results.0.doc._source.foo: 5 } From 20384aedf09c6bbda424fd50dd1d4d23c19a2d14 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 12 Nov 2015 17:31:08 -0800 Subject: [PATCH 054/347] split out SimulateDocumentResult into subclasses, add tests for equalTo and streamable --- .../java/org/elasticsearch/ingest/Data.java | 10 -- .../simulate/ParsedSimulateRequest.java | 11 +- .../simulate/SimulateDocumentResult.java | 122 +----------------- .../simulate/SimulateExecutionService.java | 6 +- .../SimulateFailedDocumentResult.java | 84 ++++++++++++ .../simulate/SimulatePipelineRequest.java | 9 ++ .../simulate/SimulatePipelineResponse.java | 45 ++++--- .../simulate/SimulateProcessorResult.java | 9 +- .../SimulateSimpleDocumentResult.java | 84 ++++++++++++ .../SimulateVerboseDocumentResult.java | 101 +++++++++++++++ .../elasticsearch/ingest/IngestClientIT.java | 7 +- .../elasticsearch/ingest/PipelineTests.java | 67 ++++++++++ .../ParsedSimulateRequestParserTests.java | 28 ++-- .../SimulateExecutionServiceTests.java | 12 +- .../SimulateFailedDocumentResultTests.java | 69 ++++++++++ .../SimulatePipelineResponseTests.java | 80 ++++++++++++ .../SimulateProcessorResultTests.java | 87 +++++++++++++ .../SimulateSimpleDocumentResultTests.java | 67 ++++++++++ .../test/ingest/80_simulate.yaml | 27 ++++ 19 files changed, 744 insertions(+), 181 deletions(-) create mode 100644 
plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateVerboseDocumentResult.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 543fd9c1c4b..b360af7d459 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -130,16 +130,6 @@ public final class Data { return modified; } - public Map asMap() { - Map dataMap = new HashMap<>(); - dataMap.put("_index", index); - dataMap.put("_type", type); - dataMap.put("_id", id); - dataMap.put("_source", document); - - return dataMap; - } - @Override public boolean equals(Object obj) { if (obj == this) { return true; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java index 7d02f686469..47af2db583d 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java @@ -26,6 +26,8 @@ import org.elasticsearch.plugin.ingest.PipelineStore; import java.io.IOException; import java.util.*; +import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; + public class ParsedSimulateRequest { private final List documents; private final Pipeline pipeline; @@ -98,14 +100,5 @@ public class ParsedSimulateRequest { List dataList = parseDocs(config); return new ParsedSimulateRequest(pipeline, dataList, verbose); } - - static final class Fields { - static final String PIPELINE = "pipeline"; - static final String DOCS = "docs"; - static final String SOURCE = "_source"; - static final String INDEX = "_index"; - static final String TYPE = "_type"; - static final String ID = "_id"; - } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java index 33ef7745404..98fdf380bc7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java @@ -18,133 +18,23 @@ */ package org.elasticsearch.plugin.ingest.transport.simulate; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.ingest.Data; -import 
org.elasticsearch.plugin.ingest.transport.TransportData; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -public class SimulateDocumentResult implements Streamable, ToXContent { - - private TransportData data; - private List processorResultList; - private Throwable failure; - - public SimulateDocumentResult() { +public abstract class SimulateDocumentResult implements Streamable, ToXContent { + public int getStreamId() { + return -1; } - public SimulateDocumentResult(Data data) { - this.data = new TransportData(data); - } + public abstract void readFrom(StreamInput in) throws IOException; - public SimulateDocumentResult(List processorResultList) { - this.processorResultList = processorResultList; - } + public abstract void writeTo(StreamOutput out) throws IOException; - public SimulateDocumentResult(Throwable failure) { - this.failure = failure; - } - - public boolean isFailed() { - if (failure != null) { - return true; - } - return false; - } - - public boolean isVerbose() { - return this.processorResultList != null; - } - - public Data getData() { - return data.get(); - } - - public List getProcessorResultList() { - return processorResultList; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - boolean isFailed = in.readBoolean(); - boolean isVerbose = in.readBoolean(); - if (isFailed) { - this.failure = in.readThrowable(); - } else if (isVerbose) { - int size = in.readVInt(); - processorResultList = new ArrayList<>(); - for (int i = 0; i < size; i++) { - SimulateProcessorResult processorResult = new SimulateProcessorResult(); - processorResult.readFrom(in); - processorResultList.add(processorResult); - } - } else { - this.data = new TransportData(); - this.data.readFrom(in); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(isFailed()); - out.writeBoolean(isVerbose()); - - if (failure != null) { - out.writeThrowable(failure); 
- } else if (isVerbose()) { - out.writeVInt(processorResultList.size()); - for (SimulateProcessorResult p : processorResultList) { - p.writeTo(out); - } - } else { - data.writeTo(out); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (isFailed()) { - ElasticsearchException.renderThrowable(builder, params, failure); - } else if (isVerbose()) { - builder.startArray(Fields.PROCESSOR_RESULTS); - for (SimulateProcessorResult processorResult : processorResultList) { - processorResult.toXContent(builder, params); - } - builder.endArray(); - } else { - data.toXContent(builder, params); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { return true; } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - SimulateDocumentResult other = (SimulateDocumentResult) obj; - return Objects.equals(data, other.data) && Objects.equals(processorResultList, other.processorResultList) && Objects.equals(failure, other.failure); - } - - @Override - public int hashCode() { - return Objects.hash(data, processorResultList, failure); - } - - static final class Fields { - static final XContentBuilderString PROCESSOR_RESULTS = new XContentBuilderString("processor_results"); - } + public abstract XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java index a4518273467..a8d34b4a150 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java @@ -44,9 +44,9 @@ public class 
SimulateExecutionService { SimulateDocumentResult executeItem(Pipeline pipeline, Data data) { try { pipeline.execute(data); - return new SimulateDocumentResult(data); + return new SimulateSimpleDocumentResult(data); } catch (Exception e) { - return new SimulateDocumentResult(e); + return new SimulateFailedDocumentResult(e); } } @@ -67,7 +67,7 @@ public class SimulateExecutionService { currentData = new Data(currentData); } - return new SimulateDocumentResult(processorResultList); + return new SimulateVerboseDocumentResult(processorResultList); } public void execute(ParsedSimulateRequest request, ActionListener listener) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java new file mode 100644 index 00000000000..e0bf7555b6b --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class SimulateFailedDocumentResult extends SimulateDocumentResult { + public static final int STREAM_ID = 2; + + private Throwable failure; + + public SimulateFailedDocumentResult() { + + } + + public SimulateFailedDocumentResult(Throwable failure) { + this.failure = failure; + } + + @Override + public int getStreamId() { + return STREAM_ID; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + int streamId = in.readVInt(); + if (streamId != STREAM_ID) { + throw new IOException("stream_id [" + streamId + "] does not match " + getClass().getName() + " [stream_id=" + STREAM_ID + "]"); + } + this.failure = in.readThrowable(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(STREAM_ID); + out.writeThrowable(failure); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + ElasticsearchException.renderThrowable(builder, params, failure); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SimulateFailedDocumentResult that = (SimulateFailedDocumentResult) o; + + return Objects.equals((failure == null) ? null : failure.getClass(), + (that.failure == null) ? 
null : that.failure.getClass()); + } + + @Override + public int hashCode() { + return Objects.hash(failure); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java index 4aec025b12d..3394de413c8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -83,4 +83,13 @@ public class SimulatePipelineRequest extends ActionRequest { out.writeBoolean(verbose); out.writeBytesReference(source); } + + public static final class Fields { + static final String PIPELINE = "pipeline"; + static final String DOCS = "docs"; + static final String SOURCE = "_source"; + static final String INDEX = "_index"; + static final String TYPE = "_type"; + static final String ID = "_id"; + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java index 5d5bb57bc1f..5922f9d906e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java @@ -33,9 +33,8 @@ import java.util.List; import java.util.Objects; public class SimulatePipelineResponse extends ActionResponse implements ToXContent { - private String pipelineId; - private List responses; + private List results; public SimulatePipelineResponse() { @@ -43,7 +42,7 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte public SimulatePipelineResponse(String pipelineId, List responses) { this.pipelineId = 
pipelineId; - this.responses = Collections.unmodifiableList(responses); + this.results = Collections.unmodifiableList(responses); } public String getPipelineId() { @@ -54,20 +53,21 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte this.pipelineId = pipelineId; } - public List getResponses() { - return responses; + public List getResults() { + return results; } - public void setResponses(List responses) { - this.responses = responses; + public void setResults(List results) { + this.results = results; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(pipelineId); - out.writeVInt(responses.size()); - for (SimulateDocumentResult response : responses) { + out.writeVInt(results.size()); + for (SimulateDocumentResult response : results) { + out.writeVInt(response.getStreamId()); response.writeTo(out); } } @@ -77,11 +77,24 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte super.readFrom(in); this.pipelineId = in.readString(); int responsesLength = in.readVInt(); - responses = new ArrayList<>(); + results = new ArrayList<>(); for (int i = 0; i < responsesLength; i++) { - SimulateDocumentResult response = new SimulateDocumentResult(); - response.readFrom(in); - responses.add(response); + SimulateDocumentResult result; + switch (in.readVInt()) { + case SimulateSimpleDocumentResult.STREAM_ID: + result = new SimulateSimpleDocumentResult(); + break; + case SimulateVerboseDocumentResult.STREAM_ID: + result = new SimulateVerboseDocumentResult(); + break; + case SimulateFailedDocumentResult.STREAM_ID: + result = new SimulateFailedDocumentResult(); + break; + default: + throw new IOException("Cannot read result from stream"); + } + result.readFrom(in); + results.add(result); } } @@ -89,7 +102,7 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) 
throws IOException { builder.startArray(Fields.DOCUMENTS); - for (SimulateDocumentResult response : responses) { + for (SimulateDocumentResult response : results) { response.toXContent(builder, params); } builder.endArray(); @@ -103,12 +116,12 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte if (o == null || getClass() != o.getClass()) return false; SimulatePipelineResponse that = (SimulatePipelineResponse) o; return Objects.equals(pipelineId, that.pipelineId) && - Objects.equals(responses, that.responses); + Objects.equals(results, that.results); } @Override public int hashCode() { - return Objects.hash(pipelineId, responses); + return Objects.hash(pipelineId, results); } static final class Fields { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java index ca97ec5a604..7b196f69276 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java @@ -51,7 +51,7 @@ public class SimulateProcessorResult implements Streamable, ToXContent { this.failure = failure; } - public boolean isFailed() { + private boolean isFailed() { return this.failure != null; } @@ -66,6 +66,7 @@ public class SimulateProcessorResult implements Streamable, ToXContent { @Override public void readFrom(StreamInput in) throws IOException { boolean isFailure = in.readBoolean(); + this.processorId = in.readString(); if (isFailure) { this.failure = in.readThrowable(); } else { @@ -77,10 +78,10 @@ public class SimulateProcessorResult implements Streamable, ToXContent { @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(isFailed()); + out.writeString(processorId); if (isFailed()) { 
out.writeThrowable(failure); } else { - out.writeString(processorId); data.writeTo(out); } } @@ -107,7 +108,9 @@ public class SimulateProcessorResult implements Streamable, ToXContent { return false; } SimulateProcessorResult other = (SimulateProcessorResult) obj; - return Objects.equals(processorId, other.processorId) && Objects.equals(data, other.data) && Objects.equals(failure, other.failure); + + return Objects.equals(processorId, other.processorId) && Objects.equals(data, other.data) && + Objects.equals((failure == null) ? null : failure.getClass(), (other.failure == null) ? null : other.failure.getClass()); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java new file mode 100644 index 00000000000..9d432bc2855 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.plugin.ingest.transport.TransportData; + +import java.io.IOException; +import java.util.Objects; + +public class SimulateSimpleDocumentResult extends SimulateDocumentResult { + public static final int STREAM_ID = 0; + + private TransportData transportData; + + public SimulateSimpleDocumentResult() { + + } + + public SimulateSimpleDocumentResult(Data data) { + this.transportData = new TransportData(data); + } + + @Override + public int getStreamId() { + return STREAM_ID; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + int streamId = in.readVInt(); + if (streamId != STREAM_ID) { + throw new IOException("stream_id [" + streamId + "] does not match " + getClass().getName() + " [stream_id=" + STREAM_ID + "]"); + } + this.transportData = new TransportData(); + this.transportData.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(STREAM_ID); + transportData.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + transportData.toXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SimulateSimpleDocumentResult that = (SimulateSimpleDocumentResult) o; + return Objects.equals(transportData, that.transportData); + } + + @Override + public int hashCode() { + return Objects.hash(transportData); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateVerboseDocumentResult.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateVerboseDocumentResult.java new file mode 100644 index 00000000000..f3da751e78b --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateVerboseDocumentResult.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class SimulateVerboseDocumentResult extends SimulateDocumentResult { + public static final int STREAM_ID = 1; + + private List processorResults; + + public SimulateVerboseDocumentResult() { + + } + + public SimulateVerboseDocumentResult(List processorResults) { + this.processorResults = processorResults; + } + + @Override + public int getStreamId() { + return STREAM_ID; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + int streamId = in.readVInt(); + if (streamId != STREAM_ID) { + throw new IOException("stream_id [" + streamId + "] does not match " + getClass().getName() + " [stream_id=" + STREAM_ID + "]"); + } + int size = in.readVInt(); + processorResults = new ArrayList<>(); + for (int i = 0; i < size; i++) { + SimulateProcessorResult processorResult = new SimulateProcessorResult(); + processorResult.readFrom(in); + processorResults.add(processorResult); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(STREAM_ID); + out.writeVInt(processorResults.size()); + for (SimulateProcessorResult result : processorResults) { + result.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(Fields.PROCESSOR_RESULTS); + for (SimulateProcessorResult processorResult : processorResults) { + processorResult.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if 
(this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SimulateVerboseDocumentResult that = (SimulateVerboseDocumentResult) o; + return Objects.equals(processorResults, that.processorResults); + } + + @Override + public int hashCode() { + return Objects.hash(processorResults); + } + + static final class Fields { + static final XContentBuilderString PROCESSOR_RESULTS = new XContentBuilderString("processor_results"); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 97ed3339090..2001b673d06 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -30,10 +30,7 @@ import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulateDocumentResult; +import org.elasticsearch.plugin.ingest.transport.simulate.*; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -104,7 +101,7 @@ public class IngestClientIT extends ESIntegTestCase { Map expectedDoc = new HashMap<>(); expectedDoc.put("foo", "bar"); Data expectedData = new Data("index", "type", "id", expectedDoc); - SimulateDocumentResult expectedResponse = new SimulateDocumentResult(expectedData); + SimulateDocumentResult expectedResponse = new 
SimulateSimpleDocumentResult(expectedData); List expectedResponses = Arrays.asList(expectedResponse); SimulatePipelineResponse expected = new SimulatePipelineResponse("_id", expectedResponses); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java new file mode 100644 index 00000000000..84d0953495b --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.mutate.MutateProcessor; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.mockito.Mockito.mock; + +public class PipelineTests extends ESTestCase { + private Processor updateProcessor; + private Processor lowercaseProcessor; + private Pipeline pipeline; + + @Before + public void setup() { + Map update = Collections.singletonMap("foo", 123); + List lowercase = Collections.singletonList("foo"); + updateProcessor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null); + lowercaseProcessor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); + pipeline = new Pipeline("id", "description", Arrays.asList(updateProcessor, lowercaseProcessor)); + } + + public void testEquals() throws Exception { + Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getProcessors()); + assertThat(pipeline, equalTo(other)); + } + + public void testNotEqualsDiffId() throws Exception { + Pipeline other = new Pipeline(pipeline.getId() + "foo", pipeline.getDescription(), pipeline.getProcessors()); + assertThat(pipeline, not(equalTo(other))); + } + + public void testNotEqualsDiffDescription() throws Exception { + Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription() + "foo", pipeline.getProcessors()); + assertThat(pipeline, not(equalTo(other))); + } + + public void testNotEqualsDiffProcessors() throws Exception { + Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription() + "foo", Collections.singletonList(updateProcessor)); + assertThat(pipeline, not(equalTo(other))); + } +} diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java index 03c4c3af7f8..09c145a56b6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java @@ -34,6 +34,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; + public class ParsedSimulateRequestParserTests extends ESTestCase { private PipelineStore store; private ParsedSimulateRequest.Parser parser; @@ -43,9 +45,9 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { @Before public void init() throws IOException { parser = new ParsedSimulateRequest.Parser(); - List uppercase = Collections.unmodifiableList(Collections.singletonList("foo")); + List uppercase = Collections.singletonList("foo"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); - pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Collections.unmodifiableList(Arrays.asList(processor))); + pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Arrays.asList(processor)); data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); Map processorRegistry = new HashMap<>(); processorRegistry.put("mutate", new MutateProcessor.Factory()); @@ -60,12 +62,12 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { Map raw = new HashMap<>(); List> docs = new ArrayList<>(); Map doc = new HashMap<>(); - doc.put("_index", "_index"); - doc.put("_type", 
"_type"); - doc.put("_id", "_id"); - doc.put("_source", data.getDocument()); + doc.put(Fields.INDEX, "_index"); + doc.put(Fields.TYPE, "_type"); + doc.put(Fields.ID, "_id"); + doc.put(Fields.SOURCE, data.getDocument()); docs.add(doc); - raw.put("docs", docs); + raw.put(Fields.DOCS, docs); ParsedSimulateRequest actualRequest = parser.parseWithPipelineId("_id", raw, false, store); assertThat(actualRequest, equalTo(expectedRequest)); @@ -77,10 +79,10 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { Map raw = new HashMap<>(); List> docs = new ArrayList<>(); Map doc = new HashMap<>(); - doc.put("_index", "_index"); - doc.put("_type", "_type"); - doc.put("_id", "_id"); - doc.put("_source", data.getDocument()); + doc.put(Fields.INDEX, "_index"); + doc.put(Fields.TYPE, "_type"); + doc.put(Fields.ID, "_id"); + doc.put(Fields.SOURCE, data.getDocument()); docs.add(doc); Map processorConfig = new HashMap<>(); @@ -88,8 +90,8 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { Map pipelineConfig = new HashMap<>(); pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig))); - raw.put("docs", docs); - raw.put("pipeline", pipelineConfig); + raw.put(Fields.DOCS, docs); + raw.put(Fields.PIPELINE, pipelineConfig); ParsedSimulateRequest actualRequest = parser.parse(raw, false, store); assertThat(actualRequest, equalTo(expectedRequest)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index cf108b0db84..e7754f94380 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -68,7 +68,7 @@ public class 
SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItem() throws Exception { - SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult( + SimulateDocumentResult expectedItemResponse = new SimulateVerboseDocumentResult( Arrays.asList(new SimulateProcessorResult("processor[mock]-0", data), new SimulateProcessorResult("processor[mock]-1", data))); SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, data); verify(processor, times(2)).execute(data); @@ -76,7 +76,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteItem() throws Exception { - SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult(data); + SimulateDocumentResult expectedItemResponse = new SimulateSimpleDocumentResult(data); SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(2)).execute(data); assertThat(actualItemResponse, equalTo(expectedItemResponse)); @@ -84,7 +84,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteVerboseItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult( + SimulateDocumentResult expectedItemResponse = new SimulateVerboseDocumentResult( Arrays.asList(new SimulateProcessorResult("processor[mock]-0", e), new SimulateProcessorResult("processor[mock]-1", data)) ); doThrow(e).doNothing().when(processor).execute(data); @@ -95,7 +95,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - SimulateDocumentResult expectedItemResponse = new SimulateDocumentResult(e); + SimulateDocumentResult expectedItemResponse = new SimulateFailedDocumentResult(e); doThrow(e).when(processor).execute(data); 
SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(1)).execute(data); @@ -103,7 +103,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecute() throws Exception { - SimulateDocumentResult itemResponse = new SimulateDocumentResult(data); + SimulateDocumentResult itemResponse = new SimulateSimpleDocumentResult(data); ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); executionService.execute(request, listener); SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); @@ -118,7 +118,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteWithVerbose() throws Exception { ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), true); - SimulateDocumentResult itemResponse = new SimulateDocumentResult( + SimulateDocumentResult itemResponse = new SimulateVerboseDocumentResult( Arrays.asList(new SimulateProcessorResult("processor[mock]-0", data), new SimulateProcessorResult("processor[mock]-1", data))); executionService.execute(request, listener); SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java new file mode 100644 index 00000000000..c766a8979fe --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class SimulateFailedDocumentResultTests extends ESTestCase { + + @Before + public void setup() { + } + + public void testEqualsExact() { + Throwable throwable = new Exception("foo"); + SimulateDocumentResult result = new SimulateFailedDocumentResult(throwable); + SimulateDocumentResult otherResult = new SimulateFailedDocumentResult(throwable); + assertThat(result, equalTo(otherResult)); + } + + public void testEqualsSameExceptionClass() { + SimulateDocumentResult result = new SimulateFailedDocumentResult(new IllegalArgumentException("foo")); + SimulateDocumentResult otherResult = new SimulateFailedDocumentResult(new IllegalArgumentException("bar")); + assertThat(result, equalTo(otherResult)); + } + + public void testNotEqualsDiffExceptionClass() { + SimulateDocumentResult result = new SimulateFailedDocumentResult(new IllegalArgumentException("foo")); + SimulateDocumentResult otherResult = 
new SimulateFailedDocumentResult(new NullPointerException("foo")); + assertThat(result, not(equalTo(otherResult))); + } + + public void testStreamable() throws IOException { + SimulateDocumentResult result = new SimulateFailedDocumentResult(new IllegalArgumentException("foo")); + + BytesStreamOutput out = new BytesStreamOutput(); + result.writeTo(out); + + StreamInput streamInput = StreamInput.wrap(out.bytes()); + SimulateDocumentResult otherResult = new SimulateFailedDocumentResult(); + otherResult.readFrom(streamInput); + + assertThat(result, equalTo(otherResult)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java new file mode 100644 index 00000000000..2352f3db0ba --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class SimulatePipelineResponseTests extends ESTestCase { + private Data data; + private SimulateDocumentResult documentResult; + private SimulatePipelineResponse response; + + @Before + public void setup() { + data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + documentResult = new SimulateSimpleDocumentResult(data); + response = new SimulatePipelineResponse("_id", Collections.singletonList(documentResult)); + } + + public void testEquals() { + SimulatePipelineResponse otherResponse = new SimulatePipelineResponse("_id", Collections.singletonList(documentResult)); + assertThat(response, equalTo(otherResponse)); + } + + public void testNotEqualsId() { + SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(response.getPipelineId() + "foo", response.getResults()); + assertThat(response, not(equalTo(otherResponse))); + } + + public void testNotEqualsResults() { + SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(response.getPipelineId(), Arrays.asList(documentResult, documentResult)); + assertThat(response, not(equalTo(otherResponse))); + } + + public void testStreamable() throws IOException { + List results = Arrays.asList( + new SimulateSimpleDocumentResult(data), + new SimulateFailedDocumentResult(new IllegalArgumentException("foo")), + new SimulateVerboseDocumentResult(Collections.singletonList(new SimulateProcessorResult("pid", data))) + ); + + response = new SimulatePipelineResponse("_id", 
results); + BytesStreamOutput out = new BytesStreamOutput(); + response.writeTo(out); + + StreamInput streamInput = StreamInput.wrap(out.bytes()); + SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(); + otherResponse.readFrom(streamInput); + + assertThat(response, equalTo(otherResponse)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java new file mode 100644 index 00000000000..1a20da74416 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class SimulateProcessorResultTests extends ESTestCase { + private Data data; + private SimulateProcessorResult result; + private SimulateProcessorResult failedResult; + private String processorId; + private Throwable throwable; + + @Before + public void setup() { + data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + processorId = "id"; + throwable = new IllegalArgumentException("foo"); + result = new SimulateProcessorResult(processorId, data); + failedResult = new SimulateProcessorResult(processorId, throwable); + } + + public void testEqualsData() { + SimulateProcessorResult otherResult = new SimulateProcessorResult(new String(processorId), new Data(data)); + assertThat(result, equalTo(otherResult)); + } + + public void testEqualsSameClassThrowable() { + SimulateProcessorResult otherFailedResult = new SimulateProcessorResult(new String(processorId), new IllegalArgumentException("foo")); + assertThat(failedResult, equalTo(otherFailedResult)); + } + + public void testNotEqualsThrowable() { + SimulateProcessorResult otherFailedResult = new SimulateProcessorResult(new String(processorId), new NullPointerException("foo")); + assertThat(failedResult, not(equalTo(otherFailedResult))); + } + + public void testStreamableWithThrowable() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + failedResult.writeTo(out); + + StreamInput streamInput = StreamInput.wrap(out.bytes()); + SimulateProcessorResult otherFailedResult = new SimulateProcessorResult(); + 
otherFailedResult.readFrom(streamInput); + + assertThat(failedResult, equalTo(otherFailedResult)); + } + + public void testStreamableWithData() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + result.writeTo(out); + + StreamInput streamInput = StreamInput.wrap(out.bytes()); + SimulateProcessorResult otherResult = new SimulateProcessorResult(); + otherResult.readFrom(streamInput); + + assertThat(result, equalTo(otherResult)); + + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java new file mode 100644 index 00000000000..7c7a497bbcc --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class SimulateSimpleDocumentResultTests extends ESTestCase { + private Data data; + + @Before + public void setup() { + data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + } + + public void testEquals() { + SimulateDocumentResult result = new SimulateSimpleDocumentResult(data); + SimulateDocumentResult otherResult = new SimulateSimpleDocumentResult(data); + assertThat(result, equalTo(otherResult)); + } + + public void testNotEqualsDiffData() { + Data otherData = new Data(data.getIndex() + "foo", data.getType(), data.getId(), data.getDocument()); + SimulateDocumentResult result = new SimulateSimpleDocumentResult(data); + SimulateDocumentResult otherResult = new SimulateSimpleDocumentResult(otherData); + assertThat(result, not(equalTo(otherResult))); + } + + public void testStreamable() throws IOException { + SimulateDocumentResult result = new SimulateSimpleDocumentResult(data); + + BytesStreamOutput out = new BytesStreamOutput(); + result.writeTo(out); + + StreamInput streamInput = StreamInput.wrap(out.bytes()); + SimulateDocumentResult otherResult = new SimulateSimpleDocumentResult(); + otherResult.readFrom(streamInput); + + assertThat(result, equalTo(otherResult)); + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index 156ba8cfd67..371aeedd3ef 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ 
b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -87,6 +87,33 @@ } - length: { docs: 1 } +--- +"Test simulate with no provided pipeline or pipeline_id": + - do: + cluster.health: + wait_for_status: green + + - do: + catch: request + ingest.simulate: + body: > + { + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { error: 3 } + - match: { status: 400 } + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "required property [pipeline] is missing" } + --- "Test simulate with verbose flag": - do: From d093600729e4669a2ee44c020e841f6c9113a721 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 13 Nov 2015 16:22:24 +0100 Subject: [PATCH 055/347] simplify serialization for simulate response depending on verbose flag Removed equals and hashcode whenever they wouldn't be reliable because of exception comparison. at the end of the day we use them for testing and we can simplify our tests without requiring equals and hashcode in prod code, which also would require more tests if maintained. 
Add equals/hashcode test for Data/TransportData and randomize existing serialization tests --- .../ingest/transport/TransportData.java | 17 +-- .../simulate/SimulateDocumentResult.java | 18 +-- .../SimulateDocumentSimpleResult.java | 95 +++++++++++++++ ...ava => SimulateDocumentVerboseResult.java} | 49 +++----- .../simulate/SimulateExecutionService.java | 12 +- .../SimulateFailedDocumentResult.java | 84 ------------- .../simulate/SimulatePipelineResponse.java | 53 ++------ .../simulate/SimulateProcessorResult.java | 83 ++++++------- .../SimulateSimpleDocumentResult.java | 84 ------------- .../elasticsearch/ingest/IngestClientIT.java | 29 +++-- .../ingest/transport/TransportDataTests.java | 80 ++++++++++-- .../SimulateDocumentSimpleResultTests.java | 58 +++++++++ .../SimulateExecutionServiceTests.java | 83 ++++++------- .../SimulateFailedDocumentResultTests.java | 69 ----------- .../SimulatePipelineResponseTests.java | 114 ++++++++++++------ .../SimulateProcessorResultTests.java | 70 ++++------- .../SimulateSimpleDocumentResultTests.java | 67 ---------- 17 files changed, 455 insertions(+), 610 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/{SimulateVerboseDocumentResult.java => SimulateDocumentVerboseResult.java} (62%) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java delete mode 100644 
plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java index b9fa46fe939..d3753a99306 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java @@ -21,7 +21,7 @@ package org.elasticsearch.plugin.ingest.transport; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; @@ -31,12 +31,11 @@ import java.io.IOException; import java.util.Map; import java.util.Objects; -public class TransportData implements Streamable, ToXContent { - private Data data; +public class TransportData implements Writeable, ToXContent { - public TransportData() { + private static final TransportData PROTOTYPE = new TransportData(null); - } + private final Data data; public TransportData(Data data) { this.data = data; @@ -46,13 +45,17 @@ public class TransportData implements Streamable, ToXContent { return data; } + public static TransportData readTransportDataFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + @Override - public void readFrom(StreamInput in) throws IOException { + public TransportData readFrom(StreamInput in) throws IOException { String index = in.readString(); String type = in.readString(); String id = in.readString(); Map doc = in.readMap(); - this.data = new Data(index, type, id, doc); + return new TransportData(new 
Data(index, type, id, doc)); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java index 98fdf380bc7..ff9ad829aad 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java @@ -18,23 +18,9 @@ */ package org.elasticsearch.plugin.ingest.transport.simulate; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import java.io.IOException; +public interface SimulateDocumentResult extends Writeable, ToXContent { -public abstract class SimulateDocumentResult implements Streamable, ToXContent { - - public int getStreamId() { - return -1; - } - - public abstract void readFrom(StreamInput in) throws IOException; - - public abstract void writeTo(StreamOutput out) throws IOException; - - public abstract XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java new file mode 100644 index 00000000000..1783b10f998 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.plugin.ingest.transport.TransportData; + +import java.io.IOException; + +public class SimulateDocumentSimpleResult implements SimulateDocumentResult { + + private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult((Data)null); + + private TransportData data; + private Exception failure; + + public SimulateDocumentSimpleResult(Data data) { + this.data = new TransportData(data); + } + + private SimulateDocumentSimpleResult(TransportData data) { + this.data = data; + } + + public SimulateDocumentSimpleResult(Exception failure) { + this.failure = failure; + } + + public Data getData() { + if (data == null) { + return null; + } + return data.get(); + } + + public Exception getFailure() { + return failure; + } + + public static SimulateDocumentSimpleResult readSimulateDocumentSimpleResult(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public 
SimulateDocumentSimpleResult readFrom(StreamInput in) throws IOException { + if (in.readBoolean()) { + Exception exception = in.readThrowable(); + return new SimulateDocumentSimpleResult(exception); + } + return new SimulateDocumentSimpleResult(TransportData.readTransportDataFrom(in)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (failure == null) { + out.writeBoolean(false); + data.writeTo(out); + } else { + out.writeBoolean(true); + out.writeThrowable(failure); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (failure == null) { + data.toXContent(builder, params); + } else { + ElasticsearchException.renderThrowable(builder, params, failure); + } + builder.endObject(); + return builder; + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateVerboseDocumentResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentVerboseResult.java similarity index 62% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateVerboseDocumentResult.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentVerboseResult.java index f3da751e78b..eac308d9f35 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateVerboseDocumentResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentVerboseResult.java @@ -25,45 +25,39 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; -import java.util.Objects; -public class SimulateVerboseDocumentResult extends SimulateDocumentResult { - public static final int STREAM_ID = 1; +public class 
SimulateDocumentVerboseResult implements SimulateDocumentResult { - private List processorResults; + private static final SimulateDocumentVerboseResult PROTOTYPE = new SimulateDocumentVerboseResult(Collections.emptyList()); - public SimulateVerboseDocumentResult() { + private final List processorResults; - } - - public SimulateVerboseDocumentResult(List processorResults) { + public SimulateDocumentVerboseResult(List processorResults) { this.processorResults = processorResults; } - @Override - public int getStreamId() { - return STREAM_ID; + public List getProcessorResults() { + return processorResults; + } + + public static SimulateDocumentVerboseResult readSimulateDocumentVerboseResultFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); } @Override - public void readFrom(StreamInput in) throws IOException { - int streamId = in.readVInt(); - if (streamId != STREAM_ID) { - throw new IOException("stream_id [" + streamId + "] does not match " + getClass().getName() + " [stream_id=" + STREAM_ID + "]"); - } + public SimulateDocumentVerboseResult readFrom(StreamInput in) throws IOException { int size = in.readVInt(); - processorResults = new ArrayList<>(); + List processorResults = new ArrayList<>(); for (int i = 0; i < size; i++) { - SimulateProcessorResult processorResult = new SimulateProcessorResult(); - processorResult.readFrom(in); - processorResults.add(processorResult); + processorResults.add(SimulateProcessorResult.readSimulateProcessorResultFrom(in)); } + return new SimulateDocumentVerboseResult(processorResults); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(STREAM_ID); out.writeVInt(processorResults.size()); for (SimulateProcessorResult result : processorResults) { result.writeTo(out); @@ -82,19 +76,6 @@ public class SimulateVerboseDocumentResult extends SimulateDocumentResult { return builder; } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || 
getClass() != o.getClass()) return false; - SimulateVerboseDocumentResult that = (SimulateVerboseDocumentResult) o; - return Objects.equals(processorResults, that.processorResults); - } - - @Override - public int hashCode() { - return Objects.hash(processorResults); - } - static final class Fields { static final XContentBuilderString PROCESSOR_RESULTS = new XContentBuilderString("processor_results"); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java index a8d34b4a150..e71b20d66b1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java @@ -40,18 +40,16 @@ public class SimulateExecutionService { this.threadPool = threadPool; } - SimulateDocumentResult executeItem(Pipeline pipeline, Data data) { try { pipeline.execute(data); - return new SimulateSimpleDocumentResult(data); + return new SimulateDocumentSimpleResult(data); } catch (Exception e) { - return new SimulateFailedDocumentResult(e); + return new SimulateDocumentSimpleResult(e); } - } - SimulateDocumentResult executeVerboseItem(Pipeline pipeline, Data data) { + SimulateDocumentVerboseResult executeVerboseItem(Pipeline pipeline, Data data) { List processorResultList = new ArrayList<>(); Data currentData = new Data(data); for (int i = 0; i < pipeline.getProcessors().size(); i++) { @@ -67,7 +65,7 @@ public class SimulateExecutionService { currentData = new Data(currentData); } - return new SimulateVerboseDocumentResult(processorResultList); + return new SimulateDocumentVerboseResult(processorResultList); } public void execute(ParsedSimulateRequest request, ActionListener listener) { @@ -82,7 +80,7 @@ public class SimulateExecutionService { 
responses.add(executeItem(request.getPipeline(), data)); } } - listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), responses)); + listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); } }); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java deleted file mode 100644 index e0bf7555b6b..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResult.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugin.ingest.transport.simulate; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class SimulateFailedDocumentResult extends SimulateDocumentResult { - public static final int STREAM_ID = 2; - - private Throwable failure; - - public SimulateFailedDocumentResult() { - - } - - public SimulateFailedDocumentResult(Throwable failure) { - this.failure = failure; - } - - @Override - public int getStreamId() { - return STREAM_ID; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - int streamId = in.readVInt(); - if (streamId != STREAM_ID) { - throw new IOException("stream_id [" + streamId + "] does not match " + getClass().getName() + " [stream_id=" + STREAM_ID + "]"); - } - this.failure = in.readThrowable(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(STREAM_ID); - out.writeThrowable(failure); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - ElasticsearchException.renderThrowable(builder, params, failure); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SimulateFailedDocumentResult that = (SimulateFailedDocumentResult) o; - - return Objects.equals((failure == null) ? null : failure.getClass(), - (that.failure == null) ? 
null : that.failure.getClass()); - } - - @Override - public int hashCode() { - return Objects.hash(failure); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java index 5922f9d906e..097595f3a32 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java @@ -30,18 +30,19 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Objects; public class SimulatePipelineResponse extends ActionResponse implements ToXContent { private String pipelineId; + private boolean verbose; private List results; public SimulatePipelineResponse() { } - public SimulatePipelineResponse(String pipelineId, List responses) { + public SimulatePipelineResponse(String pipelineId, boolean verbose, List responses) { this.pipelineId = pipelineId; + this.verbose = verbose; this.results = Collections.unmodifiableList(responses); } @@ -49,25 +50,21 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte return pipelineId; } - public void setPipelineId(String pipelineId) { - this.pipelineId = pipelineId; - } - public List getResults() { return results; } - public void setResults(List results) { - this.results = results; + public boolean isVerbose() { + return verbose; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(pipelineId); + out.writeBoolean(verbose); out.writeVInt(results.size()); for (SimulateDocumentResult response : results) { - out.writeVInt(response.getStreamId()); response.writeTo(out); } } @@ -76,27 +73,18 @@ public class SimulatePipelineResponse extends ActionResponse 
implements ToXConte public void readFrom(StreamInput in) throws IOException { super.readFrom(in); this.pipelineId = in.readString(); + boolean verbose = in.readBoolean(); int responsesLength = in.readVInt(); results = new ArrayList<>(); for (int i = 0; i < responsesLength; i++) { - SimulateDocumentResult result; - switch (in.readVInt()) { - case SimulateSimpleDocumentResult.STREAM_ID: - result = new SimulateSimpleDocumentResult(); - break; - case SimulateVerboseDocumentResult.STREAM_ID: - result = new SimulateVerboseDocumentResult(); - break; - case SimulateFailedDocumentResult.STREAM_ID: - result = new SimulateFailedDocumentResult(); - break; - default: - throw new IOException("Cannot read result from stream"); + SimulateDocumentResult simulateDocumentResult; + if (verbose) { + simulateDocumentResult = SimulateDocumentVerboseResult.readSimulateDocumentVerboseResultFrom(in); + } else { + simulateDocumentResult = SimulateDocumentSimpleResult.readSimulateDocumentSimpleResult(in); } - result.readFrom(in); - results.add(result); + results.add(simulateDocumentResult); } - } @Override @@ -106,24 +94,9 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte response.toXContent(builder, params); } builder.endArray(); - return builder; } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SimulatePipelineResponse that = (SimulatePipelineResponse) o; - return Objects.equals(pipelineId, that.pipelineId) && - Objects.equals(results, that.results); - } - - @Override - public int hashCode() { - return Objects.hash(pipelineId, results); - } - static final class Fields { static final XContentBuilderString DOCUMENTS = new XContentBuilderString("docs"); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java index 7b196f69276..138f1ae553c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java @@ -21,7 +21,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; @@ -29,33 +29,34 @@ import org.elasticsearch.ingest.Data; import org.elasticsearch.plugin.ingest.transport.TransportData; import java.io.IOException; -import java.util.Objects; -public class SimulateProcessorResult implements Streamable, ToXContent { +public class SimulateProcessorResult implements Writeable, ToXContent { + + private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult(null, (Data)null); private String processorId; private TransportData data; - private Throwable failure; - - public SimulateProcessorResult() { - - } + private Exception failure; public SimulateProcessorResult(String processorId, Data data) { this.processorId = processorId; this.data = new TransportData(data); } - public SimulateProcessorResult(String processorId, Throwable failure) { + private SimulateProcessorResult(String processorId, TransportData data) { + this.processorId = processorId; + this.data = data; + } + + public SimulateProcessorResult(String processorId, Exception failure) { this.processorId = processorId; this.failure = failure; } - private boolean isFailed() { 
- return this.failure != null; - } - public Data getData() { + if (data == null) { + return null; + } return data.get(); } @@ -63,26 +64,33 @@ public class SimulateProcessorResult implements Streamable, ToXContent { return processorId; } + public Exception getFailure() { + return failure; + } + + public static SimulateProcessorResult readSimulateProcessorResultFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + @Override - public void readFrom(StreamInput in) throws IOException { - boolean isFailure = in.readBoolean(); - this.processorId = in.readString(); - if (isFailure) { - this.failure = in.readThrowable(); - } else { - this.data = new TransportData(); - this.data.readFrom(in); + public SimulateProcessorResult readFrom(StreamInput in) throws IOException { + String processorId = in.readString(); + if (in.readBoolean()) { + Exception exception = in.readThrowable(); + return new SimulateProcessorResult(processorId, exception); } + return new SimulateProcessorResult(processorId, TransportData.readTransportDataFrom(in)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(isFailed()); out.writeString(processorId); - if (isFailed()) { - out.writeThrowable(failure); - } else { + if (failure == null) { + out.writeBoolean(false); data.writeTo(out); + } else { + out.writeBoolean(true); + out.writeThrowable(failure); } } @@ -90,34 +98,15 @@ public class SimulateProcessorResult implements Streamable, ToXContent { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Fields.PROCESSOR_ID, processorId); - if (isFailed()) { - ElasticsearchException.renderThrowable(builder, params, failure); - } else { + if (failure == null) { data.toXContent(builder, params); + } else { + ElasticsearchException.renderThrowable(builder, params, failure); } builder.endObject(); return builder; } - @Override - public boolean equals(Object obj) { - if 
(obj == this) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - SimulateProcessorResult other = (SimulateProcessorResult) obj; - - return Objects.equals(processorId, other.processorId) && Objects.equals(data, other.data) && - Objects.equals((failure == null) ? null : failure.getClass(), (other.failure == null) ? null : other.failure.getClass()); - } - - @Override - public int hashCode() { - return Objects.hash(processorId, data, failure); - } - static final class Fields { static final XContentBuilderString PROCESSOR_ID = new XContentBuilderString("processor_id"); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java deleted file mode 100644 index 9d432bc2855..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResult.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugin.ingest.transport.simulate; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.ingest.Data; -import org.elasticsearch.plugin.ingest.transport.TransportData; - -import java.io.IOException; -import java.util.Objects; - -public class SimulateSimpleDocumentResult extends SimulateDocumentResult { - public static final int STREAM_ID = 0; - - private TransportData transportData; - - public SimulateSimpleDocumentResult() { - - } - - public SimulateSimpleDocumentResult(Data data) { - this.transportData = new TransportData(data); - } - - @Override - public int getStreamId() { - return STREAM_ID; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - int streamId = in.readVInt(); - if (streamId != STREAM_ID) { - throw new IOException("stream_id [" + streamId + "] does not match " + getClass().getName() + " [stream_id=" + STREAM_ID + "]"); - } - this.transportData = new TransportData(); - this.transportData.readFrom(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(STREAM_ID); - transportData.writeTo(out); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - transportData.toXContent(builder, params); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SimulateSimpleDocumentResult that = (SimulateSimpleDocumentResult) o; - return Objects.equals(transportData, that.transportData); - } - - @Override - public int hashCode() { - return Objects.hash(transportData); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 2001b673d06..9b61ffa4fe3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -30,16 +30,20 @@ import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.simulate.*; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulateDocumentSimpleResult; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequestBuilder; +import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import java.util.*; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.notNullValue; @@ -53,7 +57,6 @@ public class IngestClientIT extends ESIntegTestCase { @Override protected Collection> transportClientPlugins() { return nodePlugins(); - } public void testSimulate() throws Exception { @@ -98,14 +101,14 @@ public class IngestClientIT extends ESIntegTestCase { .endObject().bytes()) .get(); - Map expectedDoc = new HashMap<>(); - expectedDoc.put("foo", "bar"); - Data expectedData = new 
Data("index", "type", "id", expectedDoc); - SimulateDocumentResult expectedResponse = new SimulateSimpleDocumentResult(expectedData); - List expectedResponses = Arrays.asList(expectedResponse); - SimulatePipelineResponse expected = new SimulatePipelineResponse("_id", expectedResponses); - - assertThat(response, equalTo(expected)); + assertThat(response.isVerbose(), equalTo(false)); + assertThat(response.getPipelineId(), equalTo("_id")); + assertThat(response.getResults().size(), equalTo(1)); + assertThat(response.getResults().get(0), instanceOf(SimulateDocumentSimpleResult.class)); + SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); + Data expectedData = new Data("index", "type", "id", Collections.singletonMap("foo", "bar")); + assertThat(simulateDocumentSimpleResult.getData(), equalTo(expectedData)); + assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } public void test() throws Exception { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java index 08940ee3ce2..89ef7731327 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java @@ -19,25 +19,89 @@ package org.elasticsearch.plugin.ingest.transport; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.Data; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class TransportDataTests extends ESTestCase { - public void testEquals() throws Exception { - Data data = new 
Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); - Data otherData = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); - assertThat(data, equalTo(otherData)); + public void testEqualsAndHashcode() throws Exception { + String index = randomAsciiOfLengthBetween(1, 10); + String type = randomAsciiOfLengthBetween(1, 10); + String id = randomAsciiOfLengthBetween(1, 10); + String fieldName = randomAsciiOfLengthBetween(1, 10); + String fieldValue = randomAsciiOfLengthBetween(1, 10); + Data data = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); + TransportData transportData = new TransportData(data); + + boolean changed = false; + String otherIndex; + if (randomBoolean()) { + otherIndex = randomAsciiOfLengthBetween(1, 10); + changed = true; + } else { + otherIndex = index; + } + String otherType; + if (randomBoolean()) { + otherType = randomAsciiOfLengthBetween(1, 10); + changed = true; + } else { + otherType = type; + } + String otherId; + if (randomBoolean()) { + otherId = randomAsciiOfLengthBetween(1, 10); + changed = true; + } else { + otherId = id; + } + Map document; + if (randomBoolean()) { + document = Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); + changed = true; + } else { + document = Collections.singletonMap(fieldName, fieldValue); + } + + Data otherData = new Data(otherIndex, otherType, otherId, document); + TransportData otherTransportData = new TransportData(otherData); + if (changed) { + assertThat(data, not(equalTo(otherData))); + assertThat(otherData, not(equalTo(data))); + assertThat(transportData, not(equalTo(otherTransportData))); + assertThat(otherTransportData, not(equalTo(transportData))); + } else { + assertThat(data, equalTo(otherData)); + assertThat(otherData, equalTo(data)); + assertThat(transportData, equalTo(otherTransportData)); + assertThat(otherTransportData, equalTo(transportData)); + Data thirdData = new 
Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); + TransportData thirdTransportData = new TransportData(thirdData); + assertThat(thirdData, equalTo(data)); + assertThat(data, equalTo(thirdData)); + assertThat(thirdTransportData, equalTo(transportData)); + assertThat(transportData, equalTo(thirdTransportData)); + } } - public void testNotEquals() throws Exception { - Data data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); - Data otherData = new Data("_index2", "_type", "_id", Collections.emptyMap()); - assertThat(data, not(equalTo(otherData))); + public void testSerialization() throws IOException { + Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + TransportData transportData = new TransportData(data); + + BytesStreamOutput out = new BytesStreamOutput(); + transportData.writeTo(out); + StreamInput streamInput = StreamInput.wrap(out.bytes()); + TransportData otherTransportData = TransportData.readTransportDataFrom(streamInput); + assertThat(otherTransportData, equalTo(transportData)); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java new file mode 100644 index 00000000000..e0b1c1c0f88 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.ingest.Data; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SimulateDocumentSimpleResultTests extends ESTestCase { + + public void testSerialization() throws IOException { + boolean isFailure = randomBoolean(); + SimulateDocumentSimpleResult simulateDocumentSimpleResult; + if (isFailure) { + simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(new IllegalArgumentException("test")); + } else { + Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(data); + } + + BytesStreamOutput out = new BytesStreamOutput(); + simulateDocumentSimpleResult.writeTo(out); + StreamInput streamInput = StreamInput.wrap(out.bytes()); + SimulateDocumentSimpleResult otherSimulateDocumentSimpleResult = 
SimulateDocumentSimpleResult.readSimulateDocumentSimpleResult(streamInput); + + assertThat(otherSimulateDocumentSimpleResult.getData(), equalTo(simulateDocumentSimpleResult.getData())); + if (isFailure) { + assertThat(otherSimulateDocumentSimpleResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentSimpleResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index e7754f94380..bd2e6397a74 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -19,12 +19,10 @@ package org.elasticsearch.plugin.ingest.transport.simulate; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.plugin.ingest.transport.TransportData; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -33,7 +31,7 @@ import org.junit.Before; import java.util.Arrays; import java.util.Collections; -import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.*; import static org.mockito.Mockito.*; public class SimulateExecutionServiceTests extends ESTestCase { @@ -43,8 +41,6 @@ public class SimulateExecutionServiceTests extends ESTestCase { private Pipeline pipeline; private Processor processor; private Data data; - private TransportData transportData; - private ActionListener 
listener; @Before public void setup() { @@ -58,8 +54,6 @@ public class SimulateExecutionServiceTests extends ESTestCase { when(processor.getType()).thenReturn("mock"); pipeline = new Pipeline("_id", "_description", Arrays.asList(processor, processor)); data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); - transportData = new TransportData(data); - listener = mock(ActionListener.class); } @After @@ -68,66 +62,61 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItem() throws Exception { - SimulateDocumentResult expectedItemResponse = new SimulateVerboseDocumentResult( - Arrays.asList(new SimulateProcessorResult("processor[mock]-0", data), new SimulateProcessorResult("processor[mock]-1", data))); SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, data); verify(processor, times(2)).execute(data); - assertThat(actualItemResponse, equalTo(expectedItemResponse)); + assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; + assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), not(sameInstance(data))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), equalTo(data)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), nullValue()); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(data))); + 
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(data)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); } public void testExecuteItem() throws Exception { - SimulateDocumentResult expectedItemResponse = new SimulateSimpleDocumentResult(data); SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(2)).execute(data); - assertThat(actualItemResponse, equalTo(expectedItemResponse)); + assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); + SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; + assertThat(simulateDocumentSimpleResult.getData(), equalTo(data)); + assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } public void testExecuteVerboseItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - SimulateDocumentResult expectedItemResponse = new SimulateVerboseDocumentResult( - Arrays.asList(new SimulateProcessorResult("processor[mock]-0", e), new SimulateProcessorResult("processor[mock]-1", data)) - ); doThrow(e).doNothing().when(processor).execute(data); SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, data); verify(processor, times(2)).execute(data); - assertThat(actualItemResponse, equalTo(expectedItemResponse)); + assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; + assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), nullValue()); + 
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), instanceOf(RuntimeException.class)); + RuntimeException runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); + assertThat(runtimeException.getMessage(), equalTo("processor failed")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(data))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(data)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); + runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); + assertThat(runtimeException.getMessage(), equalTo("processor failed")); } public void testExecuteItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - SimulateDocumentResult expectedItemResponse = new SimulateFailedDocumentResult(e); doThrow(e).when(processor).execute(data); SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); verify(processor, times(1)).execute(data); - assertThat(actualItemResponse, equalTo(expectedItemResponse)); - } - - public void testExecute() throws Exception { - SimulateDocumentResult itemResponse = new SimulateSimpleDocumentResult(data); - ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); - executionService.execute(request, listener); - SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); - assertBusy(new Runnable() { - @Override - public void run() { - verify(processor, times(2)).execute(data); - verify(listener).onResponse(response); - } - }); - } - - public void testExecuteWithVerbose() 
throws Exception { - ParsedSimulateRequest request = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), true); - SimulateDocumentResult itemResponse = new SimulateVerboseDocumentResult( - Arrays.asList(new SimulateProcessorResult("processor[mock]-0", data), new SimulateProcessorResult("processor[mock]-1", data))); - executionService.execute(request, listener); - SimulatePipelineResponse response = new SimulatePipelineResponse("_id", Collections.singletonList(itemResponse)); - assertBusy(new Runnable() { - @Override - public void run() { - verify(processor, times(2)).execute(data); - verify(listener).onResponse(response); - } - }); + assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); + SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; + assertThat(simulateDocumentSimpleResult.getData(), nullValue()); + assertThat(simulateDocumentSimpleResult.getFailure(), instanceOf(RuntimeException.class)); + RuntimeException runtimeException = (RuntimeException) simulateDocumentSimpleResult.getFailure(); + assertThat(runtimeException.getMessage(), equalTo("processor failed")); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java deleted file mode 100644 index c766a8979fe..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateFailedDocumentResultTests.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugin.ingest.transport.simulate; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.io.IOException; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; - -public class SimulateFailedDocumentResultTests extends ESTestCase { - - @Before - public void setup() { - } - - public void testEqualsExact() { - Throwable throwable = new Exception("foo"); - SimulateDocumentResult result = new SimulateFailedDocumentResult(throwable); - SimulateDocumentResult otherResult = new SimulateFailedDocumentResult(throwable); - assertThat(result, equalTo(otherResult)); - } - - public void testEqualsSameExceptionClass() { - SimulateDocumentResult result = new SimulateFailedDocumentResult(new IllegalArgumentException("foo")); - SimulateDocumentResult otherResult = new SimulateFailedDocumentResult(new IllegalArgumentException("bar")); - assertThat(result, equalTo(otherResult)); - } - - public void testNotEqualsDiffExceptionClass() { - SimulateDocumentResult result = new SimulateFailedDocumentResult(new IllegalArgumentException("foo")); - SimulateDocumentResult otherResult = new SimulateFailedDocumentResult(new NullPointerException("foo")); - assertThat(result, 
not(equalTo(otherResult))); - } - - public void testStreamable() throws IOException { - SimulateDocumentResult result = new SimulateFailedDocumentResult(new IllegalArgumentException("foo")); - - BytesStreamOutput out = new BytesStreamOutput(); - result.writeTo(out); - - StreamInput streamInput = StreamInput.wrap(out.bytes()); - SimulateDocumentResult otherResult = new SimulateFailedDocumentResult(); - otherResult.readFrom(streamInput); - - assertThat(result, equalTo(otherResult)); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java index 2352f3db0ba..0a325ca3268 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java @@ -23,58 +23,96 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.Data; import org.elasticsearch.test.ESTestCase; -import org.junit.Before; import java.io.IOException; -import java.util.Arrays; +import java.util.ArrayList; import java.util.Collections; +import java.util.Iterator; import java.util.List; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.nullValue; public class SimulatePipelineResponseTests extends ESTestCase { - private Data data; - private SimulateDocumentResult documentResult; - private SimulatePipelineResponse response; - @Before - public void setup() { - data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); - documentResult = new 
SimulateSimpleDocumentResult(data); - response = new SimulatePipelineResponse("_id", Collections.singletonList(documentResult)); - } + public void testSerialization() throws IOException { + boolean isVerbose = randomBoolean(); + int numResults = randomIntBetween(1, 10); + List results = new ArrayList<>(numResults); + for (int i = 0; i < numResults; i++) { + boolean isFailure = randomBoolean(); + Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + if (isVerbose) { + int numProcessors = randomIntBetween(1, 10); + List processorResults = new ArrayList<>(numProcessors); + for (int j = 0; j < numProcessors; j++) { + String processorId = randomAsciiOfLengthBetween(1, 10); + SimulateProcessorResult processorResult; + if (isFailure) { + processorResult = new SimulateProcessorResult(processorId, new IllegalArgumentException("test")); + } else { + processorResult = new SimulateProcessorResult(processorId, data); + } + processorResults.add(processorResult); + } + results.add(new SimulateDocumentVerboseResult(processorResults)); + } else { + results.add(new SimulateDocumentSimpleResult(data)); + SimulateDocumentSimpleResult simulateDocumentSimpleResult; + if (isFailure) { + simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(new IllegalArgumentException("test")); + } else { + simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(data); + } + results.add(simulateDocumentSimpleResult); + } + } - public void testEquals() { - SimulatePipelineResponse otherResponse = new SimulatePipelineResponse("_id", Collections.singletonList(documentResult)); - assertThat(response, equalTo(otherResponse)); - } - - public void testNotEqualsId() { - SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(response.getPipelineId() + "foo", response.getResults()); - assertThat(response, 
not(equalTo(otherResponse))); - } - - public void testNotEqualsResults() { - SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(response.getPipelineId(), Arrays.asList(documentResult, documentResult)); - assertThat(response, not(equalTo(otherResponse))); - } - - public void testStreamable() throws IOException { - List results = Arrays.asList( - new SimulateSimpleDocumentResult(data), - new SimulateFailedDocumentResult(new IllegalArgumentException("foo")), - new SimulateVerboseDocumentResult(Collections.singletonList(new SimulateProcessorResult("pid", data))) - ); - - response = new SimulatePipelineResponse("_id", results); + SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(); otherResponse.readFrom(streamInput); - assertThat(response, equalTo(otherResponse)); + assertThat(otherResponse.getPipelineId(), equalTo(response.getPipelineId())); + assertThat(otherResponse.getResults().size(), equalTo(response.getResults().size())); + + Iterator expectedResultIterator = response.getResults().iterator(); + for (SimulateDocumentResult result : otherResponse.getResults()) { + if (isVerbose) { + SimulateDocumentVerboseResult expectedSimulateDocumentVerboseResult = (SimulateDocumentVerboseResult) expectedResultIterator.next(); + assertThat(result, instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) result; + assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(expectedSimulateDocumentVerboseResult.getProcessorResults().size())); + Iterator expectedProcessorResultIterator = expectedSimulateDocumentVerboseResult.getProcessorResults().iterator(); + for (SimulateProcessorResult 
simulateProcessorResult : simulateDocumentVerboseResult.getProcessorResults()) { + SimulateProcessorResult expectedProcessorResult = expectedProcessorResultIterator.next(); + assertThat(simulateProcessorResult.getProcessorId(), equalTo(expectedProcessorResult.getProcessorId())); + assertThat(simulateProcessorResult.getData(), equalTo(expectedProcessorResult.getData())); + if (expectedProcessorResult.getFailure() == null) { + assertThat(simulateProcessorResult.getFailure(), nullValue()); + } else { + assertThat(simulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) simulateProcessorResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } + } + } else { + SimulateDocumentSimpleResult expectedSimulateDocumentSimpleResult = (SimulateDocumentSimpleResult) expectedResultIterator.next(); + assertThat(result, instanceOf(SimulateDocumentSimpleResult.class)); + SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) result; + assertThat(simulateDocumentSimpleResult.getData(), equalTo(expectedSimulateDocumentSimpleResult.getData())); + if (expectedSimulateDocumentSimpleResult.getFailure() == null) { + assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); + } else { + assertThat(simulateDocumentSimpleResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) simulateDocumentSimpleResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } + } + } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java index 1a20da74416..55024219347 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java @@ -23,65 +23,37 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.Data; import org.elasticsearch.test.ESTestCase; -import org.junit.Before; import java.io.IOException; import java.util.Collections; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.instanceOf; public class SimulateProcessorResultTests extends ESTestCase { - private Data data; - private SimulateProcessorResult result; - private SimulateProcessorResult failedResult; - private String processorId; - private Throwable throwable; - @Before - public void setup() { - data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); - processorId = "id"; - throwable = new IllegalArgumentException("foo"); - result = new SimulateProcessorResult(processorId, data); - failedResult = new SimulateProcessorResult(processorId, throwable); - } + public void testSerialization() throws IOException { + String processorId = randomAsciiOfLengthBetween(1, 10); + boolean isFailure = randomBoolean(); + SimulateProcessorResult simulateProcessorResult; + if (isFailure) { + simulateProcessorResult = new SimulateProcessorResult(processorId, new IllegalArgumentException("test")); + } else { + Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + simulateProcessorResult = new SimulateProcessorResult(processorId, data); + } - public void testEqualsData() { - SimulateProcessorResult otherResult = new SimulateProcessorResult(new String(processorId), new Data(data)); - assertThat(result, equalTo(otherResult)); - } - - public void testEqualsSameClassThrowable() { 
- SimulateProcessorResult otherFailedResult = new SimulateProcessorResult(new String(processorId), new IllegalArgumentException("foo")); - assertThat(failedResult, equalTo(otherFailedResult)); - } - - public void testNotEqualsThrowable() { - SimulateProcessorResult otherFailedResult = new SimulateProcessorResult(new String(processorId), new NullPointerException("foo")); - assertThat(failedResult, not(equalTo(otherFailedResult))); - } - - public void testStreamableWithThrowable() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); - failedResult.writeTo(out); - + simulateProcessorResult.writeTo(out); StreamInput streamInput = StreamInput.wrap(out.bytes()); - SimulateProcessorResult otherFailedResult = new SimulateProcessorResult(); - otherFailedResult.readFrom(streamInput); - - assertThat(failedResult, equalTo(otherFailedResult)); - } - - public void testStreamableWithData() throws IOException { - BytesStreamOutput out = new BytesStreamOutput(); - result.writeTo(out); - - StreamInput streamInput = StreamInput.wrap(out.bytes()); - SimulateProcessorResult otherResult = new SimulateProcessorResult(); - otherResult.readFrom(streamInput); - - assertThat(result, equalTo(otherResult)); - + SimulateProcessorResult otherSimulateProcessorResult = SimulateProcessorResult.readSimulateProcessorResultFrom(streamInput); + assertThat(otherSimulateProcessorResult.getProcessorId(), equalTo(simulateProcessorResult.getProcessorId())); + assertThat(otherSimulateProcessorResult.getData(), equalTo(simulateProcessorResult.getData())); + if (isFailure) { + assertThat(otherSimulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) otherSimulateProcessorResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java deleted file mode 100644 index 7c7a497bbcc..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateSimpleDocumentResultTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plugin.ingest.transport.simulate; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.Data; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.Collections; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; - -public class SimulateSimpleDocumentResultTests extends ESTestCase { - private Data data; - - @Before - public void setup() { - data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); - } - - public void testEquals() { - SimulateDocumentResult result = new SimulateSimpleDocumentResult(data); - SimulateDocumentResult otherResult = new SimulateSimpleDocumentResult(data); - assertThat(result, equalTo(otherResult)); - } - - public void testNotEqualsDiffData() { - Data otherData = new Data(data.getIndex() + "foo", data.getType(), data.getId(), data.getDocument()); - SimulateDocumentResult result = new SimulateSimpleDocumentResult(data); - SimulateDocumentResult otherResult = new SimulateSimpleDocumentResult(otherData); - assertThat(result, not(equalTo(otherResult))); - } - - public void testStreamable() throws IOException { - SimulateDocumentResult result = new SimulateSimpleDocumentResult(data); - - BytesStreamOutput out = new BytesStreamOutput(); - result.writeTo(out); - - StreamInput streamInput = StreamInput.wrap(out.bytes()); - SimulateDocumentResult otherResult = new SimulateSimpleDocumentResult(); - otherResult.readFrom(streamInput); - - assertThat(result, equalTo(otherResult)); - } -} From 97f4f27b14e6afe3833fecfce79ca39877f2227a Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 13 Nov 2015 19:35:09 +0100 Subject: [PATCH 056/347] remove equals/hashcode as part of Pipeline and adapt tests Only MutateProcessor implemented equals / hashcode hence we would only use that one in our tests, since they 
relied on them. Better to not rely on equals/hashcode, drop them and mock processor/pipeline in our tests that need them. That also allow to make MutateProcessor constructor package private as the other processors. --- .../org/elasticsearch/ingest/Pipeline.java | 15 --- .../processor/mutate/MutateProcessor.java | 2 +- .../simulate/ParsedSimulateRequest.java | 15 --- .../org/elasticsearch/ingest/DataTests.java | 71 +++++++--- .../elasticsearch/ingest/PipelineTests.java | 67 --------- .../ingest/transport/TransportDataTests.java | 15 +-- .../ParsedSimulateRequestParserTests.java | 127 ++++++++++++------ 7 files changed, 141 insertions(+), 171 deletions(-) delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index b98a469d3c3..7b44f7d5a7f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -71,21 +71,6 @@ public final class Pipeline { return processors; } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Pipeline pipeline = (Pipeline) o; - return Objects.equals(id, pipeline.id) && - Objects.equals(description, pipeline.description) && - Objects.equals(processors, pipeline.processors); - } - - @Override - public int hashCode() { - return Objects.hash(id, description, processors); - } - public final static class Factory { public Pipeline create(String id, Map config, Map processorRegistry) throws IOException { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index 4a950bea083..2daadd5fee5 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -45,7 +45,7 @@ public final class MutateProcessor implements Processor { private final List uppercase; private final List lowercase; - public MutateProcessor(Map update, Map rename, Map convert, + MutateProcessor(Map update, Map rename, Map convert, Map split, List gsub, Map join, List remove, List trim, List uppercase, List lowercase) { this.update = update; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java index 47af2db583d..50897435217 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java @@ -51,21 +51,6 @@ public class ParsedSimulateRequest { return verbose; } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ParsedSimulateRequest that = (ParsedSimulateRequest) o; - return Objects.equals(verbose, that.verbose) && - Objects.equals(documents, that.documents) && - Objects.equals(pipeline, that.pipeline); - } - - @Override - public int hashCode() { - return Objects.hash(documents, pipeline, verbose); - } - public static class Parser { private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory(); public static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline"; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index 918064a17c7..d17a354b139 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -26,7 +26,8 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class DataTests extends ESTestCase { @@ -86,28 +87,54 @@ public class DataTests extends ESTestCase { assertThat(data.getProperty("fizz.new"), equalTo("bar")); } - public void testEquals() { - Data otherData = new Data(data); - assertThat(otherData, equalTo(data)); - } + public void testEqualsAndHashcode() throws Exception { + String index = randomAsciiOfLengthBetween(1, 10); + String type = randomAsciiOfLengthBetween(1, 10); + String id = randomAsciiOfLengthBetween(1, 10); + String fieldName = randomAsciiOfLengthBetween(1, 10); + String fieldValue = randomAsciiOfLengthBetween(1, 10); + Data data = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); - public void testNotEqualsDiffIndex() { - Data otherData = new Data(data.getIndex() + "foo", data.getType(), data.getId(), data.getDocument()); - assertThat(otherData, not(equalTo(data))); - } + boolean changed = false; + String otherIndex; + if (randomBoolean()) { + otherIndex = randomAsciiOfLengthBetween(1, 10); + changed = true; + } else { + otherIndex = index; + } + String otherType; + if (randomBoolean()) { + otherType = randomAsciiOfLengthBetween(1, 10); + changed = true; + } else { + otherType = type; + } + String otherId; + if (randomBoolean()) { + otherId = randomAsciiOfLengthBetween(1, 10); + changed = true; + } else { + otherId = id; + } + Map document; + if (randomBoolean()) { + document = Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); + changed = true; + } else { + document = Collections.singletonMap(fieldName, fieldValue); + } - public void testNotEqualsDiffType() 
{ - Data otherData = new Data(data.getIndex(), data.getType() + "foo", data.getId(), data.getDocument()); - assertThat(otherData, not(equalTo(data))); - } - - public void testNotEqualsDiffId() { - Data otherData = new Data(data.getIndex(), data.getType(), data.getId() + "foo", data.getDocument()); - assertThat(otherData, not(equalTo(data))); - } - - public void testNotEqualsDiffDocument() { - Data otherData = new Data(data.getIndex(), data.getType(), data.getId(), Collections.emptyMap()); - assertThat(otherData, not(equalTo(data))); + Data otherData = new Data(otherIndex, otherType, otherId, document); + if (changed) { + assertThat(data, not(equalTo(otherData))); + assertThat(otherData, not(equalTo(data))); + } else { + assertThat(data, equalTo(otherData)); + assertThat(otherData, equalTo(data)); + Data thirdData = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); + assertThat(thirdData, equalTo(data)); + assertThat(data, equalTo(thirdData)); + } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java deleted file mode 100644 index 84d0953495b..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.mutate.MutateProcessor; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.util.*; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.not; -import static org.mockito.Mockito.mock; - -public class PipelineTests extends ESTestCase { - private Processor updateProcessor; - private Processor lowercaseProcessor; - private Pipeline pipeline; - - @Before - public void setup() { - Map update = Collections.singletonMap("foo", 123); - List lowercase = Collections.singletonList("foo"); - updateProcessor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null); - lowercaseProcessor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); - pipeline = new Pipeline("id", "description", Arrays.asList(updateProcessor, lowercaseProcessor)); - } - - public void testEquals() throws Exception { - Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getProcessors()); - assertThat(pipeline, equalTo(other)); - } - - public void testNotEqualsDiffId() throws Exception { - Pipeline other = new Pipeline(pipeline.getId() + "foo", pipeline.getDescription(), pipeline.getProcessors()); - assertThat(pipeline, not(equalTo(other))); - } - - public void testNotEqualsDiffDescription() throws Exception { - Pipeline other = new 
Pipeline(pipeline.getId(), pipeline.getDescription() + "foo", pipeline.getProcessors()); - assertThat(pipeline, not(equalTo(other))); - } - - public void testNotEqualsDiffProcessors() throws Exception { - Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription() + "foo", Collections.singletonList(updateProcessor)); - assertThat(pipeline, not(equalTo(other))); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java index 89ef7731327..1cc3f6baada 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java @@ -39,8 +39,7 @@ public class TransportDataTests extends ESTestCase { String id = randomAsciiOfLengthBetween(1, 10); String fieldName = randomAsciiOfLengthBetween(1, 10); String fieldValue = randomAsciiOfLengthBetween(1, 10); - Data data = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); - TransportData transportData = new TransportData(data); + TransportData transportData = new TransportData(new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue))); boolean changed = false; String otherIndex; @@ -72,22 +71,14 @@ public class TransportDataTests extends ESTestCase { document = Collections.singletonMap(fieldName, fieldValue); } - Data otherData = new Data(otherIndex, otherType, otherId, document); - TransportData otherTransportData = new TransportData(otherData); + TransportData otherTransportData = new TransportData(new Data(otherIndex, otherType, otherId, document)); if (changed) { - assertThat(data, not(equalTo(otherData))); - assertThat(otherData, not(equalTo(data))); assertThat(transportData, not(equalTo(otherTransportData))); assertThat(otherTransportData, not(equalTo(transportData))); } else { - 
assertThat(data, equalTo(otherData)); - assertThat(otherData, equalTo(data)); assertThat(transportData, equalTo(otherTransportData)); assertThat(otherTransportData, equalTo(transportData)); - Data thirdData = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); - TransportData thirdTransportData = new TransportData(thirdData); - assertThat(thirdData, equalTo(data)); - assertThat(data, equalTo(thirdData)); + TransportData thirdTransportData = new TransportData(new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue))); assertThat(thirdTransportData, equalTo(transportData)); assertThat(transportData, equalTo(thirdTransportData)); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java index 09c145a56b6..7f44fc08b9b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.ingest.Data; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -30,70 +29,120 @@ import org.junit.Before; import java.io.IOException; import java.util.*; +import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import static 
org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; - public class ParsedSimulateRequestParserTests extends ESTestCase { + private PipelineStore store; - private ParsedSimulateRequest.Parser parser; - private Pipeline pipeline; - private Data data; @Before public void init() throws IOException { - parser = new ParsedSimulateRequest.Parser(); - List uppercase = Collections.singletonList("foo"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); - pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Arrays.asList(processor)); - data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + Pipeline pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Collections.singletonList(mock(Processor.class))); Map processorRegistry = new HashMap<>(); - processorRegistry.put("mutate", new MutateProcessor.Factory()); + processorRegistry.put("mock_processor", mock(Processor.Factory.class)); store = mock(PipelineStore.class); - when(store.get("_id")).thenReturn(pipeline); + when(store.get(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)).thenReturn(pipeline); when(store.getProcessorFactoryRegistry()).thenReturn(processorRegistry); } public void testParseUsingPipelineStore() throws Exception { - ParsedSimulateRequest expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); + int numDocs = randomIntBetween(1, 10); - Map raw = new HashMap<>(); + Map requestContent = new HashMap<>(); List> docs = new ArrayList<>(); - Map doc = new HashMap<>(); - doc.put(Fields.INDEX, "_index"); - doc.put(Fields.TYPE, "_type"); - doc.put(Fields.ID, "_id"); - doc.put(Fields.SOURCE, data.getDocument()); - docs.add(doc); - raw.put(Fields.DOCS, docs); + List> expectedDocs = new ArrayList<>(); + requestContent.put(Fields.DOCS, docs); + for (int i = 0; i < numDocs; i++) { + Map doc = new HashMap<>(); 
+ String index = randomAsciiOfLengthBetween(1, 10); + String type = randomAsciiOfLengthBetween(1, 10); + String id = randomAsciiOfLengthBetween(1, 10); + doc.put(Fields.INDEX, index); + doc.put(Fields.TYPE, type); + doc.put(Fields.ID, id); + String fieldName = randomAsciiOfLengthBetween(1, 10); + String fieldValue = randomAsciiOfLengthBetween(1, 10); + doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); + docs.add(doc); + Map expectedDoc = new HashMap<>(); + expectedDoc.put(Fields.INDEX, index); + expectedDoc.put(Fields.TYPE, type); + expectedDoc.put(Fields.ID, id); + expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); + expectedDocs.add(expectedDoc); + } - ParsedSimulateRequest actualRequest = parser.parseWithPipelineId("_id", raw, false, store); - assertThat(actualRequest, equalTo(expectedRequest)); + ParsedSimulateRequest actualRequest = new ParsedSimulateRequest.Parser().parseWithPipelineId(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, requestContent, false, store); + assertThat(actualRequest.isVerbose(), equalTo(false)); + assertThat(actualRequest.getDocuments().size(), equalTo(numDocs)); + Iterator> expectedDocsIterator = expectedDocs.iterator(); + for (Data data : actualRequest.getDocuments()) { + Map expectedDocument = expectedDocsIterator.next(); + assertThat(data.getDocument(), equalTo(expectedDocument.get(Fields.SOURCE))); + assertThat(data.getIndex(), equalTo(expectedDocument.get(Fields.INDEX))); + assertThat(data.getType(), equalTo(expectedDocument.get(Fields.TYPE))); + assertThat(data.getId(), equalTo(expectedDocument.get(Fields.ID))); + } + + assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)); + assertThat(actualRequest.getPipeline().getDescription(), nullValue()); + assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(1)); } public void testParseWithProvidedPipeline() throws Exception { - ParsedSimulateRequest 
expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false); + int numDocs = randomIntBetween(1, 10); - Map raw = new HashMap<>(); + Map requestContent = new HashMap<>(); List> docs = new ArrayList<>(); - Map doc = new HashMap<>(); - doc.put(Fields.INDEX, "_index"); - doc.put(Fields.TYPE, "_type"); - doc.put(Fields.ID, "_id"); - doc.put(Fields.SOURCE, data.getDocument()); - docs.add(doc); + List> expectedDocs = new ArrayList<>(); + requestContent.put(Fields.DOCS, docs); + for (int i = 0; i < numDocs; i++) { + Map doc = new HashMap<>(); + String index = randomAsciiOfLengthBetween(1, 10); + String type = randomAsciiOfLengthBetween(1, 10); + String id = randomAsciiOfLengthBetween(1, 10); + doc.put(Fields.INDEX, index); + doc.put(Fields.TYPE, type); + doc.put(Fields.ID, id); + String fieldName = randomAsciiOfLengthBetween(1, 10); + String fieldValue = randomAsciiOfLengthBetween(1, 10); + doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); + docs.add(doc); + Map expectedDoc = new HashMap<>(); + expectedDoc.put(Fields.INDEX, index); + expectedDoc.put(Fields.TYPE, type); + expectedDoc.put(Fields.ID, id); + expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); + expectedDocs.add(expectedDoc); + } - Map processorConfig = new HashMap<>(); - processorConfig.put("uppercase", Arrays.asList("foo")); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig))); + List> processors = new ArrayList<>(); + int numProcessors = randomIntBetween(1, 10); + for (int i = 0; i < numProcessors; i++) { + processors.add(Collections.singletonMap("mock_processor", Collections.emptyMap())); + } + pipelineConfig.put("processors", processors); + requestContent.put(Fields.PIPELINE, pipelineConfig); - raw.put(Fields.DOCS, docs); - raw.put(Fields.PIPELINE, pipelineConfig); + ParsedSimulateRequest actualRequest = new 
ParsedSimulateRequest.Parser().parse(requestContent, false, store); + assertThat(actualRequest.isVerbose(), equalTo(false)); + assertThat(actualRequest.getDocuments().size(), equalTo(numDocs)); + Iterator> expectedDocsIterator = expectedDocs.iterator(); + for (Data data : actualRequest.getDocuments()) { + Map expectedDocument = expectedDocsIterator.next(); + assertThat(data.getDocument(), equalTo(expectedDocument.get(Fields.SOURCE))); + assertThat(data.getIndex(), equalTo(expectedDocument.get(Fields.INDEX))); + assertThat(data.getType(), equalTo(expectedDocument.get(Fields.TYPE))); + assertThat(data.getId(), equalTo(expectedDocument.get(Fields.ID))); + } - ParsedSimulateRequest actualRequest = parser.parse(raw, false, store); - assertThat(actualRequest, equalTo(expectedRequest)); + assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)); + assertThat(actualRequest.getPipeline().getDescription(), nullValue()); + assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(numProcessors)); } } From 26569045efae8f143e033115afc3736dc8da835e Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 13 Nov 2015 19:36:18 +0100 Subject: [PATCH 057/347] remove leftover equals/hashcode --- .../processor/mutate/MutateProcessor.java | 22 ------------------- 1 file changed, 22 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index 2daadd5fee5..9b9d98e3fef 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -277,28 +277,6 @@ public final class MutateProcessor implements Processor { } } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) 
return false; - MutateProcessor that = (MutateProcessor) o; - return Objects.equals(update, that.update) && - Objects.equals(rename, that.rename) && - Objects.equals(convert, that.convert) && - Objects.equals(split, that.split) && - Objects.equals(gsub, that.gsub) && - Objects.equals(join, that.join) && - Objects.equals(remove, that.remove) && - Objects.equals(trim, that.trim) && - Objects.equals(uppercase, that.uppercase) && - Objects.equals(lowercase, that.lowercase); - } - - @Override - public int hashCode() { - return Objects.hash(update, rename, convert, split, gsub, join, remove, trim, uppercase, lowercase); - } - public static final class Factory implements Processor.Factory { @Override public MutateProcessor create(Map config) throws IOException { From 446fa0c10b6e73358b5a50612ed5af62d7a886b6 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 13 Nov 2015 19:37:06 +0100 Subject: [PATCH 058/347] remove unnecessary line breaks --- .../java/org/elasticsearch/ingest/processor/Processor.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 46a9d43e280..fc268b2b128 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -59,11 +59,8 @@ public interface Processor { default void setConfigDirectory(Path configDirectory) { } - @Override default void close() throws IOException { } - } - } From de33f5a911ba154c32a780004f7ad7efa0f50330 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Fri, 13 Nov 2015 15:14:10 -0800 Subject: [PATCH 059/347] adds tests and guards against null values in some mutate methods --- .../processor/mutate/MutateProcessor.java | 16 +++++-- .../org/elasticsearch/ingest/DataTests.java | 6 +++ .../mutate/MutateProcessorTests.java | 45 +++++++++++++++++++ 
.../test/ingest/80_simulate.yaml | 2 +- 4 files changed, 64 insertions(+), 5 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index 9b9d98e3fef..4b4e7a193ce 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -201,7 +201,9 @@ public final class MutateProcessor implements Processor { private void doSplit(Data data) { for(Map.Entry entry : split.entrySet()) { Object oldVal = data.getProperty(entry.getKey()); - if (oldVal instanceof String) { + if (oldVal == null) { + throw new IllegalArgumentException("Cannot split field. [" + entry.getKey() + "] is null."); + } else if (oldVal instanceof String) { data.addField(entry.getKey(), Arrays.asList(((String) oldVal).split(entry.getValue()))); } else { throw new IllegalArgumentException("Cannot split a field that is not a String type"); @@ -247,7 +249,9 @@ public final class MutateProcessor implements Processor { private void doTrim(Data data) { for(String field : trim) { Object val = data.getProperty(field); - if (val instanceof String) { + if (val == null) { + throw new IllegalArgumentException("Cannot trim field. [" + field + "] is null."); + } else if (val instanceof String) { data.addField(field, ((String) val).trim()); } else { throw new IllegalArgumentException("Cannot trim field:" + field + " with type: " + val.getClass()); @@ -258,7 +262,9 @@ public final class MutateProcessor implements Processor { private void doUppercase(Data data) { for(String field : uppercase) { Object val = data.getProperty(field); - if (val instanceof String) { + if (val == null) { + throw new IllegalArgumentException("Cannot uppercase field. 
[" + field + "] is null."); + } else if (val instanceof String) { data.addField(field, ((String) val).toUpperCase(Locale.ROOT)); } else { throw new IllegalArgumentException("Cannot uppercase field:" + field + " with type: " + val.getClass()); @@ -269,7 +275,9 @@ public final class MutateProcessor implements Processor { private void doLowercase(Data data) { for(String field : lowercase) { Object val = data.getProperty(field); - if (val instanceof String) { + if (val == null) { + throw new IllegalArgumentException("Cannot lowercase field. [" + field + "] is null."); + } else if (val instanceof String) { data.addField(field, ((String) val).toLowerCase(Locale.ROOT)); } else { throw new IllegalArgumentException("Cannot lowercase field:" + field + " with type: " + val.getClass()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index d17a354b139..754c70d6525 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -28,6 +28,7 @@ import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; public class DataTests extends ESTestCase { @@ -51,6 +52,11 @@ public class DataTests extends ESTestCase { assertThat(data.getProperty("fizz.buzz"), equalTo("hello world")); } + public void testGetPropertyNotFound() { + data.getProperty("not.here"); + assertThat(data.getProperty("not.here"), nullValue()); + } + public void testContainsProperty() { assertTrue(data.containsProperty("fizz")); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java index f5420eb30ee..371b4621485 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java @@ -110,6 +110,18 @@ public class MutateProcessorTests extends ESTestCase { assertThat(data.getProperty("ip"), equalTo(Arrays.asList("127", "0", "0", "1"))); } + public void testSplitNullValue() throws IOException { + Map split = new HashMap<>(); + split.put("not.found", "\\."); + Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); + try { + processor.execute(data); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Cannot split field. [not.found] is null.")); + } + } + public void testGsub() throws IOException { List gsubExpressions = Collections.singletonList(new GsubExpression("ip", Pattern.compile("\\."), "-")); Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); @@ -156,6 +168,17 @@ public class MutateProcessorTests extends ESTestCase { assertThat(data.getProperty("to_strip"), equalTo("clean")); } + public void testTrimNullValue() throws IOException { + List trim = Collections.singletonList("not.found"); + Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); + try { + processor.execute(data); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Cannot trim field. 
[not.found] is null.")); + } + } + public void testUppercase() throws IOException { List uppercase = Collections.singletonList("foo"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); @@ -164,6 +187,17 @@ public class MutateProcessorTests extends ESTestCase { assertThat(data.getProperty("foo"), equalTo("BAR")); } + public void testUppercaseNullValue() throws IOException { + List uppercase = Collections.singletonList("not.found"); + Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); + try { + processor.execute(data); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Cannot uppercase field. [not.found] is null.")); + } + } + public void testLowercase() throws IOException { List lowercase = Collections.singletonList("alpha"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); @@ -171,4 +205,15 @@ public class MutateProcessorTests extends ESTestCase { assertThat(data.getDocument().size(), equalTo(7)); assertThat(data.getProperty("alpha"), equalTo("abcd")); } + + public void testLowercaseNullValue() throws IOException { + List lowercase = Collections.singletonList("not.found"); + Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); + try { + processor.execute(data); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Cannot lowercase field. 
[not.found] is null.")); + } + } } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index 371aeedd3ef..c27d1438030 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -207,7 +207,7 @@ ] } - length: { docs: 2 } - - match: { docs.0.error.type: "null_pointer_exception" } + - match: { docs.0.error.type: "illegal_argument_exception" } - is_true: docs.1.doc.modified - match: { docs.1.doc._source.foo: "BAR" } From 5169d9d80f7b329e5338642785da4dd7a68b04c9 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 16 Nov 2015 10:28:29 +0100 Subject: [PATCH 060/347] minor formatting changes --- .../ingest/processor/ConfigurationUtils.java | 1 - .../plugin/ingest/transport/TransportData.java | 8 ++++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java index 49fd90e7afe..af001decc08 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -149,5 +149,4 @@ public final class ConfigurationUtils { throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); } } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java index d3753a99306..cc9a8513e03 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java @@ -80,8 +80,12 @@ public class TransportData implements Writeable, ToXContent { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } TransportData that = (TransportData) o; return Objects.equals(data, that.data); } From 9d7d5bd9bcdb0ac7eae0975b6bb6e9f49af4272c Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 17 Nov 2015 15:20:58 +0100 Subject: [PATCH 061/347] Throw a proper error when add field fails due to existing field type mismatch Instead of throwing ClassCastException whenever we try and add a field to a parent that is not a Map, we now throw a clearer error (IAE). --- .../java/org/elasticsearch/ingest/Data.java | 17 ++++++-- .../org/elasticsearch/ingest/DataTests.java | 39 ++++++++++++++----- 2 files changed, 43 insertions(+), 13 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index b360af7d459..2f9132f83c0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -101,10 +101,21 @@ public final class Data { Map inner = document; for (int i = 0; i < pathElements.length - 1; i++) { - if (!inner.containsKey(pathElements[i])) { - inner.put(pathElements[i], new HashMap()); + String pathElement = pathElements[i]; + if (inner.containsKey(pathElement)) { + Object object = inner.get(pathElement); + if (object instanceof Map) { + @SuppressWarnings("unchecked") + Map stringObjectMap = (Map) object; + inner = stringObjectMap; + } else { + throw new IllegalArgumentException("cannot add field to parent [" + pathElement + "] of type [" + object.getClass().getName() + "], [" + Map.class.getName() + 
"] expected instead."); + } + } else { + Map newInnerMap = new HashMap<>(); + inner.put(pathElement, newInnerMap); + inner = newInnerMap; } - inner = (Map) inner.get(pathElements[i]); } inner.put(writeKey, value); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index 754c70d6525..2b3d7b4c6f9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -26,9 +26,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; public class DataTests extends ESTestCase { @@ -53,7 +51,6 @@ public class DataTests extends ESTestCase { } public void testGetPropertyNotFound() { - data.getProperty("not.here"); assertThat(data.getProperty("not.here"), nullValue()); } @@ -61,15 +58,15 @@ public class DataTests extends ESTestCase { assertTrue(data.containsProperty("fizz")); } - public void testContainsProperty_Nested() { + public void testContainsPropertyNested() { assertTrue(data.containsProperty("fizz.buzz")); } - public void testContainsProperty_NotFound() { + public void testContainsPropertyNotFound() { assertFalse(data.containsProperty("doesnotexist")); } - public void testContainsProperty_NestedNotFound() { + public void testContainsPropertyNestedNotFound() { assertFalse(data.containsProperty("fizz.doesnotexist")); } @@ -78,19 +75,41 @@ public class DataTests extends ESTestCase { assertThat(data.getDocument().get("new_field"), equalTo("foo")); } + @SuppressWarnings("unchecked") public void testNestedAddField() { data.addField("a.b.c.d", "foo"); - assertThat(data.getProperty("a.b.c.d"), equalTo("foo")); + assertThat(data.getDocument().get("a"), instanceOf(Map.class)); + Map a = 
(Map) data.getDocument().get("a"); + assertThat(a.get("b"), instanceOf(Map.class)); + Map b = (Map) a.get("b"); + assertThat(b.get("c"), instanceOf(Map.class)); + Map c = (Map) b.get("c"); + assertThat(c.get("d"), instanceOf(String.class)); + String d = (String) c.get("d"); + assertThat(d, equalTo("foo")); } public void testAddFieldOnExistingField() { data.addField("foo", "newbar"); - assertThat(data.getProperty("foo"), equalTo("newbar")); + assertThat(data.getDocument().get("foo"), equalTo("newbar")); } + @SuppressWarnings("unchecked") public void testAddFieldOnExistingParent() { data.addField("fizz.new", "bar"); - assertThat(data.getProperty("fizz.new"), equalTo("bar")); + assertThat(data.getDocument().get("fizz"), instanceOf(Map.class)); + Map innerMap = (Map) data.getDocument().get("fizz"); + assertThat(innerMap.get("new"), instanceOf(String.class)); + String value = (String) innerMap.get("new"); + assertThat(value, equalTo("bar")); + } + + public void testAddFieldOnExistingParentTypeMismatch() { + try { + data.addField("fizz.buzz.new", "bar"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], [java.util.Map] expected instead.")); + } } public void testEqualsAndHashcode() throws Exception { From ba7e536e1dcb8fced4dbef2a2ce40118528b16f6 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 17 Nov 2015 15:45:34 +0100 Subject: [PATCH 062/347] better error and tests for empty and null values in Data containsProperty addField and getProperty --- .../java/org/elasticsearch/ingest/Data.java | 15 +++++-- .../org/elasticsearch/ingest/DataTests.java | 44 +++++++++++++++++++ 2 files changed, 56 insertions(+), 3 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 2f9132f83c0..1c347be4d62 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -55,6 +55,10 @@ public final class Data { } public boolean containsProperty(String path) { + if (path == null || path.length() == 0) { + return false; + } + boolean containsProperty = false; String[] pathElements = Strings.splitStringToArray(path, '.'); if (pathElements.length == 0) { @@ -86,17 +90,22 @@ public final class Data { /** * add `value` to path in document. If path does not exist, - * nested hashmaps will be put in as parent key values until + * nested maps will be put in as parent key values until * leaf key name in path is reached. * * @param path The path within the document in dot-notation * @param value The value to put in for the path key */ public void addField(String path, Object value) { + if (path == null || path.length() == 0) { + throw new IllegalArgumentException("cannot add null or empty field"); + } + if (value == null) { + throw new IllegalArgumentException("cannot add null value to field [" + path + "]"); + } modified = true; - String[] pathElements = Strings.splitStringToArray(path, '.'); - + assert pathElements.length > 0; String writeKey = pathElements[pathElements.length - 1]; Map inner = document; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index 2b3d7b4c6f9..511d465495c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -54,6 +54,14 @@ public class DataTests extends ESTestCase { assertThat(data.getProperty("not.here"), nullValue()); } + public void testGetPropertyNull() { + assertNull(data.getProperty(null)); + } + + public void testGetPropertyEmpty() { + assertNull(data.getProperty("")); + } + public void testContainsProperty() { assertTrue(data.containsProperty("fizz")); } @@ -70,6 +78,14 @@ public class DataTests extends ESTestCase { 
assertFalse(data.containsProperty("fizz.doesnotexist")); } + public void testContainsPropertyNull() { + assertFalse(data.containsProperty(null)); + } + + public void testContainsPropertyEmpty() { + assertFalse(data.containsProperty("")); + } + public void testSimpleAddField() { data.addField("new_field", "foo"); assertThat(data.getDocument().get("new_field"), equalTo("foo")); @@ -107,11 +123,39 @@ public class DataTests extends ESTestCase { public void testAddFieldOnExistingParentTypeMismatch() { try { data.addField("fizz.buzz.new", "bar"); + fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], [java.util.Map] expected instead.")); } } + public void testAddFieldNullName() { + try { + data.addField(null, "bar"); + fail("add field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot add null or empty field")); + } + } + + public void testAddFieldEmptyName() { + try { + data.addField("", "bar"); + fail("add field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot add null or empty field")); + } + } + + public void testAddFieldNullValue() { + try { + data.addField("new_field", null); + fail("add field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot add null value to field [new_field]")); + } + } + public void testEqualsAndHashcode() throws Exception { String index = randomAsciiOfLengthBetween(1, 10); String type = randomAsciiOfLengthBetween(1, 10); From 044a86d6c6fb2e53f8b4fa46847864b506b9c0ae Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 17 Nov 2015 17:01:53 +0100 Subject: [PATCH 063/347] remove dependency on core and resolve generics compiler warning --- .../java/org/elasticsearch/ingest/Data.java | 84 +++++++++---------- 1 file changed, 41 insertions(+), 43 deletions(-) diff 
--git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 1c347be4d62..49b15c1f6da 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -20,9 +20,10 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import java.util.*; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; /** * Represents the data and meta data (like id and type) of a single document that is going to be indexed. @@ -49,47 +50,44 @@ public final class Data { @SuppressWarnings("unchecked") public T getProperty(String path) { - // TODO: we should not rely on any core class, so we should have custom map extract value logic: - // also XContentMapValues has no support to get specific values from arrays, see: https://github.com/elastic/elasticsearch/issues/14324 - return (T) XContentMapValues.extractValue(path, document); - } - - public boolean containsProperty(String path) { - if (path == null || path.length() == 0) { - return false; - } - - boolean containsProperty = false; - String[] pathElements = Strings.splitStringToArray(path, '.'); - if (pathElements.length == 0) { - return false; - } - - Map inner = document; - - for (int i = 0; i < pathElements.length; i++) { - if (inner == null) { - containsProperty = false; - break; - } - if (i == pathElements.length - 1) { - containsProperty = inner.containsKey(pathElements[i]); - break; - } - - Object obj = inner.get(pathElements[i]); - if (obj instanceof Map) { - inner = (Map) obj; - } else { - inner = null; - } - } - - return containsProperty; + Object property = get(path); + return (T) property; } /** - * add `value` to path in document. 
If path does not exist, + * + * @param path The path within the document in dot-notation + * @return true if the document contains the property, false otherwise + */ + public boolean containsProperty(String path) { + return getProperty(path) != null; + } + + private Object get(String path) { + if (path == null || path.length() == 0) { + return null; + } + String[] pathElements = Strings.splitStringToArray(path, '.'); + assert pathElements.length > 0; + + Map innerMap = document; + for (int i = 0; i < pathElements.length - 1; i++) { + Object obj = innerMap.get(pathElements[i]); + if (obj instanceof Map) { + @SuppressWarnings("unchecked") + Map stringObjectMap = (Map) obj; + innerMap = stringObjectMap; + } else { + return null; + } + } + + String leafKey = pathElements[pathElements.length - 1]; + return innerMap.get(leafKey); + } + + /** + * Adds the provided value to path in document. If path does not exist, * nested maps will be put in as parent key values until * leaf key name in path is reached. 
* @@ -106,9 +104,8 @@ public final class Data { modified = true; String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - String writeKey = pathElements[pathElements.length - 1]; - Map inner = document; + Map inner = document; for (int i = 0; i < pathElements.length - 1; i++) { String pathElement = pathElements[i]; if (inner.containsKey(pathElement)) { @@ -127,7 +124,8 @@ public final class Data { } } - inner.put(writeKey, value); + String leafKey = pathElements[pathElements.length - 1]; + inner.put(leafKey, value); } public String getIndex() { From e616e8398a239226a2e837dc23c47abed60fdf35 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 17 Nov 2015 17:04:23 +0100 Subject: [PATCH 064/347] add missing javadocs --- .../src/main/java/org/elasticsearch/ingest/Data.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 49b15c1f6da..e796eaa077d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -48,6 +48,11 @@ public final class Data { this(other.index, other.type, other.id, new HashMap<>(other.document)); } + /** + * Returns the value contained in the document for the provided path + * @param path The path within the document in dot-notation + * @return the value for the provided path if existing, null otherwise + */ @SuppressWarnings("unchecked") public T getProperty(String path) { Object property = get(path); @@ -55,7 +60,7 @@ public final class Data { } /** - * + * Checks whether the document contains a value for the provided path * @param path The path within the document in dot-notation * @return true if the document contains the property, false otherwise */ @@ -87,10 +92,9 @@ public final class Data { } /** - * Adds the provided value to path in document. 
If path does not exist, - * nested maps will be put in as parent key values until + * Adds the provided value to the provided path in the document. + * If path does not exist, nested maps will be put in as parent key values until * leaf key name in path is reached. - * * @param path The path within the document in dot-notation * @param value The value to put in for the path key */ From 126df4ca9a540dd5c6b0c216f597828a26c04e3e Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 17 Nov 2015 17:36:28 +0100 Subject: [PATCH 065/347] type safety in Data#getProperty and proper error when type found is not the expected one --- .../java/org/elasticsearch/ingest/Data.java | 15 +++++++--- .../ingest/processor/date/DateProcessor.java | 2 +- .../processor/geoip/GeoIpProcessor.java | 2 +- .../ingest/processor/grok/GrokProcessor.java | 2 +- .../processor/mutate/MutateProcessor.java | 16 +++++------ .../org/elasticsearch/ingest/DataTests.java | 28 +++++++++++++++---- .../processor/date/DateProcessorTests.java | 18 ++++++------ .../mutate/MutateProcessorTests.java | 26 ++++++++--------- 8 files changed, 67 insertions(+), 42 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index e796eaa077d..f00487f546b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -51,12 +51,19 @@ public final class Data { /** * Returns the value contained in the document for the provided path * @param path The path within the document in dot-notation + * @param clazz The expected class of the field value * @return the value for the provided path if existing, null otherwise + * @throws IllegalArgumentException if the field is present but is not of the type provided as argument. 
*/ - @SuppressWarnings("unchecked") - public T getProperty(String path) { + public T getProperty(String path, Class clazz) { Object property = get(path); - return (T) property; + if (property == null) { + return null; + } + if (clazz.isInstance(property)) { + return clazz.cast(property); + } + throw new IllegalArgumentException("field [" + path + "] of type [" + property.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); } /** @@ -65,7 +72,7 @@ public final class Data { * @return true if the document contains the property, false otherwise */ public boolean containsProperty(String path) { - return getProperty(path) != null; + return get(path) != null; } private Object get(String path) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index d19433b10ae..9c7fe955f7d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -57,7 +57,7 @@ public final class DateProcessor implements Processor { @Override public void execute(Data data) { - String value = data.getProperty(matchField); + String value = data.getProperty(matchField, String.class); // TODO(talevy): handle custom timestamp fields DateTime dateTime = null; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index a50bde0e6cb..2070176b3e3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -61,7 +61,7 @@ public final class GeoIpProcessor implements Processor { @Override public void execute(Data data) { - String ip = 
data.getProperty(sourceField); + String ip = data.getProperty(sourceField, String.class); final InetAddress ipAddress; try { ipAddress = InetAddress.getByName(ip); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index c0688c42220..8b92ad89b88 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -46,7 +46,7 @@ public final class GrokProcessor implements Processor { @Override public void execute(Data data) { - Object field = data.getProperty(matchField); + Object field = data.getProperty(matchField, Object.class); // TODO(talevy): handle invalid field types if (field instanceof String) { Map matches = grok.captures((String) field); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index 4b4e7a193ce..5c23c21f7c4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -148,7 +148,7 @@ public final class MutateProcessor implements Processor { private void doRename(Data data) { for(Map.Entry entry : rename.entrySet()) { if (data.containsProperty(entry.getKey())) { - Object oldVal = data.getProperty(entry.getKey()); + Object oldVal = data.getProperty(entry.getKey(), Object.class); data.getDocument().remove(entry.getKey()); data.addField(entry.getValue(), oldVal); } @@ -179,7 +179,7 @@ public final class MutateProcessor implements Processor { for(Map.Entry entry : convert.entrySet()) { String toType = entry.getValue(); - Object oldVal = data.getProperty(entry.getKey()); + Object oldVal = 
data.getProperty(entry.getKey(), Object.class); Object newVal; if (oldVal instanceof List) { @@ -200,7 +200,7 @@ public final class MutateProcessor implements Processor { private void doSplit(Data data) { for(Map.Entry entry : split.entrySet()) { - Object oldVal = data.getProperty(entry.getKey()); + Object oldVal = data.getProperty(entry.getKey(), Object.class); if (oldVal == null) { throw new IllegalArgumentException("Cannot split field. [" + entry.getKey() + "] is null."); } else if (oldVal instanceof String) { @@ -213,7 +213,7 @@ public final class MutateProcessor implements Processor { private void doGsub(Data data) { for (GsubExpression gsubExpression : gsub) { - String oldVal = data.getProperty(gsubExpression.getFieldName()); + String oldVal = data.getProperty(gsubExpression.getFieldName(), String.class); if (oldVal == null) { throw new IllegalArgumentException("Field \"" + gsubExpression.getFieldName() + "\" is null, cannot match pattern."); } @@ -226,7 +226,7 @@ public final class MutateProcessor implements Processor { @SuppressWarnings("unchecked") private void doJoin(Data data) { for(Map.Entry entry : join.entrySet()) { - Object oldVal = data.getProperty(entry.getKey()); + Object oldVal = data.getProperty(entry.getKey(), Object.class); if (oldVal instanceof List) { String joined = (String) ((List) oldVal) .stream() @@ -248,7 +248,7 @@ public final class MutateProcessor implements Processor { private void doTrim(Data data) { for(String field : trim) { - Object val = data.getProperty(field); + Object val = data.getProperty(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot trim field. 
[" + field + "] is null."); } else if (val instanceof String) { @@ -261,7 +261,7 @@ public final class MutateProcessor implements Processor { private void doUppercase(Data data) { for(String field : uppercase) { - Object val = data.getProperty(field); + Object val = data.getProperty(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot uppercase field. [" + field + "] is null."); } else if (val instanceof String) { @@ -274,7 +274,7 @@ public final class MutateProcessor implements Processor { private void doLowercase(Data data) { for(String field : lowercase) { - Object val = data.getProperty(field); + Object val = data.getProperty(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot lowercase field. [" + field + "] is null."); } else if (val instanceof String) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index 511d465495c..a50d79e6c8d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -36,6 +36,7 @@ public class DataTests extends ESTestCase { public void setData() { Map document = new HashMap<>(); document.put("foo", "bar"); + document.put("int", 123); Map innerObject = new HashMap<>(); innerObject.put("buzz", "hello world"); document.put("fizz", innerObject); @@ -43,23 +44,40 @@ public class DataTests extends ESTestCase { } public void testSimpleGetProperty() { - assertThat(data.getProperty("foo"), equalTo("bar")); + assertThat(data.getProperty("foo", String.class), equalTo("bar")); + assertThat(data.getProperty("int", Integer.class), equalTo(123)); + } + + public void testSimpleGetPropertyTypeMismatch() { + try { + data.getProperty("int", String.class); + fail("getProperty should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [int] of 
type [java.lang.Integer] cannot be cast to [java.lang.String]")); + } + + try { + data.getProperty("foo", Integer.class); + fail("getProperty should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [foo] of type [java.lang.String] cannot be cast to [java.lang.Integer]")); + } } public void testNestedGetProperty() { - assertThat(data.getProperty("fizz.buzz"), equalTo("hello world")); + assertThat(data.getProperty("fizz.buzz", String.class), equalTo("hello world")); } public void testGetPropertyNotFound() { - assertThat(data.getProperty("not.here"), nullValue()); + assertThat(data.getProperty("not.here", String.class), nullValue()); } public void testGetPropertyNull() { - assertNull(data.getProperty(null)); + assertNull(data.getProperty(null, String.class)); } public void testGetPropertyEmpty() { - assertNull(data.getProperty("")); + assertNull(data.getProperty("", String.class)); } public void testContainsProperty() { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java index f129e437f75..f288809a43a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -38,7 +38,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 06 11:05:15"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T11:05:15.000+02:00")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T11:05:15.000+02:00")); } public void testJodaPatternMultipleFormats() { @@ -53,19 +53,19 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 06"); Data data 
= new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12/06/2010"); data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12-06-2010"); data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "2010"); @@ -85,7 +85,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 giugno"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); } public void testJodaPatternDefaultYear() { @@ -95,7 +95,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "12/06"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); + assertThat(data.getProperty("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); } public void testTAI64N() { @@ -106,7 +106,7 @@ public class DateProcessorTests extends 
ESTestCase { document.put("date_as_string", dateAsString); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("2012-12-22T03:00:46.767+02:00")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); } public void testUnixMs() { @@ -116,7 +116,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "1000500"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("1970-01-01T00:16:40.500Z")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } public void testUnix() { @@ -126,6 +126,6 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "1000.5"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date"), equalTo("1970-01-01T00:16:40.500Z")); + assertThat(data.getProperty("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java index 371b4621485..53cd87da895 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java @@ -58,7 +58,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("foo"), equalTo(123)); + assertThat(data.getProperty("foo", Integer.class), equalTo(123)); } 
public void testRename() throws IOException { @@ -67,7 +67,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, rename, null, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("bar"), equalTo("bar")); + assertThat(data.getProperty("bar", String.class), equalTo("bar")); assertThat(data.containsProperty("foo"), is(false)); } @@ -77,7 +77,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("num"), equalTo(64)); + assertThat(data.getProperty("num", Integer.class), equalTo(64)); } public void testConvertNullField() throws IOException { @@ -98,7 +98,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("arr"), equalTo(Arrays.asList(1, 2, 3))); + assertThat(data.getProperty("arr", List.class), equalTo(Arrays.asList(1, 2, 3))); } public void testSplit() throws IOException { @@ -107,7 +107,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("ip"), equalTo(Arrays.asList("127", "0", "0", "1"))); + assertThat(data.getProperty("ip", List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); } public void testSplitNullValue() throws IOException { @@ -127,7 +127,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, 
null, gsubExpressions, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("ip"), equalTo("127-0-0-1")); + assertThat(data.getProperty("ip", String.class), equalTo("127-0-0-1")); } public void testGsub_NullValue() throws IOException { @@ -147,7 +147,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, join, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("arr"), equalTo("1-2-3")); + assertThat(data.getProperty("arr", String.class), equalTo("1-2-3")); } public void testRemove() throws IOException { @@ -155,8 +155,8 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, null, remove, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(5)); - assertThat(data.getProperty("foo"), nullValue()); - assertThat(data.getProperty("ip"), nullValue()); + assertThat(data.getProperty("foo", Object.class), nullValue()); + assertThat(data.getProperty("ip", Object.class), nullValue()); } public void testTrim() throws IOException { @@ -164,8 +164,8 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("foo"), equalTo("bar")); - assertThat(data.getProperty("to_strip"), equalTo("clean")); + assertThat(data.getProperty("foo", String.class), equalTo("bar")); + assertThat(data.getProperty("to_strip", String.class), equalTo("clean")); } public void testTrimNullValue() throws IOException { @@ -184,7 +184,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, 
null, null, null, null, uppercase, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("foo"), equalTo("BAR")); + assertThat(data.getProperty("foo", String.class), equalTo("BAR")); } public void testUppercaseNullValue() throws IOException { @@ -203,7 +203,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("alpha"), equalTo("abcd")); + assertThat(data.getProperty("alpha", String.class), equalTo("abcd")); } public void testLowercaseNullValue() throws IOException { From ba8f8810ea6fd1f023de32d1ecdb10da2eadb15a Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 18 Nov 2015 10:03:28 +0100 Subject: [PATCH 066/347] rename getProperty, containsProperty and addField methods For more consistency we now have getPropertyValue, hasPropertyValue, and setPropertyValue --- .../java/org/elasticsearch/ingest/Data.java | 13 ++- .../ingest/processor/date/DateProcessor.java | 4 +- .../processor/geoip/GeoIpProcessor.java | 4 +- .../ingest/processor/grok/GrokProcessor.java | 4 +- .../processor/mutate/MutateProcessor.java | 36 ++++---- .../org/elasticsearch/ingest/DataTests.java | 84 +++++++++---------- .../processor/date/DateProcessorTests.java | 18 ++-- .../mutate/MutateProcessorTests.java | 28 +++---- 8 files changed, 95 insertions(+), 96 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index f00487f546b..ab9ab077ec9 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -55,7 +55,7 @@ public final class Data { * @return the value for the provided path if existing, null otherwise * @throws 
IllegalArgumentException if the field is present but is not of the type provided as argument. */ - public T getProperty(String path, Class clazz) { + public T getPropertyValue(String path, Class clazz) { Object property = get(path); if (property == null) { return null; @@ -69,9 +69,9 @@ public final class Data { /** * Checks whether the document contains a value for the provided path * @param path The path within the document in dot-notation - * @return true if the document contains the property, false otherwise + * @return true if the document contains a non null value for the property, false otherwise */ - public boolean containsProperty(String path) { + public boolean hasPropertyValue(String path) { return get(path) != null; } @@ -99,13 +99,12 @@ public final class Data { } /** - * Adds the provided value to the provided path in the document. - * If path does not exist, nested maps will be put in as parent key values until - * leaf key name in path is reached. + * Sets the provided value to the provided path in the document. + * Any non existing path element will be created. 
* @param path The path within the document in dot-notation * @param value The value to put in for the path key */ - public void addField(String path, Object value) { + public void setPropertyValue(String path, Object value) { if (path == null || path.length() == 0) { throw new IllegalArgumentException("cannot add null or empty field"); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 9c7fe955f7d..ead4e0a1b1c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -57,7 +57,7 @@ public final class DateProcessor implements Processor { @Override public void execute(Data data) { - String value = data.getProperty(matchField, String.class); + String value = data.getPropertyValue(matchField, String.class); // TODO(talevy): handle custom timestamp fields DateTime dateTime = null; @@ -75,7 +75,7 @@ public final class DateProcessor implements Processor { throw new IllegalArgumentException("unable to parse date [" + value + "]", lastException); } - data.addField(targetField, ISODateTimeFormat.dateTime().print(dateTime)); + data.setPropertyValue(targetField, ISODateTimeFormat.dateTime().print(dateTime)); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 2070176b3e3..629a7b59837 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -61,7 +61,7 @@ public final class GeoIpProcessor implements Processor { @Override public void execute(Data data) { - String ip = 
data.getProperty(sourceField, String.class); + String ip = data.getPropertyValue(sourceField, String.class); final InetAddress ipAddress; try { ipAddress = InetAddress.getByName(ip); @@ -88,7 +88,7 @@ public final class GeoIpProcessor implements Processor { default: throw new IllegalStateException("Unsupported database type [" + dbReader.getMetadata().getDatabaseType() + "]"); } - data.addField(targetField, geoData); + data.setPropertyValue(targetField, geoData); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 8b92ad89b88..58b999c8f78 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -46,12 +46,12 @@ public final class GrokProcessor implements Processor { @Override public void execute(Data data) { - Object field = data.getProperty(matchField, Object.class); + Object field = data.getPropertyValue(matchField, Object.class); // TODO(talevy): handle invalid field types if (field instanceof String) { Map matches = grok.captures((String) field); if (matches != null) { - matches.forEach((k, v) -> data.addField(k, v)); + matches.forEach((k, v) -> data.setPropertyValue(k, v)); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index 5c23c21f7c4..3ae1fb98cf2 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -141,16 +141,16 @@ public final class MutateProcessor implements Processor { private void doUpdate(Data data) { for(Map.Entry entry : update.entrySet()) { - 
data.addField(entry.getKey(), entry.getValue()); + data.setPropertyValue(entry.getKey(), entry.getValue()); } } private void doRename(Data data) { for(Map.Entry entry : rename.entrySet()) { - if (data.containsProperty(entry.getKey())) { - Object oldVal = data.getProperty(entry.getKey(), Object.class); + if (data.hasPropertyValue(entry.getKey())) { + Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); data.getDocument().remove(entry.getKey()); - data.addField(entry.getValue(), oldVal); + data.setPropertyValue(entry.getValue(), oldVal); } } } @@ -179,7 +179,7 @@ public final class MutateProcessor implements Processor { for(Map.Entry entry : convert.entrySet()) { String toType = entry.getValue(); - Object oldVal = data.getProperty(entry.getKey(), Object.class); + Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); Object newVal; if (oldVal instanceof List) { @@ -194,17 +194,17 @@ public final class MutateProcessor implements Processor { newVal = parseValueAsType(oldVal, toType); } - data.addField(entry.getKey(), newVal); + data.setPropertyValue(entry.getKey(), newVal); } } private void doSplit(Data data) { for(Map.Entry entry : split.entrySet()) { - Object oldVal = data.getProperty(entry.getKey(), Object.class); + Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); if (oldVal == null) { throw new IllegalArgumentException("Cannot split field. 
[" + entry.getKey() + "] is null."); } else if (oldVal instanceof String) { - data.addField(entry.getKey(), Arrays.asList(((String) oldVal).split(entry.getValue()))); + data.setPropertyValue(entry.getKey(), Arrays.asList(((String) oldVal).split(entry.getValue()))); } else { throw new IllegalArgumentException("Cannot split a field that is not a String type"); } @@ -213,27 +213,27 @@ public final class MutateProcessor implements Processor { private void doGsub(Data data) { for (GsubExpression gsubExpression : gsub) { - String oldVal = data.getProperty(gsubExpression.getFieldName(), String.class); + String oldVal = data.getPropertyValue(gsubExpression.getFieldName(), String.class); if (oldVal == null) { throw new IllegalArgumentException("Field \"" + gsubExpression.getFieldName() + "\" is null, cannot match pattern."); } Matcher matcher = gsubExpression.getPattern().matcher(oldVal); String newVal = matcher.replaceAll(gsubExpression.getReplacement()); - data.addField(gsubExpression.getFieldName(), newVal); + data.setPropertyValue(gsubExpression.getFieldName(), newVal); } } @SuppressWarnings("unchecked") private void doJoin(Data data) { for(Map.Entry entry : join.entrySet()) { - Object oldVal = data.getProperty(entry.getKey(), Object.class); + Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); if (oldVal instanceof List) { String joined = (String) ((List) oldVal) .stream() .map(Object::toString) .collect(Collectors.joining(entry.getValue())); - data.addField(entry.getKey(), joined); + data.setPropertyValue(entry.getKey(), joined); } else { throw new IllegalArgumentException("Cannot join field:" + entry.getKey() + " with type: " + oldVal.getClass()); } @@ -248,11 +248,11 @@ public final class MutateProcessor implements Processor { private void doTrim(Data data) { for(String field : trim) { - Object val = data.getProperty(field, Object.class); + Object val = data.getPropertyValue(field, Object.class); if (val == null) { throw new 
IllegalArgumentException("Cannot trim field. [" + field + "] is null."); } else if (val instanceof String) { - data.addField(field, ((String) val).trim()); + data.setPropertyValue(field, ((String) val).trim()); } else { throw new IllegalArgumentException("Cannot trim field:" + field + " with type: " + val.getClass()); } @@ -261,11 +261,11 @@ public final class MutateProcessor implements Processor { private void doUppercase(Data data) { for(String field : uppercase) { - Object val = data.getProperty(field, Object.class); + Object val = data.getPropertyValue(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot uppercase field. [" + field + "] is null."); } else if (val instanceof String) { - data.addField(field, ((String) val).toUpperCase(Locale.ROOT)); + data.setPropertyValue(field, ((String) val).toUpperCase(Locale.ROOT)); } else { throw new IllegalArgumentException("Cannot uppercase field:" + field + " with type: " + val.getClass()); } @@ -274,11 +274,11 @@ public final class MutateProcessor implements Processor { private void doLowercase(Data data) { for(String field : lowercase) { - Object val = data.getProperty(field, Object.class); + Object val = data.getPropertyValue(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot lowercase field. 
[" + field + "] is null."); } else if (val instanceof String) { - data.addField(field, ((String) val).toLowerCase(Locale.ROOT)); + data.setPropertyValue(field, ((String) val).toLowerCase(Locale.ROOT)); } else { throw new IllegalArgumentException("Cannot lowercase field:" + field + " with type: " + val.getClass()); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index a50d79e6c8d..4c6f53d880e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -43,75 +43,75 @@ public class DataTests extends ESTestCase { data = new Data("index", "type", "id", document); } - public void testSimpleGetProperty() { - assertThat(data.getProperty("foo", String.class), equalTo("bar")); - assertThat(data.getProperty("int", Integer.class), equalTo(123)); + public void testSimpleGetPropertyValue() { + assertThat(data.getPropertyValue("foo", String.class), equalTo("bar")); + assertThat(data.getPropertyValue("int", Integer.class), equalTo(123)); } - public void testSimpleGetPropertyTypeMismatch() { + public void testSimpleGetPropertyValueTypeMismatch() { try { - data.getProperty("int", String.class); + data.getPropertyValue("int", String.class); fail("getProperty should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [int] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); } try { - data.getProperty("foo", Integer.class); + data.getPropertyValue("foo", Integer.class); fail("getProperty should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [foo] of type [java.lang.String] cannot be cast to [java.lang.Integer]")); } } - public void testNestedGetProperty() { - assertThat(data.getProperty("fizz.buzz", String.class), equalTo("hello world")); + public void 
testNestedGetPropertyValue() { + assertThat(data.getPropertyValue("fizz.buzz", String.class), equalTo("hello world")); } - public void testGetPropertyNotFound() { - assertThat(data.getProperty("not.here", String.class), nullValue()); + public void testGetPropertyValueNotFound() { + assertThat(data.getPropertyValue("not.here", String.class), nullValue()); } - public void testGetPropertyNull() { - assertNull(data.getProperty(null, String.class)); + public void testGetPropertyValueNull() { + assertNull(data.getPropertyValue(null, String.class)); } - public void testGetPropertyEmpty() { - assertNull(data.getProperty("", String.class)); + public void testGetPropertyValueEmpty() { + assertNull(data.getPropertyValue("", String.class)); } - public void testContainsProperty() { - assertTrue(data.containsProperty("fizz")); + public void testHasProperty() { + assertTrue(data.hasPropertyValue("fizz")); } - public void testContainsPropertyNested() { - assertTrue(data.containsProperty("fizz.buzz")); + public void testHasPropertyValueNested() { + assertTrue(data.hasPropertyValue("fizz.buzz")); } - public void testContainsPropertyNotFound() { - assertFalse(data.containsProperty("doesnotexist")); + public void testHasPropertyValueNotFound() { + assertFalse(data.hasPropertyValue("doesnotexist")); } - public void testContainsPropertyNestedNotFound() { - assertFalse(data.containsProperty("fizz.doesnotexist")); + public void testHasPropertyValueNestedNotFound() { + assertFalse(data.hasPropertyValue("fizz.doesnotexist")); } - public void testContainsPropertyNull() { - assertFalse(data.containsProperty(null)); + public void testHasPropertyValueNull() { + assertFalse(data.hasPropertyValue(null)); } - public void testContainsPropertyEmpty() { - assertFalse(data.containsProperty("")); + public void testHasPropertyValueEmpty() { + assertFalse(data.hasPropertyValue("")); } - public void testSimpleAddField() { - data.addField("new_field", "foo"); + public void testSimpleSetPropertyValue() { + 
data.setPropertyValue("new_field", "foo"); assertThat(data.getDocument().get("new_field"), equalTo("foo")); } @SuppressWarnings("unchecked") - public void testNestedAddField() { - data.addField("a.b.c.d", "foo"); + public void testNestedSetPropertyValue() { + data.setPropertyValue("a.b.c.d", "foo"); assertThat(data.getDocument().get("a"), instanceOf(Map.class)); Map a = (Map) data.getDocument().get("a"); assertThat(a.get("b"), instanceOf(Map.class)); @@ -123,14 +123,14 @@ public class DataTests extends ESTestCase { assertThat(d, equalTo("foo")); } - public void testAddFieldOnExistingField() { - data.addField("foo", "newbar"); + public void testSetPropertyValueOnExistingField() { + data.setPropertyValue("foo", "newbar"); assertThat(data.getDocument().get("foo"), equalTo("newbar")); } @SuppressWarnings("unchecked") - public void testAddFieldOnExistingParent() { - data.addField("fizz.new", "bar"); + public void testSetPropertyValueOnExistingParent() { + data.setPropertyValue("fizz.new", "bar"); assertThat(data.getDocument().get("fizz"), instanceOf(Map.class)); Map innerMap = (Map) data.getDocument().get("fizz"); assertThat(innerMap.get("new"), instanceOf(String.class)); @@ -138,36 +138,36 @@ public class DataTests extends ESTestCase { assertThat(value, equalTo("bar")); } - public void testAddFieldOnExistingParentTypeMismatch() { + public void testSetPropertyValueOnExistingParentTypeMismatch() { try { - data.addField("fizz.buzz.new", "bar"); + data.setPropertyValue("fizz.buzz.new", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], [java.util.Map] expected instead.")); } } - public void testAddFieldNullName() { + public void testSetPropertyValueNullName() { try { - data.addField(null, "bar"); + data.setPropertyValue(null, "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), 
equalTo("cannot add null or empty field")); } } - public void testAddFieldEmptyName() { + public void testSetPropertyValueEmptyName() { try { - data.addField("", "bar"); + data.setPropertyValue("", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); } } - public void testAddFieldNullValue() { + public void testSetPropertyValueNullValue() { try { - data.addField("new_field", null); + data.setPropertyValue("new_field", null); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null value to field [new_field]")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java index f288809a43a..2a9c444d691 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -38,7 +38,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 06 11:05:15"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T11:05:15.000+02:00")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T11:05:15.000+02:00")); } public void testJodaPatternMultipleFormats() { @@ -53,19 +53,19 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 06"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getPropertyValue("date_as_date", String.class), 
equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12/06/2010"); data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12-06-2010"); data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "2010"); @@ -85,7 +85,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 giugno"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); } public void testJodaPatternDefaultYear() { @@ -95,7 +95,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "12/06"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); } public void testTAI64N() { @@ -106,7 +106,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", dateAsString); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - 
assertThat(data.getProperty("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); } public void testUnixMs() { @@ -116,7 +116,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "1000500"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } public void testUnix() { @@ -126,6 +126,6 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "1000.5"); Data data = new Data("index", "type", "id", document); dateProcessor.execute(data); - assertThat(data.getProperty("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); + assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java index 53cd87da895..d026b3d8fbc 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java @@ -58,7 +58,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("foo", Integer.class), equalTo(123)); + assertThat(data.getPropertyValue("foo", Integer.class), equalTo(123)); } public void testRename() throws IOException { @@ -67,8 +67,8 @@ public 
class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, rename, null, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("bar", String.class), equalTo("bar")); - assertThat(data.containsProperty("foo"), is(false)); + assertThat(data.getPropertyValue("bar", String.class), equalTo("bar")); + assertThat(data.hasPropertyValue("foo"), is(false)); } public void testConvert() throws IOException { @@ -77,7 +77,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("num", Integer.class), equalTo(64)); + assertThat(data.getPropertyValue("num", Integer.class), equalTo(64)); } public void testConvertNullField() throws IOException { @@ -98,7 +98,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("arr", List.class), equalTo(Arrays.asList(1, 2, 3))); + assertThat(data.getPropertyValue("arr", List.class), equalTo(Arrays.asList(1, 2, 3))); } public void testSplit() throws IOException { @@ -107,7 +107,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("ip", List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); + assertThat(data.getPropertyValue("ip", List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); } public void testSplitNullValue() throws IOException { @@ -127,7 +127,7 @@ public class 
MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("ip", String.class), equalTo("127-0-0-1")); + assertThat(data.getPropertyValue("ip", String.class), equalTo("127-0-0-1")); } public void testGsub_NullValue() throws IOException { @@ -147,7 +147,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, join, null, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("arr", String.class), equalTo("1-2-3")); + assertThat(data.getPropertyValue("arr", String.class), equalTo("1-2-3")); } public void testRemove() throws IOException { @@ -155,8 +155,8 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, null, remove, null, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(5)); - assertThat(data.getProperty("foo", Object.class), nullValue()); - assertThat(data.getProperty("ip", Object.class), nullValue()); + assertThat(data.getPropertyValue("foo", Object.class), nullValue()); + assertThat(data.getPropertyValue("ip", Object.class), nullValue()); } public void testTrim() throws IOException { @@ -164,8 +164,8 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("foo", String.class), equalTo("bar")); - assertThat(data.getProperty("to_strip", String.class), equalTo("clean")); + assertThat(data.getPropertyValue("foo", String.class), equalTo("bar")); + assertThat(data.getPropertyValue("to_strip", String.class), 
equalTo("clean")); } public void testTrimNullValue() throws IOException { @@ -184,7 +184,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("foo", String.class), equalTo("BAR")); + assertThat(data.getPropertyValue("foo", String.class), equalTo("BAR")); } public void testUppercaseNullValue() throws IOException { @@ -203,7 +203,7 @@ public class MutateProcessorTests extends ESTestCase { Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); processor.execute(data); assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getProperty("alpha", String.class), equalTo("abcd")); + assertThat(data.getPropertyValue("alpha", String.class), equalTo("abcd")); } public void testLowercaseNullValue() throws IOException { From ab5b6491840eddbe8c19e43d3440682280ecf55e Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 18 Nov 2015 10:40:16 +0100 Subject: [PATCH 067/347] accept null values and adapt hasPropertyValue return value --- .../java/org/elasticsearch/ingest/Data.java | 60 +++++++++++-------- .../org/elasticsearch/ingest/DataTests.java | 24 +++++--- 2 files changed, 51 insertions(+), 33 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index ab9ab077ec9..c64b8865607 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -56,26 +56,6 @@ public final class Data { * @throws IllegalArgumentException if the field is present but is not of the type provided as argument. 
*/ public T getPropertyValue(String path, Class clazz) { - Object property = get(path); - if (property == null) { - return null; - } - if (clazz.isInstance(property)) { - return clazz.cast(property); - } - throw new IllegalArgumentException("field [" + path + "] of type [" + property.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); - } - - /** - * Checks whether the document contains a value for the provided path - * @param path The path within the document in dot-notation - * @return true if the document contains a non null value for the property, false otherwise - */ - public boolean hasPropertyValue(String path) { - return get(path) != null; - } - - private Object get(String path) { if (path == null || path.length() == 0) { return null; } @@ -95,7 +75,42 @@ public final class Data { } String leafKey = pathElements[pathElements.length - 1]; - return innerMap.get(leafKey); + Object property = innerMap.get(leafKey); + if (property == null) { + return null; + } + if (clazz.isInstance(property)) { + return clazz.cast(property); + } + throw new IllegalArgumentException("field [" + path + "] of type [" + property.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); + } + + /** + * Checks whether the document contains a value for the provided path + * @param path The path within the document in dot-notation + * @return true if the document contains a value for the property, false otherwise + */ + public boolean hasPropertyValue(String path) { + if (path == null || path.length() == 0) { + return false; + } + String[] pathElements = Strings.splitStringToArray(path, '.'); + assert pathElements.length > 0; + + Map innerMap = document; + for (int i = 0; i < pathElements.length - 1; i++) { + Object obj = innerMap.get(pathElements[i]); + if (obj instanceof Map) { + @SuppressWarnings("unchecked") + Map stringObjectMap = (Map) obj; + innerMap = stringObjectMap; + } else { + return false; + } + } + + String leafKey = 
pathElements[pathElements.length - 1]; + return innerMap.containsKey(leafKey); } /** @@ -108,9 +123,6 @@ public final class Data { if (path == null || path.length() == 0) { throw new IllegalArgumentException("cannot add null or empty field"); } - if (value == null) { - throw new IllegalArgumentException("cannot add null value to field [" + path + "]"); - } modified = true; String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index 4c6f53d880e..cf230d1c299 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -36,6 +36,7 @@ public class DataTests extends ESTestCase { public void setData() { Map document = new HashMap<>(); document.put("foo", "bar"); + document.put("foo_null", null); document.put("int", 123); Map innerObject = new HashMap<>(); innerObject.put("buzz", "hello world"); @@ -48,6 +49,10 @@ public class DataTests extends ESTestCase { assertThat(data.getPropertyValue("int", Integer.class), equalTo(123)); } + public void testGetPropertyValueNullValue() { + assertThat(data.getPropertyValue("foo_null", Object.class), nullValue()); + } + public void testSimpleGetPropertyValueTypeMismatch() { try { data.getPropertyValue("int", String.class); @@ -100,6 +105,10 @@ public class DataTests extends ESTestCase { assertFalse(data.hasPropertyValue(null)); } + public void testHasPropertyValueNullValue() { + assertTrue(data.hasPropertyValue("foo_null")); + } + public void testHasPropertyValueEmpty() { assertFalse(data.hasPropertyValue("")); } @@ -109,6 +118,12 @@ public class DataTests extends ESTestCase { assertThat(data.getDocument().get("new_field"), equalTo("foo")); } + public void testSetPropertyValueNullValue() { + data.setPropertyValue("new_field", null); + 
assertThat(data.getDocument().containsKey("new_field"), equalTo(true)); + assertThat(data.getDocument().get("new_field"), nullValue()); + } + @SuppressWarnings("unchecked") public void testNestedSetPropertyValue() { data.setPropertyValue("a.b.c.d", "foo"); @@ -165,15 +180,6 @@ public class DataTests extends ESTestCase { } } - public void testSetPropertyValueNullValue() { - try { - data.setPropertyValue("new_field", null); - fail("add field should have failed"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("cannot add null value to field [new_field]")); - } - } - public void testEqualsAndHashcode() throws Exception { String index = randomAsciiOfLengthBetween(1, 10); String type = randomAsciiOfLengthBetween(1, 10); From 59868cd02e6b5d97f752e3b1c29a1f506bf6fcfe Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 18 Nov 2015 17:28:37 +0100 Subject: [PATCH 068/347] add support for removeProperty --- .../java/org/elasticsearch/ingest/Data.java | 47 ++++++++++------ .../org/elasticsearch/ingest/DataTests.java | 56 ++++++++++++++++++- 2 files changed, 83 insertions(+), 20 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index c64b8865607..827d17483a3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -21,9 +21,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; +import java.util.*; /** * Represents the data and meta data (like id and type) of a single document that is going to be indexed. 
@@ -62,16 +60,9 @@ public final class Data { String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - Map innerMap = document; - for (int i = 0; i < pathElements.length - 1; i++) { - Object obj = innerMap.get(pathElements[i]); - if (obj instanceof Map) { - @SuppressWarnings("unchecked") - Map stringObjectMap = (Map) obj; - innerMap = stringObjectMap; - } else { - return null; - } + Map innerMap = getParent(pathElements); + if (innerMap == null) { + return null; } String leafKey = pathElements[pathElements.length - 1]; @@ -96,7 +87,29 @@ public final class Data { } String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; + Map innerMap = getParent(pathElements); + if (innerMap == null) { + return false; + } + String leafKey = pathElements[pathElements.length - 1]; + return innerMap.containsKey(leafKey); + } + public void removeProperty(String path) { + if (path == null || path.length() == 0) { + return; + } + String[] pathElements = Strings.splitStringToArray(path, '.'); + assert pathElements.length > 0; + + Map parent = getParent(pathElements); + if (parent != null) { + String leafKey = pathElements[pathElements.length - 1]; + parent.remove(leafKey); + } + } + + private Map getParent(String[] pathElements) { Map innerMap = document; for (int i = 0; i < pathElements.length - 1; i++) { Object obj = innerMap.get(pathElements[i]); @@ -105,12 +118,10 @@ public final class Data { Map stringObjectMap = (Map) obj; innerMap = stringObjectMap; } else { - return false; + return null; } } - - String leafKey = pathElements[pathElements.length - 1]; - return innerMap.containsKey(leafKey); + return innerMap; } /** @@ -136,6 +147,8 @@ public final class Data { @SuppressWarnings("unchecked") Map stringObjectMap = (Map) object; inner = stringObjectMap; + } else if (object == null ) { + throw new IllegalArgumentException("cannot add field to null parent, [" + Map.class.getName() + "] expected instead."); } 
else { throw new IllegalArgumentException("cannot add field to parent [" + pathElement + "] of type [" + object.getClass().getName() + "], [" + Map.class.getName() + "] expected instead."); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index cf230d1c299..5179a468b58 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -36,10 +36,10 @@ public class DataTests extends ESTestCase { public void setData() { Map document = new HashMap<>(); document.put("foo", "bar"); - document.put("foo_null", null); document.put("int", 123); Map innerObject = new HashMap<>(); innerObject.put("buzz", "hello world"); + innerObject.put("foo_null", null); document.put("fizz", innerObject); data = new Data("index", "type", "id", document); } @@ -50,7 +50,7 @@ public class DataTests extends ESTestCase { } public void testGetPropertyValueNullValue() { - assertThat(data.getPropertyValue("foo_null", Object.class), nullValue()); + assertThat(data.getPropertyValue("fizz.foo_null", Object.class), nullValue()); } public void testSimpleGetPropertyValueTypeMismatch() { @@ -106,7 +106,7 @@ public class DataTests extends ESTestCase { } public void testHasPropertyValueNullValue() { - assertTrue(data.hasPropertyValue("foo_null")); + assertTrue(data.hasPropertyValue("fizz.foo_null")); } public void testHasPropertyValueEmpty() { @@ -162,6 +162,15 @@ public class DataTests extends ESTestCase { } } + public void testSetPropertyValueOnExistingNullParent() { + try { + data.setPropertyValue("fizz.foo_null.test", "bar"); + fail("add field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot add field to null parent, [java.util.Map] expected instead.")); + } + } + public void testSetPropertyValueNullName() { try { data.setPropertyValue(null, "bar"); 
@@ -180,6 +189,47 @@ public class DataTests extends ESTestCase { } } + public void testRemoveProperty() { + data.removeProperty("foo"); + assertThat(data.getDocument().size(), equalTo(2)); + assertThat(data.getDocument().containsKey("foo"), equalTo(false)); + } + + public void testRemoveInnerProperty() { + data.removeProperty("fizz.buzz"); + assertThat(data.getDocument().size(), equalTo(3)); + assertThat(data.getDocument().get("fizz"), instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map)data.getDocument().get("fizz"); + assertThat(map.size(), equalTo(1)); + assertThat(map.containsKey("buzz"), equalTo(false)); + + data.removeProperty("fizz.foo_null"); + assertThat(map.size(), equalTo(0)); + assertThat(data.getDocument().size(), equalTo(3)); + assertThat(data.getDocument().containsKey("fizz"), equalTo(true)); + } + + public void testRemoveNonExistingProperty() { + data.removeProperty("does_not_exist"); + assertThat(data.getDocument().size(), equalTo(3)); + } + + public void testRemoveExistingParentTypeMismatch() { + data.removeProperty("foo.test"); + assertThat(data.getDocument().size(), equalTo(3)); + } + + public void testRemoveNullProperty() { + data.removeProperty(null); + assertThat(data.getDocument().size(), equalTo(3)); + } + + public void testRemoveEmptyProperty() { + data.removeProperty(""); + assertThat(data.getDocument().size(), equalTo(3)); + } + public void testEqualsAndHashcode() throws Exception { String index = randomAsciiOfLengthBetween(1, 10); String type = randomAsciiOfLengthBetween(1, 10); From 404ae395ca6e916e480aa330082448cee4015c0f Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 18 Nov 2015 18:00:25 +0100 Subject: [PATCH 069/347] add javadocs for Data#removeProperty --- .../ingest/src/main/java/org/elasticsearch/ingest/Data.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 827d17483a3..cafa8133877 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -95,13 +95,16 @@ public final class Data { return innerMap.containsKey(leafKey); } + /** + * Removes the property identified by the provided path + * @param path the path of the property to be removed + */ public void removeProperty(String path) { if (path == null || path.length() == 0) { return; } String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - Map parent = getParent(pathElements); if (parent != null) { String leafKey = pathElements[pathElements.length - 1]; From be3e91334902ea155a395ad520cd37bbe68cd050 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 20 Nov 2015 17:59:45 +0100 Subject: [PATCH 070/347] set Data modified flag when a property is removed and improve behaviour when adding a field --- .../main/java/org/elasticsearch/ingest/Data.java | 7 +++++-- .../java/org/elasticsearch/ingest/DataTests.java | 14 ++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index cafa8133877..910c3e08909 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -108,7 +108,10 @@ public final class Data { Map parent = getParent(pathElements); if (parent != null) { String leafKey = pathElements[pathElements.length - 1]; - parent.remove(leafKey); + if (parent.containsKey(leafKey)) { + modified = true; + parent.remove(leafKey); + } } } @@ -137,7 +140,6 @@ public final class Data { if (path == null || path.length() == 0) { throw new IllegalArgumentException("cannot add null or empty field"); } - modified = true; String[] pathElements = 
Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; @@ -164,6 +166,7 @@ public final class Data { String leafKey = pathElements[pathElements.length - 1]; inner.put(leafKey, value); + modified = true; } public String getIndex() { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java index 5179a468b58..64cb6489e4f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/DataTests.java @@ -116,12 +116,14 @@ public class DataTests extends ESTestCase { public void testSimpleSetPropertyValue() { data.setPropertyValue("new_field", "foo"); assertThat(data.getDocument().get("new_field"), equalTo("foo")); + assertThat(data.isModified(), equalTo(true)); } public void testSetPropertyValueNullValue() { data.setPropertyValue("new_field", null); assertThat(data.getDocument().containsKey("new_field"), equalTo(true)); assertThat(data.getDocument().get("new_field"), nullValue()); + assertThat(data.isModified(), equalTo(true)); } @SuppressWarnings("unchecked") @@ -136,6 +138,7 @@ public class DataTests extends ESTestCase { assertThat(c.get("d"), instanceOf(String.class)); String d = (String) c.get("d"); assertThat(d, equalTo("foo")); + assertThat(data.isModified(), equalTo(true)); } public void testSetPropertyValueOnExistingField() { @@ -151,6 +154,7 @@ public class DataTests extends ESTestCase { assertThat(innerMap.get("new"), instanceOf(String.class)); String value = (String) innerMap.get("new"); assertThat(value, equalTo("bar")); + assertThat(data.isModified(), equalTo(true)); } public void testSetPropertyValueOnExistingParentTypeMismatch() { @@ -159,6 +163,7 @@ public class DataTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], 
[java.util.Map] expected instead.")); + assertThat(data.isModified(), equalTo(false)); } } @@ -168,6 +173,7 @@ public class DataTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to null parent, [java.util.Map] expected instead.")); + assertThat(data.isModified(), equalTo(false)); } } @@ -177,6 +183,7 @@ public class DataTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); + assertThat(data.isModified(), equalTo(false)); } } @@ -186,11 +193,13 @@ public class DataTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); + assertThat(data.isModified(), equalTo(false)); } } public void testRemoveProperty() { data.removeProperty("foo"); + assertThat(data.isModified(), equalTo(true)); assertThat(data.getDocument().size(), equalTo(2)); assertThat(data.getDocument().containsKey("foo"), equalTo(false)); } @@ -208,25 +217,30 @@ public class DataTests extends ESTestCase { assertThat(map.size(), equalTo(0)); assertThat(data.getDocument().size(), equalTo(3)); assertThat(data.getDocument().containsKey("fizz"), equalTo(true)); + assertThat(data.isModified(), equalTo(true)); } public void testRemoveNonExistingProperty() { data.removeProperty("does_not_exist"); + assertThat(data.isModified(), equalTo(false)); assertThat(data.getDocument().size(), equalTo(3)); } public void testRemoveExistingParentTypeMismatch() { data.removeProperty("foo.test"); + assertThat(data.isModified(), equalTo(false)); assertThat(data.getDocument().size(), equalTo(3)); } public void testRemoveNullProperty() { data.removeProperty(null); + assertThat(data.isModified(), equalTo(false)); assertThat(data.getDocument().size(), equalTo(3)); } public void 
testRemoveEmptyProperty() { data.removeProperty(""); + assertThat(data.isModified(), equalTo(false)); assertThat(data.getDocument().size(), equalTo(3)); } From bb298ed27a03dc6f2d696bdf05e8290f24368771 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 20 Nov 2015 18:01:27 +0100 Subject: [PATCH 071/347] add some javadocs to Data#getDocument --- .../ingest/src/main/java/org/elasticsearch/ingest/Data.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java index 910c3e08909..3c3a25f84af 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java @@ -181,6 +181,11 @@ public final class Data { return id; } + /** + * Returns the document. Should be used only for reading. Any change made to the returned map will + * not be reflected to the modified flag. Modify the document instead using {@link #setPropertyValue(String, Object)} + * and {@link #removeProperty(String)} + */ public Map getDocument() { return document; } From ecc8158b891ab9b9d5f572ca49edf32c9b1ff010 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 23 Nov 2015 13:12:46 +0100 Subject: [PATCH 072/347] renamed Data to IngestDocument moved all metadata related fields to a single metadata map removed specific metadata getters with a generic getMetadata() --- .../ingest/{Data.java => IngestDocument.java} | 78 +++++---- .../org/elasticsearch/ingest/Pipeline.java | 4 +- .../ingest/processor/Processor.java | 4 +- .../ingest/processor/date/DateProcessor.java | 8 +- .../processor/geoip/GeoIpProcessor.java | 8 +- .../ingest/processor/grok/GrokProcessor.java | 8 +- .../processor/mutate/MutateProcessor.java | 84 +++++----- .../ingest/PipelineExecutionService.java | 10 +- .../ingest/transport/IngestActionFilter.java | 22 +-- .../ingest/transport/TransportData.java | 40 ++--- 
.../simulate/ParsedSimulateRequest.java | 26 +-- .../SimulateDocumentSimpleResult.java | 10 +- .../simulate/SimulateExecutionService.java | 24 +-- .../simulate/SimulateProcessorResult.java | 10 +- .../org/elasticsearch/ingest/DataTests.java | 152 +++++++++--------- .../elasticsearch/ingest/IngestClientIT.java | 4 +- .../processor/date/DateProcessorTests.java | 60 +++---- .../processor/geoip/GeoIpProcessorTests.java | 28 ++-- .../mutate/MutateProcessorTests.java | 90 +++++------ .../ingest/PipelineExecutionServiceTests.java | 26 +-- .../transport/IngestActionFilterTests.java | 18 +-- .../ingest/transport/TransportDataTests.java | 12 +- .../ParsedSimulateRequestParserTests.java | 25 +-- .../SimulateDocumentSimpleResultTests.java | 6 +- .../SimulateExecutionServiceTests.java | 40 ++--- .../SimulatePipelineResponseTests.java | 10 +- .../SimulateProcessorResultTests.java | 6 +- 27 files changed, 416 insertions(+), 397 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/{Data.java => IngestDocument.java} (80%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java similarity index 80% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 3c3a25f84af..27606090d29 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Data.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -24,26 +24,26 @@ import org.elasticsearch.common.Strings; import java.util.*; /** - * Represents the data and meta data (like id and type) of a single document that is going to be indexed. + * Represents a single document being captured before indexing and holds the source and meta data (like id, type and index). 
*/ -public final class Data { +public final class IngestDocument { - private final String index; - private final String type; - private final String id; - private final Map document; + private final Map metaData; + private final Map source; private boolean modified = false; - public Data(String index, String type, String id, Map document) { - this.index = index; - this.type = type; - this.id = id; - this.document = document; + public IngestDocument(String index, String type, String id, Map source) { + this.metaData = new HashMap<>(); + this.metaData.put("_index", index); + this.metaData.put("_type", type); + this.metaData.put("_id", id); + this.source = source; } - public Data(Data other) { - this(other.index, other.type, other.id, new HashMap<>(other.document)); + public IngestDocument(IngestDocument other) { + this.metaData = new HashMap<>(other.metaData); + this.source = new HashMap<>(other.source); } /** @@ -116,7 +116,7 @@ public final class Data { } private Map getParent(String[] pathElements) { - Map innerMap = document; + Map innerMap = source; for (int i = 0; i < pathElements.length - 1; i++) { Object obj = innerMap.get(pathElements[i]); if (obj instanceof Map) { @@ -143,7 +143,7 @@ public final class Data { String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - Map inner = document; + Map inner = source; for (int i = 0; i < pathElements.length - 1; i++) { String pathElement = pathElements[i]; if (inner.containsKey(pathElement)) { @@ -169,16 +169,8 @@ public final class Data { modified = true; } - public String getIndex() { - return index; - } - - public String getType() { - return type; - } - - public String getId() { - return id; + public String getMetadata(MetaData metaData) { + return this.metaData.get(metaData.getName()); } /** @@ -186,8 +178,8 @@ public final class Data { * not be reflected to the modified flag. 
Modify the document instead using {@link #setPropertyValue(String, Object)} * and {@link #removeProperty(String)} */ - public Map getDocument() { - return document; + public Map getSource() { + return source; } public boolean isModified() { @@ -201,15 +193,35 @@ public final class Data { return false; } - Data other = (Data) obj; - return Objects.equals(document, other.document) && - Objects.equals(index, other.index) && - Objects.equals(type, other.type) && - Objects.equals(id, other.id); + IngestDocument other = (IngestDocument) obj; + return Objects.equals(source, other.source) && + Objects.equals(metaData, other.metaData); } @Override public int hashCode() { - return Objects.hash(index, type, id, document); + return Objects.hash(metaData, source); } + + public enum MetaData { + + INDEX("_index"), + TYPE("_type"), + ID("_id"), + ROUTING("_routing"), + PARENT("_parent"), + TIMESTAMP("_timestamp"), + TTL("_ttl"); + + private final String name; + + MetaData(String name) { + this.name = name; + } + + public String getName() { + return name; + } + } + } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 7b44f7d5a7f..12575c731ee 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -44,9 +44,9 @@ public final class Pipeline { /** * Modifies the data of a document to be indexed based on the processor this pipeline holds */ - public void execute(Data data) { + public void execute(IngestDocument ingestDocument) { for (Processor processor : processors) { - processor.execute(data); + processor.execute(ingestDocument); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index fc268b2b128..6e7d276876c 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -20,7 +20,7 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import java.io.Closeable; import java.io.IOException; @@ -36,7 +36,7 @@ public interface Processor { /** * Introspect and potentially modify the incoming data. */ - void execute(Data data); + void execute(IngestDocument ingestDocument); /** * Gets the type of a processor diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index ead4e0a1b1c..6f2016fc5c5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.processor.date; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; import org.joda.time.DateTime; @@ -56,8 +56,8 @@ public final class DateProcessor implements Processor { } @Override - public void execute(Data data) { - String value = data.getPropertyValue(matchField, String.class); + public void execute(IngestDocument ingestDocument) { + String value = ingestDocument.getPropertyValue(matchField, String.class); // TODO(talevy): handle custom timestamp fields DateTime dateTime = null; @@ -75,7 +75,7 @@ public final class DateProcessor implements Processor { throw new IllegalArgumentException("unable to parse date [" + value + "]", lastException); } - data.setPropertyValue(targetField, ISODateTimeFormat.dateTime().print(dateTime)); + ingestDocument.setPropertyValue(targetField, 
ISODateTimeFormat.dateTime().print(dateTime)); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 629a7b59837..400632e828c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -26,7 +26,7 @@ import com.maxmind.geoip2.model.CountryResponse; import com.maxmind.geoip2.record.*; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.Processor; import java.io.IOException; @@ -60,8 +60,8 @@ public final class GeoIpProcessor implements Processor { } @Override - public void execute(Data data) { - String ip = data.getPropertyValue(sourceField, String.class); + public void execute(IngestDocument ingestDocument) { + String ip = ingestDocument.getPropertyValue(sourceField, String.class); final InetAddress ipAddress; try { ipAddress = InetAddress.getByName(ip); @@ -88,7 +88,7 @@ public final class GeoIpProcessor implements Processor { default: throw new IllegalStateException("Unsupported database type [" + dbReader.getMetadata().getDatabaseType() + "]"); } - data.setPropertyValue(targetField, geoData); + ingestDocument.setPropertyValue(targetField, geoData); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 58b999c8f78..31440ad9e53 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -19,7 +19,7 @@ 
package org.elasticsearch.ingest.processor.grok; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; @@ -45,13 +45,13 @@ public final class GrokProcessor implements Processor { } @Override - public void execute(Data data) { - Object field = data.getPropertyValue(matchField, Object.class); + public void execute(IngestDocument ingestDocument) { + Object field = ingestDocument.getPropertyValue(matchField, Object.class); // TODO(talevy): handle invalid field types if (field instanceof String) { Map matches = grok.captures((String) field); if (matches != null) { - matches.forEach((k, v) -> data.setPropertyValue(k, v)); + matches.forEach((k, v) -> ingestDocument.setPropertyValue(k, v)); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java index 3ae1fb98cf2..ee121b61a4b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java @@ -20,7 +20,7 @@ package org.elasticsearch.ingest.processor.mutate; import org.elasticsearch.common.Booleans; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; @@ -101,36 +101,36 @@ public final class MutateProcessor implements Processor { } @Override - public void execute(Data data) { + public void execute(IngestDocument ingestDocument) { if (update != null) { - doUpdate(data); + doUpdate(ingestDocument); } if (rename != null) { - doRename(data); + doRename(ingestDocument); } if (convert != null) { - doConvert(data); + doConvert(ingestDocument); } if (split 
!= null) { - doSplit(data); + doSplit(ingestDocument); } if (gsub != null) { - doGsub(data); + doGsub(ingestDocument); } if (join != null) { - doJoin(data); + doJoin(ingestDocument); } if (remove != null) { - doRemove(data); + doRemove(ingestDocument); } if (trim != null) { - doTrim(data); + doTrim(ingestDocument); } if (uppercase != null) { - doUppercase(data); + doUppercase(ingestDocument); } if (lowercase != null) { - doLowercase(data); + doLowercase(ingestDocument); } } @@ -139,18 +139,18 @@ public final class MutateProcessor implements Processor { return TYPE; } - private void doUpdate(Data data) { + private void doUpdate(IngestDocument ingestDocument) { for(Map.Entry entry : update.entrySet()) { - data.setPropertyValue(entry.getKey(), entry.getValue()); + ingestDocument.setPropertyValue(entry.getKey(), entry.getValue()); } } - private void doRename(Data data) { + private void doRename(IngestDocument ingestDocument) { for(Map.Entry entry : rename.entrySet()) { - if (data.hasPropertyValue(entry.getKey())) { - Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); - data.getDocument().remove(entry.getKey()); - data.setPropertyValue(entry.getValue(), oldVal); + if (ingestDocument.hasPropertyValue(entry.getKey())) { + Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); + ingestDocument.getSource().remove(entry.getKey()); + ingestDocument.setPropertyValue(entry.getValue(), oldVal); } } } @@ -175,11 +175,11 @@ public final class MutateProcessor implements Processor { } @SuppressWarnings("unchecked") - private void doConvert(Data data) { + private void doConvert(IngestDocument ingestDocument) { for(Map.Entry entry : convert.entrySet()) { String toType = entry.getValue(); - Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); + Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); Object newVal; if (oldVal instanceof List) { @@ -194,91 +194,91 @@ public final class MutateProcessor 
implements Processor { newVal = parseValueAsType(oldVal, toType); } - data.setPropertyValue(entry.getKey(), newVal); + ingestDocument.setPropertyValue(entry.getKey(), newVal); } } - private void doSplit(Data data) { + private void doSplit(IngestDocument ingestDocument) { for(Map.Entry entry : split.entrySet()) { - Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); + Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); if (oldVal == null) { throw new IllegalArgumentException("Cannot split field. [" + entry.getKey() + "] is null."); } else if (oldVal instanceof String) { - data.setPropertyValue(entry.getKey(), Arrays.asList(((String) oldVal).split(entry.getValue()))); + ingestDocument.setPropertyValue(entry.getKey(), Arrays.asList(((String) oldVal).split(entry.getValue()))); } else { throw new IllegalArgumentException("Cannot split a field that is not a String type"); } } } - private void doGsub(Data data) { + private void doGsub(IngestDocument ingestDocument) { for (GsubExpression gsubExpression : gsub) { - String oldVal = data.getPropertyValue(gsubExpression.getFieldName(), String.class); + String oldVal = ingestDocument.getPropertyValue(gsubExpression.getFieldName(), String.class); if (oldVal == null) { throw new IllegalArgumentException("Field \"" + gsubExpression.getFieldName() + "\" is null, cannot match pattern."); } Matcher matcher = gsubExpression.getPattern().matcher(oldVal); String newVal = matcher.replaceAll(gsubExpression.getReplacement()); - data.setPropertyValue(gsubExpression.getFieldName(), newVal); + ingestDocument.setPropertyValue(gsubExpression.getFieldName(), newVal); } } @SuppressWarnings("unchecked") - private void doJoin(Data data) { + private void doJoin(IngestDocument ingestDocument) { for(Map.Entry entry : join.entrySet()) { - Object oldVal = data.getPropertyValue(entry.getKey(), Object.class); + Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); if (oldVal instanceof 
List) { String joined = (String) ((List) oldVal) .stream() .map(Object::toString) .collect(Collectors.joining(entry.getValue())); - data.setPropertyValue(entry.getKey(), joined); + ingestDocument.setPropertyValue(entry.getKey(), joined); } else { throw new IllegalArgumentException("Cannot join field:" + entry.getKey() + " with type: " + oldVal.getClass()); } } } - private void doRemove(Data data) { + private void doRemove(IngestDocument ingestDocument) { for(String field : remove) { - data.getDocument().remove(field); + ingestDocument.getSource().remove(field); } } - private void doTrim(Data data) { + private void doTrim(IngestDocument ingestDocument) { for(String field : trim) { - Object val = data.getPropertyValue(field, Object.class); + Object val = ingestDocument.getPropertyValue(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot trim field. [" + field + "] is null."); } else if (val instanceof String) { - data.setPropertyValue(field, ((String) val).trim()); + ingestDocument.setPropertyValue(field, ((String) val).trim()); } else { throw new IllegalArgumentException("Cannot trim field:" + field + " with type: " + val.getClass()); } } } - private void doUppercase(Data data) { + private void doUppercase(IngestDocument ingestDocument) { for(String field : uppercase) { - Object val = data.getPropertyValue(field, Object.class); + Object val = ingestDocument.getPropertyValue(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot uppercase field. 
[" + field + "] is null."); } else if (val instanceof String) { - data.setPropertyValue(field, ((String) val).toUpperCase(Locale.ROOT)); + ingestDocument.setPropertyValue(field, ((String) val).toUpperCase(Locale.ROOT)); } else { throw new IllegalArgumentException("Cannot uppercase field:" + field + " with type: " + val.getClass()); } } } - private void doLowercase(Data data) { + private void doLowercase(IngestDocument ingestDocument) { for(String field : lowercase) { - Object val = data.getPropertyValue(field, Object.class); + Object val = ingestDocument.getPropertyValue(field, Object.class); if (val == null) { throw new IllegalArgumentException("Cannot lowercase field. [" + field + "] is null."); } else if (val instanceof String) { - data.setPropertyValue(field, ((String) val).toLowerCase(Locale.ROOT)); + ingestDocument.setPropertyValue(field, ((String) val).toLowerCase(Locale.ROOT)); } else { throw new IllegalArgumentException("Cannot lowercase field:" + field + " with type: " + val.getClass()); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 18d656813ec..82e3a403fd1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.support.LoggerMessageFormat; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.threadpool.ThreadPool; @@ -40,7 +40,7 @@ public class PipelineExecutionService { this.threadPool = threadPool; } - public void execute(Data data, String 
pipelineId, Listener listener) { + public void execute(IngestDocument ingestDocument, String pipelineId, Listener listener) { Pipeline pipeline = store.get(pipelineId); if (pipeline == null) { listener.failed(new IllegalArgumentException(LoggerMessageFormat.format("pipeline with id [{}] does not exist", pipelineId))); @@ -51,8 +51,8 @@ public class PipelineExecutionService { @Override public void run() { try { - pipeline.execute(data); - listener.executed(data); + pipeline.execute(ingestDocument); + listener.executed(ingestDocument); } catch (Exception e) { listener.failed(e); } @@ -62,7 +62,7 @@ public class PipelineExecutionService { public interface Listener { - void executed(Data data); + void executed(IngestDocument ingestDocument); void failed(Exception e); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 3483544a8ce..37099df9b5f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -29,7 +29,7 @@ import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; @@ -82,12 +82,12 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte } Map sourceAsMap = indexRequest.sourceAsMap(); - Data data = new Data(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); - executionService.execute(data, pipelineId, new PipelineExecutionService.Listener() { + 
IngestDocument ingestDocument = new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); + executionService.execute(ingestDocument, pipelineId, new PipelineExecutionService.Listener() { @Override - public void executed(Data data) { - if (data.isModified()) { - indexRequest.source(data.getDocument()); + public void executed(IngestDocument ingestDocument) { + if (ingestDocument.isModified()) { + indexRequest.source(ingestDocument.getSource()); } indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); chain.proceed(action, indexRequest, listener); @@ -115,12 +115,12 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte IndexRequest indexRequest = (IndexRequest) actionRequest; Map sourceAsMap = indexRequest.sourceAsMap(); - Data data = new Data(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); - executionService.execute(data, pipelineId, new PipelineExecutionService.Listener() { + IngestDocument ingestDocument = new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); + executionService.execute(ingestDocument, pipelineId, new PipelineExecutionService.Listener() { @Override - public void executed(Data data) { - if (data.isModified()) { - indexRequest.source(data.getDocument()); + public void executed(IngestDocument ingestDocument) { + if (ingestDocument.isModified()) { + indexRequest.source(ingestDocument.getSource()); } processBulkIndexRequest(action, listener, chain, bulkRequest, pipelineId, requests); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java index cc9a8513e03..bb26161582a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java @@ 
-25,24 +25,28 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; +import static org.elasticsearch.ingest.IngestDocument.MetaData.INDEX; +import static org.elasticsearch.ingest.IngestDocument.MetaData.TYPE; + public class TransportData implements Writeable, ToXContent { private static final TransportData PROTOTYPE = new TransportData(null); - private final Data data; + private final IngestDocument ingestDocument; - public TransportData(Data data) { - this.data = data; + public TransportData(IngestDocument ingestDocument) { + this.ingestDocument = ingestDocument; } - public Data get() { - return data; + public IngestDocument get() { + return ingestDocument; } public static TransportData readTransportDataFrom(StreamInput in) throws IOException { @@ -55,25 +59,25 @@ public class TransportData implements Writeable, ToXContent { String type = in.readString(); String id = in.readString(); Map doc = in.readMap(); - return new TransportData(new Data(index, type, id, doc)); + return new TransportData(new IngestDocument(index, type, id, doc)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(data.getIndex()); - out.writeString(data.getType()); - out.writeString(data.getId()); - out.writeMap(data.getDocument()); + out.writeString(ingestDocument.getMetadata(INDEX)); + out.writeString(ingestDocument.getMetadata(TYPE)); + out.writeString(ingestDocument.getMetadata(ID)); + out.writeMap(ingestDocument.getSource()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 
builder.startObject(Fields.DOCUMENT); - builder.field(Fields.MODIFIED, data.isModified()); - builder.field(Fields.INDEX, data.getIndex()); - builder.field(Fields.TYPE, data.getType()); - builder.field(Fields.ID, data.getId()); - builder.field(Fields.SOURCE, data.getDocument()); + builder.field(Fields.MODIFIED, ingestDocument.isModified()); + builder.field(Fields.INDEX, ingestDocument.getMetadata(INDEX)); + builder.field(Fields.TYPE, ingestDocument.getMetadata(TYPE)); + builder.field(Fields.ID, ingestDocument.getMetadata(ID)); + builder.field(Fields.SOURCE, ingestDocument.getSource()); builder.endObject(); return builder; } @@ -87,12 +91,12 @@ public class TransportData implements Writeable, ToXContent { return false; } TransportData that = (TransportData) o; - return Objects.equals(data, that.data); + return Objects.equals(ingestDocument, that.ingestDocument); } @Override public int hashCode() { - return Objects.hash(data); + return Objects.hash(ingestDocument); } static final class Fields { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java index 50897435217..4f55ef8424d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.plugin.ingest.transport.simulate; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.plugin.ingest.PipelineStore; @@ -29,11 +29,11 @@ import java.util.*; import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; public class ParsedSimulateRequest 
{ - private final List documents; + private final List documents; private final Pipeline pipeline; private final boolean verbose; - ParsedSimulateRequest(Pipeline pipeline, List documents, boolean verbose) { + ParsedSimulateRequest(Pipeline pipeline, List documents, boolean verbose) { this.pipeline = pipeline; this.documents = Collections.unmodifiableList(documents); this.verbose = verbose; @@ -43,7 +43,7 @@ public class ParsedSimulateRequest { return pipeline; } - public List getDocuments() { + public List getDocuments() { return documents; } @@ -55,18 +55,18 @@ public class ParsedSimulateRequest { private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory(); public static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline"; - private List parseDocs(Map config) { + private List parseDocs(Map config) { List> docs = ConfigurationUtils.readList(config, Fields.DOCS); - List dataList = new ArrayList<>(); + List ingestDocumentList = new ArrayList<>(); for (Map dataMap : docs) { Map document = ConfigurationUtils.readMap(dataMap, Fields.SOURCE); - Data data = new Data(ConfigurationUtils.readStringProperty(dataMap, Fields.INDEX), + IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(dataMap, Fields.INDEX), ConfigurationUtils.readStringProperty(dataMap, Fields.TYPE), ConfigurationUtils.readStringProperty(dataMap, Fields.ID), document); - dataList.add(data); + ingestDocumentList.add(ingestDocument); } - return dataList; + return ingestDocumentList; } public ParsedSimulateRequest parseWithPipelineId(String pipelineId, Map config, boolean verbose, PipelineStore pipelineStore) { @@ -74,16 +74,16 @@ public class ParsedSimulateRequest { throw new IllegalArgumentException("param [pipeline] is null"); } Pipeline pipeline = pipelineStore.get(pipelineId); - List dataList = parseDocs(config); - return new ParsedSimulateRequest(pipeline, dataList, verbose); + List ingestDocumentList = parseDocs(config); + return new 
ParsedSimulateRequest(pipeline, ingestDocumentList, verbose); } public ParsedSimulateRequest parse(Map config, boolean verbose, PipelineStore pipelineStore) throws IOException { Map pipelineConfig = ConfigurationUtils.readMap(config, Fields.PIPELINE); Pipeline pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); - List dataList = parseDocs(config); - return new ParsedSimulateRequest(pipeline, dataList, verbose); + List ingestDocumentList = parseDocs(config); + return new ParsedSimulateRequest(pipeline, ingestDocumentList, verbose); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java index 1783b10f998..8cf08e3dc61 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java @@ -22,20 +22,20 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.plugin.ingest.transport.TransportData; import java.io.IOException; public class SimulateDocumentSimpleResult implements SimulateDocumentResult { - private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult((Data)null); + private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult((IngestDocument)null); private TransportData data; private Exception failure; - public SimulateDocumentSimpleResult(Data data) { - this.data = new 
TransportData(data); + public SimulateDocumentSimpleResult(IngestDocument ingestDocument) { + this.data = new TransportData(ingestDocument); } private SimulateDocumentSimpleResult(TransportData data) { @@ -46,7 +46,7 @@ public class SimulateDocumentSimpleResult implements SimulateDocumentResult processorResultList = new ArrayList<>(); - Data currentData = new Data(data); + IngestDocument currentIngestDocument = new IngestDocument(ingestDocument); for (int i = 0; i < pipeline.getProcessors().size(); i++) { Processor processor = pipeline.getProcessors().get(i); String processorId = "processor[" + processor.getType() + "]-" + i; try { - processor.execute(currentData); - processorResultList.add(new SimulateProcessorResult(processorId, currentData)); + processor.execute(currentIngestDocument); + processorResultList.add(new SimulateProcessorResult(processorId, currentIngestDocument)); } catch (Exception e) { processorResultList.add(new SimulateProcessorResult(processorId, e)); } - currentData = new Data(currentData); + currentIngestDocument = new IngestDocument(currentIngestDocument); } return new SimulateDocumentVerboseResult(processorResultList); } @@ -73,11 +73,11 @@ public class SimulateExecutionService { @Override public void run() { List responses = new ArrayList<>(); - for (Data data : request.getDocuments()) { + for (IngestDocument ingestDocument : request.getDocuments()) { if (request.isVerbose()) { - responses.add(executeVerboseItem(request.getPipeline(), data)); + responses.add(executeVerboseItem(request.getPipeline(), ingestDocument)); } else { - responses.add(executeItem(request.getPipeline(), data)); + responses.add(executeItem(request.getPipeline(), ingestDocument)); } } listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java index 138f1ae553c..27d2065848c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java @@ -25,22 +25,22 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.plugin.ingest.transport.TransportData; import java.io.IOException; public class SimulateProcessorResult implements Writeable, ToXContent { - private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult(null, (Data)null); + private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult(null, (IngestDocument)null); private String processorId; private TransportData data; private Exception failure; - public SimulateProcessorResult(String processorId, Data data) { + public SimulateProcessorResult(String processorId, IngestDocument ingestDocument) { this.processorId = processorId; - this.data = new TransportData(data); + this.data = new TransportData(ingestDocument); } private SimulateProcessorResult(String processorId, TransportData data) { @@ -53,7 +53,7 @@ public class SimulateProcessorResult implements Writeable a = (Map) data.getDocument().get("a"); + ingestDocument.setPropertyValue("a.b.c.d", "foo"); + assertThat(ingestDocument.getSource().get("a"), instanceOf(Map.class)); + Map a = (Map) ingestDocument.getSource().get("a"); assertThat(a.get("b"), instanceOf(Map.class)); Map b = (Map) a.get("b"); assertThat(b.get("c"), instanceOf(Map.class)); @@ -138,110 +138,110 @@ public class DataTests 
extends ESTestCase { assertThat(c.get("d"), instanceOf(String.class)); String d = (String) c.get("d"); assertThat(d, equalTo("foo")); - assertThat(data.isModified(), equalTo(true)); + assertThat(ingestDocument.isModified(), equalTo(true)); } public void testSetPropertyValueOnExistingField() { - data.setPropertyValue("foo", "newbar"); - assertThat(data.getDocument().get("foo"), equalTo("newbar")); + ingestDocument.setPropertyValue("foo", "newbar"); + assertThat(ingestDocument.getSource().get("foo"), equalTo("newbar")); } @SuppressWarnings("unchecked") public void testSetPropertyValueOnExistingParent() { - data.setPropertyValue("fizz.new", "bar"); - assertThat(data.getDocument().get("fizz"), instanceOf(Map.class)); - Map innerMap = (Map) data.getDocument().get("fizz"); + ingestDocument.setPropertyValue("fizz.new", "bar"); + assertThat(ingestDocument.getSource().get("fizz"), instanceOf(Map.class)); + Map innerMap = (Map) ingestDocument.getSource().get("fizz"); assertThat(innerMap.get("new"), instanceOf(String.class)); String value = (String) innerMap.get("new"); assertThat(value, equalTo("bar")); - assertThat(data.isModified(), equalTo(true)); + assertThat(ingestDocument.isModified(), equalTo(true)); } public void testSetPropertyValueOnExistingParentTypeMismatch() { try { - data.setPropertyValue("fizz.buzz.new", "bar"); + ingestDocument.setPropertyValue("fizz.buzz.new", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], [java.util.Map] expected instead.")); - assertThat(data.isModified(), equalTo(false)); + assertThat(ingestDocument.isModified(), equalTo(false)); } } public void testSetPropertyValueOnExistingNullParent() { try { - data.setPropertyValue("fizz.foo_null.test", "bar"); + ingestDocument.setPropertyValue("fizz.foo_null.test", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { 
assertThat(e.getMessage(), equalTo("cannot add field to null parent, [java.util.Map] expected instead.")); - assertThat(data.isModified(), equalTo(false)); + assertThat(ingestDocument.isModified(), equalTo(false)); } } public void testSetPropertyValueNullName() { try { - data.setPropertyValue(null, "bar"); + ingestDocument.setPropertyValue(null, "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); - assertThat(data.isModified(), equalTo(false)); + assertThat(ingestDocument.isModified(), equalTo(false)); } } public void testSetPropertyValueEmptyName() { try { - data.setPropertyValue("", "bar"); + ingestDocument.setPropertyValue("", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); - assertThat(data.isModified(), equalTo(false)); + assertThat(ingestDocument.isModified(), equalTo(false)); } } public void testRemoveProperty() { - data.removeProperty("foo"); - assertThat(data.isModified(), equalTo(true)); - assertThat(data.getDocument().size(), equalTo(2)); - assertThat(data.getDocument().containsKey("foo"), equalTo(false)); + ingestDocument.removeProperty("foo"); + assertThat(ingestDocument.isModified(), equalTo(true)); + assertThat(ingestDocument.getSource().size(), equalTo(2)); + assertThat(ingestDocument.getSource().containsKey("foo"), equalTo(false)); } public void testRemoveInnerProperty() { - data.removeProperty("fizz.buzz"); - assertThat(data.getDocument().size(), equalTo(3)); - assertThat(data.getDocument().get("fizz"), instanceOf(Map.class)); + ingestDocument.removeProperty("fizz.buzz"); + assertThat(ingestDocument.getSource().size(), equalTo(3)); + assertThat(ingestDocument.getSource().get("fizz"), instanceOf(Map.class)); @SuppressWarnings("unchecked") - Map map = (Map)data.getDocument().get("fizz"); + Map map = (Map) 
ingestDocument.getSource().get("fizz"); assertThat(map.size(), equalTo(1)); assertThat(map.containsKey("buzz"), equalTo(false)); - data.removeProperty("fizz.foo_null"); + ingestDocument.removeProperty("fizz.foo_null"); assertThat(map.size(), equalTo(0)); - assertThat(data.getDocument().size(), equalTo(3)); - assertThat(data.getDocument().containsKey("fizz"), equalTo(true)); - assertThat(data.isModified(), equalTo(true)); + assertThat(ingestDocument.getSource().size(), equalTo(3)); + assertThat(ingestDocument.getSource().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.isModified(), equalTo(true)); } public void testRemoveNonExistingProperty() { - data.removeProperty("does_not_exist"); - assertThat(data.isModified(), equalTo(false)); - assertThat(data.getDocument().size(), equalTo(3)); + ingestDocument.removeProperty("does_not_exist"); + assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.getSource().size(), equalTo(3)); } public void testRemoveExistingParentTypeMismatch() { - data.removeProperty("foo.test"); - assertThat(data.isModified(), equalTo(false)); - assertThat(data.getDocument().size(), equalTo(3)); + ingestDocument.removeProperty("foo.test"); + assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.getSource().size(), equalTo(3)); } public void testRemoveNullProperty() { - data.removeProperty(null); - assertThat(data.isModified(), equalTo(false)); - assertThat(data.getDocument().size(), equalTo(3)); + ingestDocument.removeProperty(null); + assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.getSource().size(), equalTo(3)); } public void testRemoveEmptyProperty() { - data.removeProperty(""); - assertThat(data.isModified(), equalTo(false)); - assertThat(data.getDocument().size(), equalTo(3)); + ingestDocument.removeProperty(""); + assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.getSource().size(), equalTo(3)); 
} public void testEqualsAndHashcode() throws Exception { @@ -250,7 +250,7 @@ public class DataTests extends ESTestCase { String id = randomAsciiOfLengthBetween(1, 10); String fieldName = randomAsciiOfLengthBetween(1, 10); String fieldValue = randomAsciiOfLengthBetween(1, 10); - Data data = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); + IngestDocument ingestDocument = new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue)); boolean changed = false; String otherIndex; @@ -282,16 +282,16 @@ public class DataTests extends ESTestCase { document = Collections.singletonMap(fieldName, fieldValue); } - Data otherData = new Data(otherIndex, otherType, otherId, document); + IngestDocument otherIngestDocument = new IngestDocument(otherIndex, otherType, otherId, document); if (changed) { - assertThat(data, not(equalTo(otherData))); - assertThat(otherData, not(equalTo(data))); + assertThat(ingestDocument, not(equalTo(otherIngestDocument))); + assertThat(otherIngestDocument, not(equalTo(ingestDocument))); } else { - assertThat(data, equalTo(otherData)); - assertThat(otherData, equalTo(data)); - Data thirdData = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)); - assertThat(thirdData, equalTo(data)); - assertThat(data, equalTo(thirdData)); + assertThat(ingestDocument, equalTo(otherIngestDocument)); + assertThat(otherIngestDocument, equalTo(ingestDocument)); + IngestDocument thirdIngestDocument = new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue)); + assertThat(thirdIngestDocument, equalTo(ingestDocument)); + assertThat(ingestDocument, equalTo(thirdIngestDocument)); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 9b61ffa4fe3..a14ca5a31cf 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -106,8 +106,8 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(response.getResults().size(), equalTo(1)); assertThat(response.getResults().get(0), instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); - Data expectedData = new Data("index", "type", "id", Collections.singletonMap("foo", "bar")); - assertThat(simulateDocumentSimpleResult.getData(), equalTo(expectedData)); + IngestDocument expectedIngestDocument = new IngestDocument("index", "type", "id", Collections.singletonMap("foo", "bar")); + assertThat(simulateDocumentSimpleResult.getData(), equalTo(expectedIngestDocument)); assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java index 2a9c444d691..8b8c1da25f1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.processor.date; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -36,9 +36,9 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06 11:05:15"); - Data data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), 
equalTo("2010-06-12T11:05:15.000+02:00")); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T11:05:15.000+02:00")); } public void testJodaPatternMultipleFormats() { @@ -51,27 +51,27 @@ public class DateProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06"); - Data data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12/06/2010"); - data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12-06-2010"); - data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "2010"); - 
data = new Data("index", "type", "id", document); + ingestDocument = new IngestDocument("index", "type", "id", document); try { - dateProcessor.execute(data); + dateProcessor.execute(ingestDocument); fail("processor should have failed due to not supported date format"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), containsString("unable to parse date [2010]")); @@ -83,9 +83,9 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 giugno"); - Data data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); } public void testJodaPatternDefaultYear() { @@ -93,9 +93,9 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList("dd/MM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "12/06"); - Data data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); } public void testTAI64N() { @@ -104,9 +104,9 @@ public class DateProcessorTests extends ESTestCase { Map document = new HashMap<>(); String dateAsString = (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024"; document.put("date_as_string", dateAsString); - Data data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); } public void testUnixMs() { @@ -114,9 +114,9 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList(DateParserFactory.UNIX_MS), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); - Data data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } public void testUnix() { @@ -124,8 +124,8 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList(DateParserFactory.UNIX), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000.5"); - Data data = new Data("index", "type", "id", document); - dateProcessor.execute(data); - assertThat(data.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } } diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java index c93b4b78f1a..86de0a7862a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.ingest.processor.geoip; import com.maxmind.geoip2.DatabaseReader; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; @@ -38,13 +38,13 @@ public class GeoIpProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); - Data data = new Data("_index", "_type", "_id", document); - processor.execute(data); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", document); + processor.execute(ingestDocument); - assertThat(data.getDocument().size(), equalTo(2)); - assertThat(data.getDocument().get("source_field"), equalTo("82.170.213.79")); + assertThat(ingestDocument.getSource().size(), equalTo(2)); + assertThat(ingestDocument.getSource().get("source_field"), equalTo("82.170.213.79")); @SuppressWarnings("unchecked") - Map geoData = (Map) data.getDocument().get("target_field"); + Map geoData = (Map) ingestDocument.getSource().get("target_field"); assertThat(geoData.size(), equalTo(10)); assertThat(geoData.get("ip"), equalTo("82.170.213.79")); assertThat(geoData.get("country_iso_code"), equalTo("NL")); @@ -64,13 +64,13 @@ public class GeoIpProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); - Data data = new Data("_index", "_type", "_id", document); - processor.execute(data); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", 
document); + processor.execute(ingestDocument); - assertThat(data.getDocument().size(), equalTo(2)); - assertThat(data.getDocument().get("source_field"), equalTo("82.170.213.79")); + assertThat(ingestDocument.getSource().size(), equalTo(2)); + assertThat(ingestDocument.getSource().get("source_field"), equalTo("82.170.213.79")); @SuppressWarnings("unchecked") - Map geoData = (Map) data.getDocument().get("target_field"); + Map geoData = (Map) ingestDocument.getSource().get("target_field"); assertThat(geoData.size(), equalTo(4)); assertThat(geoData.get("ip"), equalTo("82.170.213.79")); assertThat(geoData.get("country_iso_code"), equalTo("NL")); @@ -84,10 +84,10 @@ public class GeoIpProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("source_field", "202.45.11.11"); - Data data = new Data("_index", "_type", "_id", document); - processor.execute(data); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", document); + processor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map geoData = (Map) data.getDocument().get("target_field"); + Map geoData = (Map) ingestDocument.getSource().get("target_field"); assertThat(geoData.size(), equalTo(0)); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java index d026b3d8fbc..9231a5db2be 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.processor.mutate; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -34,7 +34,7 @@ import static 
org.hamcrest.Matchers.nullValue; public class MutateProcessorTests extends ESTestCase { - private Data data; + private IngestDocument ingestDocument; @Before public void setData() { @@ -49,35 +49,35 @@ public class MutateProcessorTests extends ESTestCase { fizz.put("buzz", "hello world"); document.put("fizz", fizz); - data = new Data("index", "type", "id", document); + ingestDocument = new IngestDocument("index", "type", "id", document); } public void testUpdate() throws IOException { Map update = new HashMap<>(); update.put("foo", 123); Processor processor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("foo", Integer.class), equalTo(123)); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("foo", Integer.class), equalTo(123)); } public void testRename() throws IOException { Map rename = new HashMap<>(); rename.put("foo", "bar"); Processor processor = new MutateProcessor(null, rename, null, null, null, null, null, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("bar", String.class), equalTo("bar")); - assertThat(data.hasPropertyValue("foo"), is(false)); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("bar", String.class), equalTo("bar")); + assertThat(ingestDocument.hasPropertyValue("foo"), is(false)); } public void testConvert() throws IOException { Map convert = new HashMap<>(); convert.put("num", "integer"); Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("num", 
Integer.class), equalTo(64)); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("num", Integer.class), equalTo(64)); } public void testConvertNullField() throws IOException { @@ -85,7 +85,7 @@ public class MutateProcessorTests extends ESTestCase { convert.put("null", "integer"); Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); try { - processor.execute(data); + processor.execute(ingestDocument); fail("processor execute should have failed"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Field \"null\" is null, cannot be converted to a/an integer")); @@ -96,18 +96,18 @@ public class MutateProcessorTests extends ESTestCase { Map convert = new HashMap<>(); convert.put("arr", "integer"); Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("arr", List.class), equalTo(Arrays.asList(1, 2, 3))); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("arr", List.class), equalTo(Arrays.asList(1, 2, 3))); } public void testSplit() throws IOException { Map split = new HashMap<>(); split.put("ip", "\\."); Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("ip", List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("ip", List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); } public void testSplitNullValue() throws IOException { @@ 
-115,7 +115,7 @@ public class MutateProcessorTests extends ESTestCase { split.put("not.found", "\\."); Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); try { - processor.execute(data); + processor.execute(ingestDocument); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Cannot split field. [not.found] is null.")); @@ -125,16 +125,16 @@ public class MutateProcessorTests extends ESTestCase { public void testGsub() throws IOException { List gsubExpressions = Collections.singletonList(new GsubExpression("ip", Pattern.compile("\\."), "-")); Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("ip", String.class), equalTo("127-0-0-1")); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("ip", String.class), equalTo("127-0-0-1")); } public void testGsub_NullValue() throws IOException { List gsubExpressions = Collections.singletonList(new GsubExpression("null_field", Pattern.compile("\\."), "-")); Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); try { - processor.execute(data); + processor.execute(ingestDocument); fail("processor execution should have failed"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Field \"null_field\" is null, cannot match pattern.")); @@ -145,34 +145,34 @@ public class MutateProcessorTests extends ESTestCase { HashMap join = new HashMap<>(); join.put("arr", "-"); Processor processor = new MutateProcessor(null, null, null, null, null, join, null, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("arr", String.class), 
equalTo("1-2-3")); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("arr", String.class), equalTo("1-2-3")); } public void testRemove() throws IOException { List remove = Arrays.asList("foo", "ip"); Processor processor = new MutateProcessor(null, null, null, null, null, null, remove, null, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(5)); - assertThat(data.getPropertyValue("foo", Object.class), nullValue()); - assertThat(data.getPropertyValue("ip", Object.class), nullValue()); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(5)); + assertThat(ingestDocument.getPropertyValue("foo", Object.class), nullValue()); + assertThat(ingestDocument.getPropertyValue("ip", Object.class), nullValue()); } public void testTrim() throws IOException { List trim = Arrays.asList("to_strip", "foo"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("foo", String.class), equalTo("bar")); - assertThat(data.getPropertyValue("to_strip", String.class), equalTo("clean")); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("foo", String.class), equalTo("bar")); + assertThat(ingestDocument.getPropertyValue("to_strip", String.class), equalTo("clean")); } public void testTrimNullValue() throws IOException { List trim = Collections.singletonList("not.found"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); try { - processor.execute(data); + processor.execute(ingestDocument); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Cannot trim field. 
[not.found] is null.")); @@ -182,16 +182,16 @@ public class MutateProcessorTests extends ESTestCase { public void testUppercase() throws IOException { List uppercase = Collections.singletonList("foo"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("foo", String.class), equalTo("BAR")); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("foo", String.class), equalTo("BAR")); } public void testUppercaseNullValue() throws IOException { List uppercase = Collections.singletonList("not.found"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); try { - processor.execute(data); + processor.execute(ingestDocument); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Cannot uppercase field. 
[not.found] is null.")); @@ -201,16 +201,16 @@ public class MutateProcessorTests extends ESTestCase { public void testLowercase() throws IOException { List lowercase = Collections.singletonList("alpha"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); - processor.execute(data); - assertThat(data.getDocument().size(), equalTo(7)); - assertThat(data.getPropertyValue("alpha", String.class), equalTo("abcd")); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(7)); + assertThat(ingestDocument.getPropertyValue("alpha", String.class), equalTo("abcd")); } public void testLowercaseNullValue() throws IOException { List lowercase = Collections.singletonList("not.found"); Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); try { - processor.execute(data); + processor.execute(ingestDocument); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Cannot lowercase field. 
[not.found] is null.")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index f970f24b237..3f9ec3517e6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; @@ -59,25 +59,25 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecute_pipelineDoesNotExist() { when(store.get("_id")).thenReturn(null); - Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.emptyMap()); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); - executionService.execute(data, "_id", listener); + executionService.execute(ingestDocument, "_id", listener); verify(listener).failed(any(IllegalArgumentException.class)); - verify(listener, times(0)).executed(data); + verify(listener, times(0)).executed(ingestDocument); } public void testExecute_success() throws Exception { Processor processor = mock(Processor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); - Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.emptyMap()); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); - 
executionService.execute(data, "_id", listener); + executionService.execute(ingestDocument, "_id", listener); assertBusy(new Runnable() { @Override public void run() { - verify(processor).execute(data); - verify(listener).executed(data); + verify(processor).execute(ingestDocument); + verify(listener).executed(ingestDocument); verify(listener, times(0)).failed(any(Exception.class)); } }); @@ -86,15 +86,15 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecute_failure() throws Exception { Processor processor = mock(Processor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); - Data data = new Data("_index", "_type", "_id", Collections.emptyMap()); - doThrow(new RuntimeException()).when(processor).execute(data); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.emptyMap()); + doThrow(new RuntimeException()).when(processor).execute(ingestDocument); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); - executionService.execute(data, "_id", listener); + executionService.execute(ingestDocument, "_id", listener); assertBusy(new Runnable() { @Override public void run() { - verify(processor).execute(data); - verify(listener, times(0)).executed(data); + verify(processor).execute(ingestDocument); + verify(listener, times(0)).executed(ingestDocument); verify(listener).failed(any(RuntimeException.class)); } }); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index c0ff79b056f..cee6b1b1d78 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -27,7 +27,7 @@ import 
org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.mutate.MutateProcessor; @@ -80,7 +80,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verifyZeroInteractions(actionFilterChain); } @@ -93,7 +93,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verifyZeroInteractions(actionFilterChain); } @@ -121,16 +121,16 @@ public class IngestActionFilterTests extends ESTestCase { Answer answer = new Answer() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { - Data data = (Data) invocationOnMock.getArguments()[0]; + IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; - listener.executed(data); + listener.executed(ingestDocument); return null; } }; - doAnswer(answer).when(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + 
doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verify(actionFilterChain).proceed("_action", indexRequest, actionListener); verifyZeroInteractions(actionListener); } @@ -151,10 +151,10 @@ public class IngestActionFilterTests extends ESTestCase { return null; } }; - doAnswer(answer).when(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(Data.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verify(actionListener).onFailure(exception); verifyZeroInteractions(actionFilterChain); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java index 1cc3f6baada..165ea62e1f2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.plugin.ingest.transport; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.Data; +import 
org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -39,7 +39,7 @@ public class TransportDataTests extends ESTestCase { String id = randomAsciiOfLengthBetween(1, 10); String fieldName = randomAsciiOfLengthBetween(1, 10); String fieldValue = randomAsciiOfLengthBetween(1, 10); - TransportData transportData = new TransportData(new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue))); + TransportData transportData = new TransportData(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); boolean changed = false; String otherIndex; @@ -71,23 +71,23 @@ public class TransportDataTests extends ESTestCase { document = Collections.singletonMap(fieldName, fieldValue); } - TransportData otherTransportData = new TransportData(new Data(otherIndex, otherType, otherId, document)); + TransportData otherTransportData = new TransportData(new IngestDocument(otherIndex, otherType, otherId, document)); if (changed) { assertThat(transportData, not(equalTo(otherTransportData))); assertThat(otherTransportData, not(equalTo(transportData))); } else { assertThat(transportData, equalTo(otherTransportData)); assertThat(otherTransportData, equalTo(transportData)); - TransportData thirdTransportData = new TransportData(new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue))); + TransportData thirdTransportData = new TransportData(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); assertThat(thirdTransportData, equalTo(transportData)); assertThat(transportData, equalTo(thirdTransportData)); } } public void testSerialization() throws IOException { - Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), 
Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); - TransportData transportData = new TransportData(data); + TransportData transportData = new TransportData(ingestDocument); BytesStreamOutput out = new BytesStreamOutput(); transportData.writeTo(out); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java index 7f44fc08b9b..29c4faa17f7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.plugin.ingest.PipelineStore; @@ -29,6 +29,9 @@ import org.junit.Before; import java.io.IOException; import java.util.*; +import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; +import static org.elasticsearch.ingest.IngestDocument.MetaData.INDEX; +import static org.elasticsearch.ingest.IngestDocument.MetaData.TYPE; import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -80,12 +83,12 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { assertThat(actualRequest.isVerbose(), equalTo(false)); assertThat(actualRequest.getDocuments().size(), equalTo(numDocs)); Iterator> expectedDocsIterator = expectedDocs.iterator(); - for (Data data : actualRequest.getDocuments()) { + for (IngestDocument ingestDocument : 
actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); - assertThat(data.getDocument(), equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(data.getIndex(), equalTo(expectedDocument.get(Fields.INDEX))); - assertThat(data.getType(), equalTo(expectedDocument.get(Fields.TYPE))); - assertThat(data.getId(), equalTo(expectedDocument.get(Fields.ID))); + assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); + assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(Fields.INDEX))); + assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(Fields.TYPE))); + assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(Fields.ID))); } assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)); @@ -133,12 +136,12 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { assertThat(actualRequest.isVerbose(), equalTo(false)); assertThat(actualRequest.getDocuments().size(), equalTo(numDocs)); Iterator> expectedDocsIterator = expectedDocs.iterator(); - for (Data data : actualRequest.getDocuments()) { + for (IngestDocument ingestDocument : actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); - assertThat(data.getDocument(), equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(data.getIndex(), equalTo(expectedDocument.get(Fields.INDEX))); - assertThat(data.getType(), equalTo(expectedDocument.get(Fields.TYPE))); - assertThat(data.getId(), equalTo(expectedDocument.get(Fields.ID))); + assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); + assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(Fields.INDEX))); + assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(Fields.TYPE))); + assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(Fields.ID))); } 
assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java index e0b1c1c0f88..183aed0a5a4 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -38,9 +38,9 @@ public class SimulateDocumentSimpleResultTests extends ESTestCase { if (isFailure) { simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(new IllegalArgumentException("test")); } else { - Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); - simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(data); + simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(ingestDocument); } BytesStreamOutput out = new BytesStreamOutput(); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index bd2e6397a74..901ecb29dca 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; @@ -40,7 +40,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { private SimulateExecutionService executionService; private Pipeline pipeline; private Processor processor; - private Data data; + private IngestDocument ingestDocument; @Before public void setup() { @@ -53,7 +53,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { processor = mock(Processor.class); when(processor.getType()).thenReturn("mock"); pipeline = new Pipeline("_id", "_description", Arrays.asList(processor, processor)); - data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); } @After @@ -62,35 +62,35 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItem() throws Exception { - SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, data); - verify(processor, times(2)).execute(data); + SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, ingestDocument); + verify(processor, times(2)).execute(ingestDocument); assertThat(actualItemResponse, 
instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), not(sameInstance(data))); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), equalTo(data)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), not(sameInstance(ingestDocument))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), nullValue()); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(data))); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(data)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(ingestDocument))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); } public void testExecuteItem() throws Exception { - SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); - verify(processor, times(2)).execute(data); + SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, ingestDocument); + verify(processor, times(2)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); 
SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; - assertThat(simulateDocumentSimpleResult.getData(), equalTo(data)); + assertThat(simulateDocumentSimpleResult.getData(), equalTo(ingestDocument)); assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } public void testExecuteVerboseItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - doThrow(e).doNothing().when(processor).execute(data); - SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, data); - verify(processor, times(2)).execute(data); + doThrow(e).doNothing().when(processor).execute(ingestDocument); + SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, ingestDocument); + verify(processor, times(2)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); @@ -100,8 +100,8 @@ public class SimulateExecutionServiceTests extends ESTestCase { RuntimeException runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); assertThat(runtimeException.getMessage(), equalTo("processor failed")); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(data))); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(data)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(ingestDocument))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(ingestDocument)); 
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); assertThat(runtimeException.getMessage(), equalTo("processor failed")); @@ -109,9 +109,9 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); - doThrow(e).when(processor).execute(data); - SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, data); - verify(processor, times(1)).execute(data); + doThrow(e).when(processor).execute(ingestDocument); + SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, ingestDocument); + verify(processor, times(1)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; assertThat(simulateDocumentSimpleResult.getData(), nullValue()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java index 0a325ca3268..ab7803c719d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import 
java.io.IOException; @@ -42,7 +42,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { List results = new ArrayList<>(numResults); for (int i = 0; i < numResults; i++) { boolean isFailure = randomBoolean(); - Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); if (isVerbose) { int numProcessors = randomIntBetween(1, 10); @@ -53,18 +53,18 @@ public class SimulatePipelineResponseTests extends ESTestCase { if (isFailure) { processorResult = new SimulateProcessorResult(processorId, new IllegalArgumentException("test")); } else { - processorResult = new SimulateProcessorResult(processorId, data); + processorResult = new SimulateProcessorResult(processorId, ingestDocument); } processorResults.add(processorResult); } results.add(new SimulateDocumentVerboseResult(processorResults)); } else { - results.add(new SimulateDocumentSimpleResult(data)); + results.add(new SimulateDocumentSimpleResult(ingestDocument)); SimulateDocumentSimpleResult simulateDocumentSimpleResult; if (isFailure) { simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(new IllegalArgumentException("test")); } else { - simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(data); + simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(ingestDocument); } results.add(simulateDocumentSimpleResult); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java index 55024219347..d347c6749e3 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.Data; +import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -39,9 +39,9 @@ public class SimulateProcessorResultTests extends ESTestCase { if (isFailure) { simulateProcessorResult = new SimulateProcessorResult(processorId, new IllegalArgumentException("test")); } else { - Data data = new Data(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), + IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); - simulateProcessorResult = new SimulateProcessorResult(processorId, data); + simulateProcessorResult = new SimulateProcessorResult(processorId, ingestDocument); } BytesStreamOutput out = new BytesStreamOutput(); From 8b1f117e51d92d2add60265e8bb13ba40daa89cb Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 20 Nov 2015 14:55:07 +0100 Subject: [PATCH 073/347] Instead of failing the entire bulk request if the pipeline fails, only fail a bulk item. 
--- .../ingest/PipelineExecutionService.java | 6 +- .../ingest/transport/IngestActionFilter.java | 136 ++++++++++++++++-- .../elasticsearch/ingest/IngestClientIT.java | 46 ++++++ .../transport/IngestActionFilterTests.java | 128 ++++++++++++++++- 4 files changed, 296 insertions(+), 20 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 82e3a403fd1..4a963beecc8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -43,7 +43,7 @@ public class PipelineExecutionService { public void execute(IngestDocument ingestDocument, String pipelineId, Listener listener) { Pipeline pipeline = store.get(pipelineId); if (pipeline == null) { - listener.failed(new IllegalArgumentException(LoggerMessageFormat.format("pipeline with id [{}] does not exist", pipelineId))); + listener.failed(new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist")); return; } @@ -53,7 +53,7 @@ public class PipelineExecutionService { try { pipeline.execute(ingestDocument); listener.executed(ingestDocument); - } catch (Exception e) { + } catch (Throwable e) { listener.failed(e); } } @@ -64,7 +64,7 @@ public class PipelineExecutionService { void executed(IngestDocument ingestDocument); - void failed(Exception e); + void failed(Throwable e); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 37099df9b5f..a80d10a18e4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -22,7 +22,9 @@ package org.elasticsearch.plugin.ingest.transport; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; @@ -33,10 +35,9 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; -import java.util.Iterator; -import java.util.Map; +import java.util.*; -public class IngestActionFilter extends AbstractComponent implements ActionFilter { +public final class IngestActionFilter extends AbstractComponent implements ActionFilter { private final PipelineExecutionService executionService; @@ -61,7 +62,10 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte processIndexRequest(action, listener, chain, (IndexRequest) request, pipelineId); } else if (request instanceof BulkRequest) { BulkRequest bulkRequest = (BulkRequest) request; - processBulkIndexRequest(action, listener, chain, bulkRequest, pipelineId, bulkRequest.requests().iterator()); + @SuppressWarnings("unchecked") + ActionListener actionListener = (ActionListener) listener; + BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(bulkRequest); + processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, actionListener); } else { chain.proceed(action, request, listener); } @@ -94,22 +98,31 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte } @Override - public void failed(Exception e) { + public void 
failed(Throwable e) { logger.error("failed to execute pipeline [{}]", e, pipelineId); listener.onFailure(e); } }); } - void processBulkIndexRequest(String action, ActionListener listener, ActionFilterChain chain, BulkRequest bulkRequest, String pipelineId, Iterator requests) { - if (!requests.hasNext()) { - chain.proceed(action, bulkRequest, listener); + void processBulkIndexRequest(BulkRequestModifier bulkRequestModifier, String pipelineId, String action, ActionFilterChain chain, ActionListener listener) { + if (!bulkRequestModifier.hasNext()) { + BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); + ActionListener actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(listener); + if (bulkRequest.requests().isEmpty()) { + // in this stage, the transport bulk action can't deal with a bulk request with no requests, + // so we stop and send a empty response back to the client. + // (this will happen if all preprocessing all items in the bulk failed) + actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0)); + } else { + chain.proceed(action, bulkRequest, actionListener); + } return; } - ActionRequest actionRequest = requests.next(); + ActionRequest actionRequest = bulkRequestModifier.next(); if (!(actionRequest instanceof IndexRequest)) { - processBulkIndexRequest(action, listener, chain, bulkRequest, pipelineId, requests); + processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); return; } @@ -122,13 +135,14 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte if (ingestDocument.isModified()) { indexRequest.source(ingestDocument.getSource()); } - processBulkIndexRequest(action, listener, chain, bulkRequest, pipelineId, requests); + processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); } @Override - public void failed(Exception e) { - logger.error("failed to execute pipeline [{}]", e, pipelineId); - listener.onFailure(e); + public void 
failed(Throwable e) { + logger.debug("failed to execute pipeline [{}]", e, pipelineId); + bulkRequestModifier.markCurrentItemAsFailed(e); + processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); } }); } @@ -137,4 +151,98 @@ public class IngestActionFilter extends AbstractComponent implements ActionFilte public int order() { return Integer.MAX_VALUE; } + + final static class BulkRequestModifier implements Iterator { + + final BulkRequest bulkRequest; + final Set failedSlots; + final List itemResponses; + + int currentSlot = -1; + int[] originalSlots; + + BulkRequestModifier(BulkRequest bulkRequest) { + this.bulkRequest = bulkRequest; + this.failedSlots = new HashSet<>(); + this.itemResponses = new ArrayList<>(bulkRequest.requests().size()); + } + + @Override + public ActionRequest next() { + return bulkRequest.requests().get(++currentSlot); + } + + @Override + public boolean hasNext() { + return (currentSlot + 1) < bulkRequest.requests().size(); + } + + BulkRequest getBulkRequest() { + if (itemResponses.isEmpty()) { + return bulkRequest; + } else { + BulkRequest modifiedBulkRequest = new BulkRequest(bulkRequest); + modifiedBulkRequest.refresh(bulkRequest.refresh()); + modifiedBulkRequest.consistencyLevel(bulkRequest.consistencyLevel()); + modifiedBulkRequest.timeout(bulkRequest.timeout()); + + int slot = 0; + originalSlots = new int[bulkRequest.requests().size() - failedSlots.size()]; + for (int i = 0; i < bulkRequest.requests().size(); i++) { + ActionRequest request = bulkRequest.requests().get(i); + if (failedSlots.contains(i) == false) { + modifiedBulkRequest.add(request); + originalSlots[slot++] = i; + } + } + return modifiedBulkRequest; + } + } + + ActionListener wrapActionListenerIfNeeded(ActionListener actionListener) { + if (itemResponses.isEmpty()) { + return actionListener; + } else { + return new IngestBulkResponseListener(originalSlots, itemResponses, actionListener); + } + } + + void markCurrentItemAsFailed(Throwable e) { + 
IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(currentSlot); + // We hit a error during preprocessing a request, so we: + // 1) Remember the request item slot from the bulk, so that we're done processing all requests we know what failed + // 2) Add a bulk item failure for this request + // 3) Continue with the next request in the bulk. + failedSlots.add(currentSlot); + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexRequest.index(), indexRequest.type(), indexRequest.id(), e); + itemResponses.add(new BulkItemResponse(currentSlot, indexRequest.opType().lowercase(), failure)); + } + + } + + private final static class IngestBulkResponseListener implements ActionListener { + + private final int[] originalSlots; + private final List itemResponses; + private final ActionListener actionListener; + + IngestBulkResponseListener(int[] originalSlots, List itemResponses, ActionListener actionListener) { + this.itemResponses = itemResponses; + this.actionListener = actionListener; + this.originalSlots = originalSlots; + } + + @Override + public void onResponse(BulkResponse bulkItemResponses) { + for (int i = 0; i < bulkItemResponses.getItems().length; i++) { + itemResponses.add(originalSlots[i], bulkItemResponses.getItems()[i]); + } + actionListener.onResponse(new BulkResponse(itemResponses.toArray(new BulkItemResponse[itemResponses.size()]), bulkItemResponses.getTookInMillis())); + } + + @Override + public void onFailure(Throwable e) { + actionListener.onFailure(e); + } + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index a14ca5a31cf..6d4b2bdcf64 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -19,7 +19,15 @@ package org.elasticsearch.ingest; +import org.elasticsearch.action.ActionRequest; 
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; @@ -39,6 +47,7 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -111,6 +120,43 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } + public void testBulkWithIngestFailures() { + createIndex("index"); + + int numRequests = scaledRandomIntBetween(32, 128); + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_none_existing_id"); + for (int i = 0; i < numRequests; i++) { + if (i % 2 == 0) { + UpdateRequest updateRequest = new UpdateRequest("index", "type", Integer.toString(i)); + updateRequest.upsert("field", "value"); + updateRequest.doc(new HashMap()); + bulkRequest.add(updateRequest); + } else { + IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)); + indexRequest.source("field1", "value1"); + bulkRequest.add(indexRequest); + } + } + + BulkResponse response = client().bulk(bulkRequest).actionGet(); + assertThat(response.getItems().length, equalTo(bulkRequest.requests().size())); + for (int i = 0; i < bulkRequest.requests().size(); i++) { + ActionRequest request = bulkRequest.requests().get(i); 
+ BulkItemResponse itemResponse = response.getItems()[i]; + if (request instanceof IndexRequest) { + BulkItemResponse.Failure failure = itemResponse.getFailure(); + assertThat(failure.getMessage(), equalTo("java.lang.IllegalArgumentException: pipeline with id [_none_existing_id] does not exist")); + } else if (request instanceof UpdateRequest) { + UpdateResponse updateResponse = itemResponse.getResponse(); + assertThat(updateResponse.getId(), equalTo(Integer.toString(i))); + assertThat(updateResponse.isCreated(), is(true)); + } else { + fail("unexpected request item [" + request + "]"); + } + } + } + public void test() throws Exception { new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) .setId("_id") diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index cee6b1b1d78..336c351d497 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -21,7 +21,10 @@ package org.elasticsearch.plugin.ingest.transport; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilterChain; @@ -40,11 +43,12 @@ import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; +import java.util.*; +import static 
org.elasticsearch.plugin.ingest.transport.IngestActionFilter.*; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.*; @@ -225,4 +229,122 @@ public class IngestActionFilterTests extends ESTestCase { threadPool.shutdown(); } + public void testApplyWithBulkRequestWithFailure() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + int numRequest = scaledRandomIntBetween(8, 64); + int numNonIndexRequests = 0; + for (int i = 0; i < numRequest; i++) { + if (i % 2 == 0) { + numNonIndexRequests++; + ActionRequest request; + if (randomBoolean()) { + request = new DeleteRequest("_index", "_type", "_id"); + } else { + request = new UpdateRequest("_index", "_type", "_id"); + } + bulkRequest.add(request); + } else { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field1", "value1"); + bulkRequest.add(indexRequest); + } + } + + RuntimeException exception = new RuntimeException(); + Answer answer = (invocationOnMock) -> { + PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; + listener.failed(exception); + return null; + }; + doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + + ActionListener actionListener = mock(ActionListener.class); + RecordRequestAFC actionFilterChain = new RecordRequestAFC(); + + filter.apply("_action", bulkRequest, actionListener, actionFilterChain); + + BulkRequest interceptedRequests = actionFilterChain.getRequest(); + assertThat(interceptedRequests.requests().size(), equalTo(numNonIndexRequests)); + + verifyZeroInteractions(actionListener); + } + + public void testBulkRequestModifier() { + 
int numRequests = scaledRandomIntBetween(8, 64); + BulkRequest bulkRequest = new BulkRequest(); + for (int i = 0; i < numRequests; i++) { + bulkRequest.add(new IndexRequest("_index", "_type", String.valueOf(i)).source("{}")); + } + CaptureActionListener actionListener = new CaptureActionListener(); + BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(bulkRequest); + + int i = 0; + Set failedSlots = new HashSet<>(); + while (bulkRequestModifier.hasNext()) { + IndexRequest indexRequest = (IndexRequest) bulkRequestModifier.next(); + if (randomBoolean()) { + bulkRequestModifier.markCurrentItemAsFailed(new RuntimeException()); + failedSlots.add(i); + } + i++; + } + + assertThat(bulkRequestModifier.getBulkRequest().requests().size(), equalTo(numRequests - failedSlots.size())); + // simulate that we actually executed the modified bulk request: + ActionListener result = bulkRequestModifier.wrapActionListenerIfNeeded(actionListener); + result.onResponse(new BulkResponse(new BulkItemResponse[numRequests - failedSlots.size()], 0)); + + BulkResponse bulkResponse = actionListener.getResponse(); + for (int j = 0; j < bulkResponse.getItems().length; j++) { + if (failedSlots.contains(j)) { + BulkItemResponse item = bulkResponse.getItems()[j]; + assertThat(item.isFailed(), is(true)); + assertThat(item.getFailure().getIndex(), equalTo("_index")); + assertThat(item.getFailure().getType(), equalTo("_type")); + assertThat(item.getFailure().getId(), equalTo(String.valueOf(j))); + assertThat(item.getFailure().getMessage(), equalTo("java.lang.RuntimeException")); + } else { + assertThat(bulkResponse.getItems()[j], nullValue()); + } + } + } + + private final static class RecordRequestAFC implements ActionFilterChain { + + private ActionRequest request; + + @Override + public void proceed(String action, ActionRequest request, ActionListener listener) { + this.request = request; + } + + @Override + public void proceed(String action, ActionResponse response, ActionListener 
listener) { + + } + + public > T getRequest() { + return (T) request; + } + } + + private final static class CaptureActionListener implements ActionListener { + + private BulkResponse response; + + @Override + public void onResponse(BulkResponse bulkItemResponses) { + this.response = bulkItemResponses ; + } + + @Override + public void onFailure(Throwable e) { + } + + public BulkResponse getResponse() { + return response; + } + } + } From 1e9d5c7b22f1a33660578568dabeef9ff259cc37 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 24 Nov 2015 13:41:40 +0100 Subject: [PATCH 074/347] test: also test what happens if all index requests fail to be processed by the pipeline --- .../transport/IngestActionFilterTests.java | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 336c351d497..7185db944b2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -229,6 +229,34 @@ public class IngestActionFilterTests extends ESTestCase { threadPool.shutdown(); } + + public void testApplyWithBulkRequestWithFailureAllFailed() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + int numRequest = scaledRandomIntBetween(0, 8); + for (int i = 0; i < numRequest; i++) { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field1", "value1"); + bulkRequest.add(indexRequest); + } + + RuntimeException exception = new RuntimeException(); + Answer answer = (invocationOnMock) -> { + PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) 
invocationOnMock.getArguments()[2]; + listener.failed(exception); + return null; + }; + doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + + @SuppressWarnings("unchecked") + ActionListener actionListener = mock(ActionListener.class); + RecordRequestAFC actionFilterChain = new RecordRequestAFC(); + + filter.apply("_action", bulkRequest, actionListener, actionFilterChain); + + verify(actionListener, times(1)).onResponse(any()); + } + public void testApplyWithBulkRequestWithFailure() throws Exception { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); From 8f1f5d4da0eafd363b277a1e0d9aa78c86e741e3 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 23 Nov 2015 14:38:31 +0100 Subject: [PATCH 075/347] Split mutate processor into one processor per function --- docs/plugins/ingest.asciidoc | 175 +++++----- .../org/elasticsearch/ingest/Pipeline.java | 2 +- .../processor/AbstractStringProcessor.java | 68 ++++ .../ingest/processor/add/AddProcessor.java | 67 ++++ .../processor/convert/ConvertProcessor.java | 141 ++++++++ .../{mutate => gsub}/GsubExpression.java | 2 +- .../ingest/processor/gsub/GsubProcessor.java | 93 +++++ .../ingest/processor/join/JoinProcessor.java | 77 ++++ .../lowercase/LowercaseProcessor.java | 56 +++ .../processor/mutate/MutateProcessor.java | 329 ------------------ .../processor/remove/RemoveProcessor.java | 69 ++++ .../processor/rename/RenameProcessor.java | 73 ++++ .../processor/split/SplitProcessor.java | 73 ++++ .../ingest/processor/trim/TrimProcessor.java | 55 +++ .../uppercase/UppercaseProcessor.java | 56 +++ .../plugin/ingest/IngestModule.java | 22 +- .../ingest/PipelineFactoryTests.java | 47 +-- .../ingest/RandomDocumentPicks.java | 216 ++++++++++++ .../AbstractStringProcessorTestCase.java | 84 +++++ .../add/AddProcessorFactoryTests.java | 53 +++ .../processor/add/AddProcessorTests.java | 82 +++++ 
.../convert/ConvertProcessorFactoryTests.java | 69 ++++ .../convert/ConvertProcessorTests.java | 325 +++++++++++++++++ .../gsub/GsubProcessorFactoryTests.java | 114 ++++++ .../processor/gsub/GsubProcessorTests.java | 79 +++++ .../join/JoinProcessorFactoryTests.java | 53 +++ .../processor/join/JoinProcessorTests.java | 117 +++++++ .../LowercaseProcessorFactoryTests.java | 53 +++ .../lowercase/LowercaseProcessorTests.java | 38 ++ .../mutate/MutateProcessorFactoryTests.java | 225 ------------ .../mutate/MutateProcessorTests.java | 219 ------------ .../remove/RemoveProcessorFactoryTests.java | 54 +++ .../remove/RemoveProcessorTests.java | 59 ++++ .../rename/RenameProcessorFactoryTests.java | 53 +++ .../rename/RenameProcessorTests.java | 77 ++++ .../split/SplitProcessorFactoryTests.java | 52 +++ .../processor/split/SplitProcessorTests.java | 74 ++++ .../trim/TrimProcessorFactoryTests.java | 54 +++ .../processor/trim/TrimProcessorTests.java | 55 +++ .../UppercaseProcessorFactoryTests.java | 53 +++ .../uppercase/UppercaseProcessorTests.java | 39 +++ .../plugin/ingest/PipelineStoreTests.java | 7 +- .../transport/IngestActionFilterTests.java | 28 +- .../rest-api-spec/test/ingest/20_crud.yaml | 4 +- .../rest-api-spec/test/ingest/60_mutate.yaml | 132 +++++++ .../test/ingest/60_mutate_processor.yaml | 50 --- .../test/ingest/80_simulate.yaml | 32 +- 47 files changed, 2984 insertions(+), 971 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{mutate => gsub}/GsubExpression.java (97%) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java create mode 100644 
plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java create mode 100644 
plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml delete mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml diff --git 
a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index d94d6932d45..3532204fee2 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -3,26 +3,64 @@ === Processors -==== Mutate Processor - -The Mutate Processor applies functions on the structure of a document. The processor comes with a few -functions to help achieve this. - -The following are the supported configuration actions and how to use them. - -===== Convert -Convert a field's value to a different type, like turning a string to an integer. -If the field value is an array, all members will be converted. - -The supported types include: `integer`, `float`, `string`, and `boolean`. - -`boolean` will set a field to "true" if its string value does not match any of the following: "false", "0", "off", "no". +==== Add processor +Adds one or more fields and associates them with the specified values. If a field already exists, +its value will be replaced with the provided one. [source,js] -------------------------------------------------- { - "mutate": { - "convert": { + "add": { + "fields": { + "field": 582.1 + } + } +} +-------------------------------------------------- + +==== Remove processor +Removes one or more existing fields. If a field doesn't exist, nothing will happen. + +[source,js] +-------------------------------------------------- +{ + "remove": { + "fields": [ + "field1","field2" + ] + } +} +-------------------------------------------------- + +==== Rename processor +Renames one or more existing fields. If a field doesn't exist, an exception will be thrown. + +[source,js] +-------------------------------------------------- +{ + "rename": { + "fields": { + "field1": "field2" + } + } +} +-------------------------------------------------- + + +==== Convert processor +Converts one or more field value to a different type, like turning a string to an integer. +If the field value is an array, all members will be converted. 
+ +The supported types include: `integer`, `float`, `string`, and `boolean`. + +`boolean` will set a field to true if its string value is equal to `true` (ignore case), to +false if its string value is equal to `false` (ignore case) and it will throw exception otherwise. + +[source,js] +-------------------------------------------------- +{ + "convert": { + "fields": { "field1": "integer", "field2": "float" } @@ -30,132 +68,95 @@ The supported types include: `integer`, `float`, `string`, and `boolean`. } -------------------------------------------------- -===== Gsub -Convert a string field by applying a regular expression and a replacement. -If the field is not a string, no action will be taken. +==== Gsub processor +Converts a string field by applying a regular expression and a replacement. +If the field is not a string, the processor will throw an exception. -This configuration takes an array consisting of two elements per field/substition. One for the -pattern to be replaced, and the second for the pattern to replace with. +This configuration takes an `expression` array consisting of objects. Each object +holds three elements: `field` for the field name, `pattern` for the +pattern to be replaced, and `replacement` for the string to replace the matching patterns with. [source,js] -------------------------------------------------- { - "mutate": { - "gsub": { - "field1": ["\.", "-"] - } + "gsub": { + "expressions": [ + { + "field": "field1", + "pattern": "\.", + "replacement": "-" + } + ] } } -------------------------------------------------- -===== Join -Join an array with a separator character. Does nothing on non-array fields. +==== Join processor +Joins each element of an array into a single string using a separator character between each element. +Throws error when the field is not an array. 
[source,js] -------------------------------------------------- { - "mutate": { - "join": { + "join": { + "fields": { "joined_array_field": "other_array_field" } } } -------------------------------------------------- -===== Lowercase -Convert a string to its lowercase equivalent. - -[source,js] --------------------------------------------------- -{ - "mutate": { - "lowercase": ["foo", "bar"] - } -} --------------------------------------------------- - -===== Remove -Remove one or more fields. - -[source,js] --------------------------------------------------- -{ - "mutate": { - "remove": ["foo", "bar"] - } -} --------------------------------------------------- - -===== Rename -Renames one or more fields. - -[source,js] --------------------------------------------------- -{ - "mutate": { - "rename": { - "foo": "update_foo", - "bar": "new_bar" - } - } -} --------------------------------------------------- - -===== Split +==== Split processor Split a field to an array using a separator character. Only works on string fields. [source,js] -------------------------------------------------- { - "mutate": { - "split": { + "split": { + "fields": { "message": "," } } } -------------------------------------------------- -===== Strip -Strip whitespace from field. NOTE: this only works on leading and trailing whitespace. +==== Lowercase processor +Converts a string to its lowercase equivalent. [source,js] -------------------------------------------------- { - "mutate": { - "strip": ["foo", "bar"] + "lowercase": { + "fields": ["foo", "bar"] } } -------------------------------------------------- -===== Update -Update an existing field with a new value. If the field does not exist, then no action will be taken. +==== Uppercase processor +Converts a string to its uppercase equivalent. 
[source,js] -------------------------------------------------- { - "mutate": { - "update": { - "field": 582.1 - } + "uppercase": { + "fields": ["foo", "bar"] } } -------------------------------------------------- -===== Uppercase -Convert a string to its uppercase equivalent. +==== Trim processor +Trims whitespace from field. NOTE: this only works on leading and trailing whitespaces. [source,js] -------------------------------------------------- { - "mutate": { - "uppercase": ["foo", "bar"] + "trim": { + "fields": ["foo", "bar"] } } -------------------------------------------------- -=== Processors - ==== Grok Processor The Grok Processor extracts structured fields out of a single text field within a document. You choose which field to diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 12575c731ee..b2ba34cfd54 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -86,7 +86,7 @@ public final class Pipeline { Map processorConfig = entry.getValue(); processors.add(factory.create(processorConfig)); if (processorConfig.isEmpty() == false) { - throw new IllegalArgumentException("processor [" + entry.getKey() + "] doesn't support one or more provided configuration parameters [" + Arrays.toString(processorConfig.keySet().toArray()) + "]"); + throw new IllegalArgumentException("processor [" + entry.getKey() + "] doesn't support one or more provided configuration parameters " + Arrays.toString(processorConfig.keySet().toArray())); } } else { throw new IllegalArgumentException("No processor type exist with name [" + entry.getKey() + "]"); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java new file mode 100644 index 
00000000000..409c67924e9 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.IngestDocument; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * Base class for processors that manipulate strings and require a single "fields" array config value, which + * holds a list of field names in string format. 
+ */ +public abstract class AbstractStringProcessor implements Processor { + + private final Collection fields; + + protected AbstractStringProcessor(Collection fields) { + this.fields = fields; + } + + public Collection getFields() { + return fields; + } + + @Override + public final void execute(IngestDocument document) { + for(String field : fields) { + String val = document.getPropertyValue(field, String.class); + if (val == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot process it."); + } + document.setPropertyValue(field, process(val)); + } + } + + protected abstract String process(String value); + + public static abstract class Factory implements Processor.Factory { + @Override + public T create(Map config) throws IOException { + List fields = ConfigurationUtils.readList(config, "fields"); + return newProcessor(Collections.unmodifiableList(fields)); + } + + protected abstract T newProcessor(Collection fields); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java new file mode 100644 index 00000000000..e17fa622070 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.add; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +/** + * Processor that adds new fields with their corresponding values. If the field is already present, its value + * will be replaced with the provided one. + */ +public class AddProcessor implements Processor { + + public static final String TYPE = "add"; + + private final Map fields; + + AddProcessor(Map fields) { + this.fields = fields; + } + + Map getFields() { + return fields; + } + + @Override + public void execute(IngestDocument document) { + for(Map.Entry entry : fields.entrySet()) { + document.setPropertyValue(entry.getKey(), entry.getValue()); + } + } + + @Override + public String getType() { + return TYPE; + } + + public static final class Factory implements Processor.Factory { + @Override + public AddProcessor create(Map config) throws IOException { + Map fields = ConfigurationUtils.readMap(config, "fields"); + return new AddProcessor(Collections.unmodifiableMap(fields)); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java new file mode 100644 index 00000000000..6ea3a955fd7 --- /dev/null +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.convert; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.*; + +/** + * Processor that converts fields content to a different type. Supported types are: integer, float, boolean and string. + * Throws exception if the field is not there or the conversion fails. 
+ */ +public class ConvertProcessor implements Processor { + + enum Type { + INTEGER { + @Override + public Object convert(Object value) { + try { + return Integer.parseInt(value.toString()); + } catch(NumberFormatException e) { + throw new IllegalArgumentException("unable to convert [" + value + "] to integer", e); + } + + } + }, FLOAT { + @Override + public Object convert(Object value) { + try { + return Float.parseFloat(value.toString()); + } catch(NumberFormatException e) { + throw new IllegalArgumentException("unable to convert [" + value + "] to float", e); + } + } + }, BOOLEAN { + @Override + public Object convert(Object value) { + if (value.toString().equalsIgnoreCase("true")) { + return true; + } else if (value.toString().equalsIgnoreCase("false")) { + return false; + } else { + throw new IllegalArgumentException("[" + value + "] is not a boolean value, cannot convert to boolean"); + } + } + }, STRING { + @Override + public Object convert(Object value) { + return value.toString(); + } + }; + + @Override + public final String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public abstract Object convert(Object value); + + public static Type fromString(String type) { + try { + return Type.valueOf(type.toUpperCase(Locale.ROOT)); + } catch(IllegalArgumentException e) { + throw new IllegalArgumentException("type [" + type + "] not supported, cannot convert field.", e); + } + } + } + + public static final String TYPE = "convert"; + + private final Map fields; + + ConvertProcessor(Map fields) { + this.fields = fields; + } + + Map getFields() { + return fields; + } + + @Override + public void execute(IngestDocument document) { + for(Map.Entry entry : fields.entrySet()) { + Type type = entry.getValue(); + Object oldValue = document.getPropertyValue(entry.getKey(), Object.class); + Object newValue; + if (oldValue == null) { + throw new IllegalArgumentException("Field [" + entry.getKey() + "] is null, cannot be converted to type [" + type + "]"); + } + 
+ if (oldValue instanceof List) { + List list = (List) oldValue; + List newList = new ArrayList<>(); + for (Object value : list) { + newList.add(type.convert(value)); + } + newValue = newList; + } else { + newValue = type.convert(oldValue); + } + document.setPropertyValue(entry.getKey(), newValue); + } + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + @Override + public ConvertProcessor create(Map config) throws IOException { + Map fields = ConfigurationUtils.readMap(config, "fields"); + Map convertFields = new HashMap<>(); + for (Map.Entry entry : fields.entrySet()) { + convertFields.put(entry.getKey(), Type.fromString(entry.getValue())); + } + return new ConvertProcessor(Collections.unmodifiableMap(convertFields)); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubExpression.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubExpression.java index 402061b18ac..54d55a0add0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/GsubExpression.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubExpression.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.mutate; +package org.elasticsearch.ingest.processor.gsub; import java.util.Objects; import java.util.regex.Pattern; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java new file mode 100644 index 00000000000..d436a888fd7 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.gsub; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Processor that allows to search for patterns in field content and replace them with corresponding string replacement. + * Support fields of string type only, throws exception if a field is of a different type. 
+ */ +public class GsubProcessor implements Processor { + + public static final String TYPE = "gsub"; + + private final List gsubExpressions; + + GsubProcessor(List gsubExpressions) { + this.gsubExpressions = gsubExpressions; + } + + List getGsubExpressions() { + return gsubExpressions; + } + + @Override + public void execute(IngestDocument document) { + for (GsubExpression gsubExpression : gsubExpressions) { + String oldVal = document.getPropertyValue(gsubExpression.getFieldName(), String.class); + if (oldVal == null) { + throw new IllegalArgumentException("field [" + gsubExpression.getFieldName() + "] is null, cannot match pattern."); + } + Matcher matcher = gsubExpression.getPattern().matcher(oldVal); + String newVal = matcher.replaceAll(gsubExpression.getReplacement()); + document.setPropertyValue(gsubExpression.getFieldName(), newVal); + } + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + @Override + public GsubProcessor create(Map config) throws IOException { + List> gsubConfig = ConfigurationUtils.readList(config, "expressions"); + List gsubExpressions = new ArrayList<>(); + for (Map stringObjectMap : gsubConfig) { + String field = stringObjectMap.get("field"); + if (field == null) { + throw new IllegalArgumentException("no [field] specified for gsub expression"); + } + String pattern = stringObjectMap.get("pattern"); + if (pattern == null) { + throw new IllegalArgumentException("no [pattern] specified for gsub expression"); + } + String replacement = stringObjectMap.get("replacement"); + if (replacement == null) { + throw new IllegalArgumentException("no [replacement] specified for gsub expression"); + } + Pattern searchPattern = Pattern.compile(pattern); + gsubExpressions.add(new GsubExpression(field, searchPattern, replacement)); + } + return new GsubProcessor(gsubExpressions); + } + } +} diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java new file mode 100644 index 00000000000..283a7ee49f8 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.join; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Processor that joins the different items of an array into a single string value using a separator between each item. + * Throws exception is the specified field is not an array. 
+ */ +public class JoinProcessor implements Processor { + + public static final String TYPE = "join"; + + private final Map fields; + + JoinProcessor(Map fields) { + this.fields = fields; + } + + Map getFields() { + return fields; + } + + @Override + public void execute(IngestDocument document) { + for(Map.Entry entry : fields.entrySet()) { + List list = document.getPropertyValue(entry.getKey(), List.class); + if (list == null) { + throw new IllegalArgumentException("field [" + entry.getKey() + "] is null, cannot join."); + } + String joined = list.stream() + .map(Object::toString) + .collect(Collectors.joining(entry.getValue())); + document.setPropertyValue(entry.getKey(), joined); + } + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + @Override + public JoinProcessor create(Map config) throws IOException { + Map fields = ConfigurationUtils.readMap(config, "fields"); + return new JoinProcessor(Collections.unmodifiableMap(fields)); + } + } +} + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java new file mode 100644 index 00000000000..751a566d10a --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.lowercase; + +import org.elasticsearch.ingest.processor.AbstractStringProcessor; + +import java.util.Collection; +import java.util.Locale; + +/** + * Processor that converts the content of string fields to lowercase. + * Throws exception is the field is not of type string. + */ + +public class LowercaseProcessor extends AbstractStringProcessor { + + public static final String TYPE = "lowercase"; + + LowercaseProcessor(Collection fields) { + super(fields); + } + + @Override + protected String process(String value) { + return value.toLowerCase(Locale.ROOT); + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory extends AbstractStringProcessor.Factory { + @Override + protected LowercaseProcessor newProcessor(Collection fields) { + return new LowercaseProcessor(fields); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java deleted file mode 100644 index ee121b61a4b..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/mutate/MutateProcessor.java +++ /dev/null @@ -1,329 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.mutate; - -import org.elasticsearch.common.Booleans; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; - -import java.io.IOException; -import java.util.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -public final class MutateProcessor implements Processor { - - public static final String TYPE = "mutate"; - - private final Map update; - private final Map rename; - private final Map convert; - private final Map split; - private final List gsub; - private final Map join; - private final List remove; - private final List trim; - private final List uppercase; - private final List lowercase; - - MutateProcessor(Map update, Map rename, Map convert, - Map split, List gsub, Map join, - List remove, List trim, List uppercase, List lowercase) { - this.update = update; - this.rename = rename; - this.convert = convert; - this.split = split; - this.gsub = gsub; - this.join = join; - this.remove = remove; - this.trim = trim; - this.uppercase = uppercase; - this.lowercase = lowercase; - } - - Map getUpdate() { - return update; - } - - Map getRename() { - return rename; - } - - Map getConvert() { - return convert; - } - - Map getSplit() { - return split; - } - - List 
getGsub() { - return gsub; - } - - Map getJoin() { - return join; - } - - List getRemove() { - return remove; - } - - List getTrim() { - return trim; - } - - List getUppercase() { - return uppercase; - } - - List getLowercase() { - return lowercase; - } - - @Override - public void execute(IngestDocument ingestDocument) { - if (update != null) { - doUpdate(ingestDocument); - } - if (rename != null) { - doRename(ingestDocument); - } - if (convert != null) { - doConvert(ingestDocument); - } - if (split != null) { - doSplit(ingestDocument); - } - if (gsub != null) { - doGsub(ingestDocument); - } - if (join != null) { - doJoin(ingestDocument); - } - if (remove != null) { - doRemove(ingestDocument); - } - if (trim != null) { - doTrim(ingestDocument); - } - if (uppercase != null) { - doUppercase(ingestDocument); - } - if (lowercase != null) { - doLowercase(ingestDocument); - } - } - - @Override - public String getType() { - return TYPE; - } - - private void doUpdate(IngestDocument ingestDocument) { - for(Map.Entry entry : update.entrySet()) { - ingestDocument.setPropertyValue(entry.getKey(), entry.getValue()); - } - } - - private void doRename(IngestDocument ingestDocument) { - for(Map.Entry entry : rename.entrySet()) { - if (ingestDocument.hasPropertyValue(entry.getKey())) { - Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); - ingestDocument.getSource().remove(entry.getKey()); - ingestDocument.setPropertyValue(entry.getValue(), oldVal); - } - } - } - - private Object parseValueAsType(Object oldVal, String toType) { - switch (toType) { - case "integer": - oldVal = Integer.parseInt(oldVal.toString()); - break; - case "float": - oldVal = Float.parseFloat(oldVal.toString()); - break; - case "string": - oldVal = oldVal.toString(); - break; - case "boolean": - // TODO(talevy): Booleans#parseBoolean depends on Elasticsearch, should be moved into dedicated library. 
- oldVal = Booleans.parseBoolean(oldVal.toString(), false); - } - - return oldVal; - } - - @SuppressWarnings("unchecked") - private void doConvert(IngestDocument ingestDocument) { - for(Map.Entry entry : convert.entrySet()) { - String toType = entry.getValue(); - - Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); - Object newVal; - - if (oldVal instanceof List) { - newVal = new ArrayList<>(); - for (Object e : ((List) oldVal)) { - ((List) newVal).add(parseValueAsType(e, toType)); - } - } else { - if (oldVal == null) { - throw new IllegalArgumentException("Field \"" + entry.getKey() + "\" is null, cannot be converted to a/an " + toType); - } - newVal = parseValueAsType(oldVal, toType); - } - - ingestDocument.setPropertyValue(entry.getKey(), newVal); - } - } - - private void doSplit(IngestDocument ingestDocument) { - for(Map.Entry entry : split.entrySet()) { - Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); - if (oldVal == null) { - throw new IllegalArgumentException("Cannot split field. 
[" + entry.getKey() + "] is null."); - } else if (oldVal instanceof String) { - ingestDocument.setPropertyValue(entry.getKey(), Arrays.asList(((String) oldVal).split(entry.getValue()))); - } else { - throw new IllegalArgumentException("Cannot split a field that is not a String type"); - } - } - } - - private void doGsub(IngestDocument ingestDocument) { - for (GsubExpression gsubExpression : gsub) { - String oldVal = ingestDocument.getPropertyValue(gsubExpression.getFieldName(), String.class); - if (oldVal == null) { - throw new IllegalArgumentException("Field \"" + gsubExpression.getFieldName() + "\" is null, cannot match pattern."); - } - Matcher matcher = gsubExpression.getPattern().matcher(oldVal); - String newVal = matcher.replaceAll(gsubExpression.getReplacement()); - ingestDocument.setPropertyValue(gsubExpression.getFieldName(), newVal); - } - } - - @SuppressWarnings("unchecked") - private void doJoin(IngestDocument ingestDocument) { - for(Map.Entry entry : join.entrySet()) { - Object oldVal = ingestDocument.getPropertyValue(entry.getKey(), Object.class); - if (oldVal instanceof List) { - String joined = (String) ((List) oldVal) - .stream() - .map(Object::toString) - .collect(Collectors.joining(entry.getValue())); - - ingestDocument.setPropertyValue(entry.getKey(), joined); - } else { - throw new IllegalArgumentException("Cannot join field:" + entry.getKey() + " with type: " + oldVal.getClass()); - } - } - } - - private void doRemove(IngestDocument ingestDocument) { - for(String field : remove) { - ingestDocument.getSource().remove(field); - } - } - - private void doTrim(IngestDocument ingestDocument) { - for(String field : trim) { - Object val = ingestDocument.getPropertyValue(field, Object.class); - if (val == null) { - throw new IllegalArgumentException("Cannot trim field. 
[" + field + "] is null."); - } else if (val instanceof String) { - ingestDocument.setPropertyValue(field, ((String) val).trim()); - } else { - throw new IllegalArgumentException("Cannot trim field:" + field + " with type: " + val.getClass()); - } - } - } - - private void doUppercase(IngestDocument ingestDocument) { - for(String field : uppercase) { - Object val = ingestDocument.getPropertyValue(field, Object.class); - if (val == null) { - throw new IllegalArgumentException("Cannot uppercase field. [" + field + "] is null."); - } else if (val instanceof String) { - ingestDocument.setPropertyValue(field, ((String) val).toUpperCase(Locale.ROOT)); - } else { - throw new IllegalArgumentException("Cannot uppercase field:" + field + " with type: " + val.getClass()); - } - } - } - - private void doLowercase(IngestDocument ingestDocument) { - for(String field : lowercase) { - Object val = ingestDocument.getPropertyValue(field, Object.class); - if (val == null) { - throw new IllegalArgumentException("Cannot lowercase field. 
[" + field + "] is null."); - } else if (val instanceof String) { - ingestDocument.setPropertyValue(field, ((String) val).toLowerCase(Locale.ROOT)); - } else { - throw new IllegalArgumentException("Cannot lowercase field:" + field + " with type: " + val.getClass()); - } - } - } - - public static final class Factory implements Processor.Factory { - @Override - public MutateProcessor create(Map config) throws IOException { - Map update = ConfigurationUtils.readOptionalMap(config, "update"); - Map rename = ConfigurationUtils.readOptionalMap(config, "rename"); - Map convert = ConfigurationUtils.readOptionalMap(config, "convert"); - Map split = ConfigurationUtils.readOptionalMap(config, "split"); - Map> gsubConfig = ConfigurationUtils.readOptionalMap(config, "gsub"); - Map join = ConfigurationUtils.readOptionalMap(config, "join"); - List remove = ConfigurationUtils.readOptionalList(config, "remove"); - List trim = ConfigurationUtils.readOptionalList(config, "trim"); - List uppercase = ConfigurationUtils.readOptionalList(config, "uppercase"); - List lowercase = ConfigurationUtils.readOptionalList(config, "lowercase"); - - // pre-compile regex patterns - List gsubExpressions = null; - if (gsubConfig != null) { - gsubExpressions = new ArrayList<>(); - for (Map.Entry> entry : gsubConfig.entrySet()) { - List searchAndReplace = entry.getValue(); - if (searchAndReplace.size() != 2) { - throw new IllegalArgumentException("Invalid search and replace values " + searchAndReplace + " for field: " + entry.getKey()); - } - Pattern searchPattern = Pattern.compile(searchAndReplace.get(0)); - gsubExpressions.add(new GsubExpression(entry.getKey(), searchPattern, searchAndReplace.get(1))); - } - } - - return new MutateProcessor( - (update == null) ? null : Collections.unmodifiableMap(update), - (rename == null) ? null : Collections.unmodifiableMap(rename), - (convert == null) ? null : Collections.unmodifiableMap(convert), - (split == null) ? 
null : Collections.unmodifiableMap(split), - (gsubExpressions == null) ? null : Collections.unmodifiableList(gsubExpressions), - (join == null) ? null : Collections.unmodifiableMap(join), - (remove == null) ? null : Collections.unmodifiableList(remove), - (trim == null) ? null : Collections.unmodifiableList(trim), - (uppercase == null) ? null : Collections.unmodifiableList(uppercase), - (lowercase == null) ? null : Collections.unmodifiableList(lowercase)); - } - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java new file mode 100644 index 00000000000..744b668a27a --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.remove; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * Processor that removes existing fields. Nothing happens if the field is not present. + */ +public class RemoveProcessor implements Processor { + + public static final String TYPE = "remove"; + + private final Collection fields; + + RemoveProcessor(Collection fields) { + this.fields = fields; + } + + Collection getFields() { + return fields; + } + + @Override + public void execute(IngestDocument document) { + for(String field : fields) { + document.removeProperty(field); + } + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + @Override + public RemoveProcessor create(Map config) throws IOException { + List fields = ConfigurationUtils.readList(config, "fields"); + return new RemoveProcessor(Collections.unmodifiableList(fields)); + } + } +} + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java new file mode 100644 index 00000000000..f829e7ab0d7 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.rename; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +/** + * Processor that allows to rename existing fields. Will throw exception if the field is not present. + */ +public class RenameProcessor implements Processor { + + public static final String TYPE = "rename"; + + private final Map fields; + + RenameProcessor(Map fields) { + this.fields = fields; + } + + Map getFields() { + return fields; + } + + @Override + public void execute(IngestDocument document) { + for(Map.Entry entry : fields.entrySet()) { + if (document.hasPropertyValue(entry.getKey())) { + if (document.hasPropertyValue(entry.getKey()) == false) { + throw new IllegalArgumentException("field [" + entry.getKey() + "] doesn't exist"); + } + Object oldValue = document.getPropertyValue(entry.getKey(), Object.class); + document.removeProperty(entry.getKey()); + document.setPropertyValue(entry.getValue(), oldValue); + } + } + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + @Override + public RenameProcessor create(Map config) throws IOException { + Map fields = ConfigurationUtils.readMap(config, "fields"); + return new RenameProcessor(Collections.unmodifiableMap(fields)); + } + } +} diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java new file mode 100644 index 00000000000..bc83bc9c794 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.split; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; + +/** + * Processor that splits fields content into different items based on the occurrence of a specified separator. + * New field value will be an array containing all of the different extracted items. + * Throws exception if the field is null or a type other than string. 
+ */ +public class SplitProcessor implements Processor { + + public static final String TYPE = "split"; + + private final Map fields; + + SplitProcessor(Map fields) { + this.fields = fields; + } + + Map getFields() { + return fields; + } + + @Override + public void execute(IngestDocument document) { + for(Map.Entry entry : fields.entrySet()) { + String oldVal = document.getPropertyValue(entry.getKey(), String.class); + if (oldVal == null) { + throw new IllegalArgumentException("field [" + entry.getKey() + "] is null, cannot split."); + } + document.setPropertyValue(entry.getKey(), Arrays.asList(oldVal.split(entry.getValue()))); + } + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + @Override + public SplitProcessor create(Map config) throws IOException { + Map fields = ConfigurationUtils.readMap(config, "fields"); + return new SplitProcessor(Collections.unmodifiableMap(fields)); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java new file mode 100644 index 00000000000..d3090a37d41 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.trim; + +import org.elasticsearch.ingest.processor.AbstractStringProcessor; + +import java.util.Collection; + +/** + * Processor that trims the content of string fields. + * Throws exception is the field is not of type string. + */ +public class TrimProcessor extends AbstractStringProcessor { + + public static final String TYPE = "trim"; + + TrimProcessor(Collection fields) { + super(fields); + } + + @Override + protected String process(String value) { + return value.trim(); + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory extends AbstractStringProcessor.Factory { + @Override + protected TrimProcessor newProcessor(Collection fields) { + return new TrimProcessor(fields); + } + } +} + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java new file mode 100644 index 00000000000..a4b281fe2e9 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.uppercase; + +import org.elasticsearch.ingest.processor.AbstractStringProcessor; + +import java.util.Collection; +import java.util.Locale; + +/** + * Processor that converts the content of string fields to uppercase. + * Throws exception is the field is not of type string. + */ +public class UppercaseProcessor extends AbstractStringProcessor { + + public static final String TYPE = "uppercase"; + + UppercaseProcessor(Collection fields) { + super(fields); + } + + @Override + protected String process(String value) { + return value.toUpperCase(Locale.ROOT); + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory extends AbstractStringProcessor.Factory { + @Override + protected UppercaseProcessor newProcessor(Collection fields) { + return new UppercaseProcessor(fields); + } + } +} + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index ae685c75d5a..5cb4e703ea0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -22,10 +22,19 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.add.AddProcessor; +import 
org.elasticsearch.ingest.processor.convert.ConvertProcessor; import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; -import org.elasticsearch.ingest.processor.mutate.MutateProcessor; +import org.elasticsearch.ingest.processor.gsub.GsubProcessor; +import org.elasticsearch.ingest.processor.join.JoinProcessor; +import org.elasticsearch.ingest.processor.lowercase.LowercaseProcessor; +import org.elasticsearch.ingest.processor.remove.RemoveProcessor; +import org.elasticsearch.ingest.processor.rename.RenameProcessor; +import org.elasticsearch.ingest.processor.split.SplitProcessor; +import org.elasticsearch.ingest.processor.trim.TrimProcessor; +import org.elasticsearch.ingest.processor.uppercase.UppercaseProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import org.elasticsearch.plugin.ingest.transport.simulate.SimulateExecutionService; @@ -47,7 +56,16 @@ public class IngestModule extends AbstractModule { addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); addProcessor(DateProcessor.TYPE, new DateProcessor.Factory()); - addProcessor(MutateProcessor.TYPE, new MutateProcessor.Factory()); + addProcessor(AddProcessor.TYPE, new AddProcessor.Factory()); + addProcessor(RenameProcessor.TYPE, new RenameProcessor.Factory()); + addProcessor(RemoveProcessor.TYPE, new RemoveProcessor.Factory()); + addProcessor(SplitProcessor.TYPE, new SplitProcessor.Factory()); + addProcessor(JoinProcessor.TYPE, new JoinProcessor.Factory()); + addProcessor(UppercaseProcessor.TYPE, new UppercaseProcessor.Factory()); + addProcessor(LowercaseProcessor.TYPE, new LowercaseProcessor.Factory()); + addProcessor(TrimProcessor.TYPE, new TrimProcessor.Factory()); + addProcessor(ConvertProcessor.TYPE, new ConvertProcessor.Factory()); + addProcessor(GsubProcessor.TYPE, new 
GsubProcessor.Factory()); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Factory.class); for (Map.Entry entry : processors.entrySet()) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java index bc1c9eb0fe5..459f7a62869 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java @@ -20,54 +20,55 @@ package org.elasticsearch.ingest; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class PipelineFactoryTests extends ESTestCase { public void testCreate() throws Exception { - Pipeline.Factory factory = new Pipeline.Factory(); - Map processorRegistry = new HashMap<>(); - processorRegistry.put("mutate", new MutateProcessor.Factory()); - Map processorConfig = new HashMap<>(); - processorConfig.put("uppercase", Arrays.asList("field1")); Map pipelineConfig = new HashMap<>(); pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig))); - Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + Pipeline.Factory factory = new Pipeline.Factory(); + Map processorRegistry = new HashMap<>(); + Processor processor = 
mock(Processor.class); + when(processor.getType()).thenReturn("test-processor"); + Processor.Factory processorFactory = mock(Processor.Factory.class); + when(processorFactory.create(processorConfig)).thenReturn(processor); + processorRegistry.put("test", processorFactory); + Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); assertThat(pipeline.getProcessors().size(), equalTo(1)); - assertThat(pipeline.getProcessors().get(0), instanceOf(MutateProcessor.class)); + assertThat(pipeline.getProcessors().get(0).getType(), equalTo("test-processor")); } - public void testCreate_unusedProcessorOptions() throws Exception { - Pipeline.Factory factory = new Pipeline.Factory(); - Map processorRegistry = new HashMap<>(); - processorRegistry.put("mutate", new MutateProcessor.Factory()); - + public void testCreateUnusedProcessorOptions() throws Exception { Map processorConfig = new HashMap<>(); - processorConfig.put("uppercase", Arrays.asList("field1")); - processorConfig.put("foo", "bar"); + processorConfig.put("unused", "value"); Map pipelineConfig = new HashMap<>(); pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig))); - + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + Pipeline.Factory factory = new Pipeline.Factory(); + Map processorRegistry = new HashMap<>(); + Processor processor = mock(Processor.class); + when(processor.getType()).thenReturn("test-processor"); + Processor.Factory processorFactory = mock(Processor.Factory.class); + when(processorFactory.create(processorConfig)).thenReturn(processor); + processorRegistry.put("test", processorFactory); try { factory.create("_id", pipelineConfig, processorRegistry); } catch (IllegalArgumentException e) { - 
assertThat(e.getMessage(), equalTo("processor [mutate] doesn't support one or more provided configuration parameters [[foo]]")); + assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]")); } } - } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java new file mode 100644 index 00000000000..5277d6b790d --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -0,0 +1,216 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import com.carrotsearch.randomizedtesting.generators.RandomInts; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.elasticsearch.common.Strings; + +import java.util.*; + +public final class RandomDocumentPicks { + + private RandomDocumentPicks() { + + } + + /** + * Returns a random field name. Can be a leaf field name or the + * path to refer to a field name using the dot notation. 
+ */ + public static String randomFieldName(Random random) { + int numLevels = RandomInts.randomIntBetween(random, 1, 5); + String fieldName = ""; + for (int i = 0; i < numLevels; i++) { + if (i > 0) { + fieldName += "."; + } + fieldName += randomString(random); + } + return fieldName; + } + + /** + * Returns a random leaf field name. + */ + public static String randomLeafFieldName(Random random) { + String fieldName; + do { + fieldName = randomString(random); + } while (fieldName.contains(".")); + return fieldName; + } + + /** + * Returns a randomly selected existing field name out of the fields that are contained + * in the document provided as an argument. + */ + public static String randomExistingFieldName(Random random, IngestDocument ingestDocument) { + Map source = new TreeMap<>(ingestDocument.getSource()); + Map.Entry randomEntry = RandomPicks.randomFrom(random, source.entrySet()); + String key = randomEntry.getKey(); + while (randomEntry.getValue() instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) randomEntry.getValue(); + Map treeMap = new TreeMap<>(map); + randomEntry = RandomPicks.randomFrom(random, treeMap.entrySet()); + key += "." + randomEntry.getKey(); + } + assert ingestDocument.getPropertyValue(key, Object.class) != null; + return key; + } + + /** + * Adds a random non existing field to the provided document and associates it + * with the provided value. The field will be added at a random position within the document, + * not necessarily at the top level using a leaf field name. + */ + public static String addRandomField(Random random, IngestDocument ingestDocument, Object value) { + String fieldName; + do { + fieldName = randomFieldName(random); + } while (canAddField(fieldName, ingestDocument) == false); + ingestDocument.setPropertyValue(fieldName, value); + return fieldName; + } + + /** + * Checks whether the provided field name can be safely added to the provided document. 
+ * When the provided field name holds the path using the dot notation, we have to make sure + * that each node of the tree either doesn't exist or is a map, otherwise new fields cannot be added. + */ + public static boolean canAddField(String path, IngestDocument ingestDocument) { + String[] pathElements = Strings.splitStringToArray(path, '.'); + Map innerMap = ingestDocument.getSource(); + if (pathElements.length > 1) { + for (int i = 0; i < pathElements.length - 1; i++) { + Object currentLevel = innerMap.get(pathElements[i]); + if (currentLevel == null) { + return true; + } + if (currentLevel instanceof Map == false) { + return false; + } + @SuppressWarnings("unchecked") + Map map = (Map) currentLevel; + innerMap = map; + } + } + String leafKey = pathElements[pathElements.length - 1]; + return innerMap.containsKey(leafKey) == false; + } + + /** + * Generates a random document and random metadata + */ + public static IngestDocument randomIngestDocument(Random random) { + return randomIngestDocument(random, randomDocument(random)); + } + + /** + * Generates a document that holds random metadata and the document provided as a map argument + */ + public static IngestDocument randomIngestDocument(Random random, Map document) { + String index = randomString(random); + String type = randomString(random); + String id = randomString(random); + return new IngestDocument(index, type, id, document); + } + + private static Map randomDocument(Random random) { + Map document = new HashMap<>(); + addRandomFields(random, document, 0); + return document; + } + + /** + * Generates a random field value, can be a string, a number, a list of an object itself. 
+ */ + public static Object randomFieldValue(Random random) { + return randomFieldValue(random, 0); + } + + private static Object randomFieldValue(Random random, int currentDepth) { + switch(RandomInts.randomIntBetween(random, 0, 8)) { + case 0: + return randomString(random); + case 1: + return random.nextInt(); + case 2: + return random.nextBoolean(); + case 3: + return random.nextDouble(); + case 4: + List stringList = new ArrayList<>(); + int numStringItems = RandomInts.randomIntBetween(random, 1, 10); + for (int j = 0; j < numStringItems; j++) { + stringList.add(randomString(random)); + } + return stringList; + case 5: + List intList = new ArrayList<>(); + int numIntItems = RandomInts.randomIntBetween(random, 1, 10); + for (int j = 0; j < numIntItems; j++) { + intList.add(random.nextInt()); + } + return intList; + case 6: + List booleanList = new ArrayList<>(); + int numBooleanItems = RandomInts.randomIntBetween(random, 1, 10); + for (int j = 0; j < numBooleanItems; j++) { + booleanList.add(random.nextBoolean()); + } + return booleanList; + case 7: + List doubleList = new ArrayList<>(); + int numDoubleItems = RandomInts.randomIntBetween(random, 1, 10); + for (int j = 0; j < numDoubleItems; j++) { + doubleList.add(random.nextDouble()); + } + return doubleList; + case 8: + Map newNode = new HashMap<>(); + addRandomFields(random, newNode, ++currentDepth); + return newNode; + default: + throw new UnsupportedOperationException(); + } + } + + public static String randomString(Random random) { + if (random.nextBoolean()) { + return RandomStrings.randomAsciiOfLengthBetween(random, 1, 10); + } + return RandomStrings.randomUnicodeOfCodepointLengthBetween(random, 1, 10); + } + + private static void addRandomFields(Random random, Map parentNode, int currentDepth) { + if (currentDepth > 5) { + return; + } + int numFields = RandomInts.randomIntBetween(random, 1, 10); + for (int i = 0; i < numFields; i++) { + String fieldName = randomLeafFieldName(random); + Object fieldValue 
= randomFieldValue(random, currentDepth); + parentNode.put(fieldName, fieldValue); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java new file mode 100644 index 00000000000..94bdce9f663 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public abstract class AbstractStringProcessorTestCase extends ESTestCase { + + protected abstract AbstractStringProcessor newProcessor(Collection fields); + + protected String modifyInput(String input) { + return input; + } + + protected abstract String expectedResult(String input); + + public void testProcessor() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + int numFields = randomIntBetween(1, 5); + Map expected = new HashMap<>(); + for (int i = 0; i < numFields; i++) { + String fieldValue = RandomDocumentPicks.randomString(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, modifyInput(fieldValue)); + expected.put(fieldName, expectedResult(fieldValue)); + } + Processor processor = newProcessor(expected.keySet()); + processor.execute(ingestDocument); + for (Map.Entry entry : expected.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + } + } + + public void testNullValue() throws IOException { + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = newProcessor(Collections.singletonList(fieldName)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + try { + processor.execute(ingestDocument); + fail("processor should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot process it.")); + } + } + + public void testNonStringValue() throws 
IOException { + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = newProcessor(Collections.singletonList(fieldName)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + ingestDocument.setPropertyValue(fieldName, randomInt()); + try { + processor.execute(ingestDocument); + fail("processor should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorFactoryTests.java new file mode 100644 index 00000000000..8acbed60541 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorFactoryTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.add; + +import org.elasticsearch.ingest.processor.join.JoinProcessor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class AddProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + AddProcessor.Factory factory = new AddProcessor.Factory(); + Map config = new HashMap<>(); + Map fields = Collections.singletonMap("field1", "value1"); + config.put("fields", fields); + AddProcessor addProcessor = factory.create(config); + assertThat(addProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + AddProcessor.Factory factory = new AddProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java new file mode 100644 index 00000000000..cecf2c9d9e2 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.add; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; + +public class AddProcessorTests extends ESTestCase { + + public void testAddExistingFields() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + int numFields = randomIntBetween(1, 5); + Map fields = new HashMap<>(); + for (int i = 0; i < numFields; i++) { + String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); + fields.put(fieldName, fieldValue); + } + Processor processor = new AddProcessor(fields); + processor.execute(ingestDocument); + + for (Map.Entry field : fields.entrySet()) { + assertThat(ingestDocument.hasPropertyValue(field.getKey()), equalTo(true)); + assertThat(ingestDocument.getPropertyValue(field.getKey(), Object.class), equalTo(field.getValue())); + } + } + + public void testAddNewFields() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + //used to verify that there are no conflicts between subsequent fields going to be added + IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + int 
numFields = randomIntBetween(1, 5); + Map fields = new HashMap<>(); + for (int i = 0; i < numFields; i++) { + Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), testIngestDocument, fieldValue); + fields.put(fieldName, fieldValue); + } + Processor processor = new AddProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry field : fields.entrySet()) { + assertThat(ingestDocument.hasPropertyValue(field.getKey()), equalTo(true)); + assertThat(ingestDocument.getPropertyValue(field.getKey(), Object.class), equalTo(field.getValue())); + } + } + + public void testAddFieldsTypeMismatch() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + ingestDocument.setPropertyValue("field", "value"); + Processor processor = new AddProcessor(Collections.singletonMap("field.inner", "value")); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot add field to parent [field] of type [java.lang.String], [java.util.Map] expected instead.")); + } + } +} \ No newline at end of file diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java new file mode 100644 index 00000000000..ae20d647df9 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.convert; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class ConvertProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); + Map config = new HashMap<>(); + ConvertProcessor.Type type = randomFrom(ConvertProcessor.Type.values()); + Map fields = Collections.singletonMap("field1", type.toString()); + config.put("fields", fields); + ConvertProcessor convertProcessor = factory.create(config); + assertThat(convertProcessor.getFields().size(), equalTo(1)); + assertThat(convertProcessor.getFields().get("field1"), equalTo(type)); + } + + public void testCreateMissingFields() throws IOException { + ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } + + public void testCreateUnsupportedType() throws IOException { + ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); + 
Map config = new HashMap<>(); + String type = "type-" + randomAsciiOfLengthBetween(1, 10); + Map fields = Collections.singletonMap("field1", type); + config.put("fields", fields); + try { + factory.create(config); + fail("factory create should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), Matchers.equalTo("type [" + type + "] not supported, cannot convert field.")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java new file mode 100644 index 00000000000..b89f51166df --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java @@ -0,0 +1,325 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.convert; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.*; + +import static org.elasticsearch.ingest.processor.convert.ConvertProcessor.*; +import static org.hamcrest.Matchers.equalTo; + +public class ConvertProcessorTests extends ESTestCase { + + public void testConvertInt() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map expectedResult = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + int randomInt = randomInt(); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomInt); + fields.put(fieldName, Type.INTEGER); + expectedResult.put(fieldName, randomInt); + } + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), Integer.class), equalTo(entry.getValue())); + } + } + + public void testConvertIntList() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map> expectedResult = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { + int randomInt = randomInt(); + fieldValue.add(Integer.toString(randomInt)); + expectedList.add(randomInt); + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + fields.put(fieldName, Type.INTEGER); + 
expectedResult.put(fieldName, expectedList); + } + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry> entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + } + } + + public void testConvertIntError() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + String value = "string-" + randomAsciiOfLengthBetween(1, 10); + ingestDocument.setPropertyValue(fieldName, value); + + Map convert = Collections.singletonMap(fieldName, Type.INTEGER); + Processor processor = new ConvertProcessor(convert); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to integer")); + } + } + + public void testConvertFloat() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map expectedResult = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + float randomFloat = randomFloat(); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomFloat); + fields.put(fieldName, Type.FLOAT); + expectedResult.put(fieldName, randomFloat); + } + + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), Float.class), equalTo(entry.getValue())); + } + } + + public void testConvertFloatList() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map> expectedResult = new HashMap<>(); + 
int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { + float randomFloat = randomFloat(); + fieldValue.add(Float.toString(randomFloat)); + expectedList.add(randomFloat); + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + fields.put(fieldName, Type.FLOAT); + expectedResult.put(fieldName, expectedList); + } + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry> entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + } + } + + public void testConvertFloatError() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + String value = "string-" + randomAsciiOfLengthBetween(1, 10); + ingestDocument.setPropertyValue(fieldName, value); + + Map convert = Collections.singletonMap(fieldName, Type.FLOAT); + Processor processor = new ConvertProcessor(convert); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to float")); + } + } + + public void testConvertBoolean() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map expectedResult = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + boolean randomBoolean = randomBoolean(); + String booleanString = Boolean.toString(randomBoolean); + if (randomBoolean) { + booleanString = booleanString.toUpperCase(Locale.ROOT); + } + String 
fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, booleanString); + fields.put(fieldName, Type.BOOLEAN); + expectedResult.put(fieldName, randomBoolean); + } + + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), Boolean.class), equalTo(entry.getValue())); + } + } + + public void testConvertBooleanList() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map> expectedResult = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { + boolean randomBoolean = randomBoolean(); + String booleanString = Boolean.toString(randomBoolean); + if (randomBoolean) { + booleanString = booleanString.toUpperCase(Locale.ROOT); + } + fieldValue.add(booleanString); + expectedList.add(randomBoolean); + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + fields.put(fieldName, Type.BOOLEAN); + expectedResult.put(fieldName, expectedList); + } + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry> entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + } + } + + public void testConvertBooleanError() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + String fieldValue; + if (randomBoolean()) { + fieldValue = "string-" + randomAsciiOfLengthBetween(1, 10); + } else { + //verify that only proper boolean values 
are supported and we are strict about it + fieldValue = randomFrom("on", "off", "yes", "no", "0", "1"); + } + ingestDocument.setPropertyValue(fieldName, fieldValue); + + Map convert = Collections.singletonMap(fieldName, Type.BOOLEAN); + Processor processor = new ConvertProcessor(convert); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch(Exception e) { + assertThat(e.getMessage(), equalTo("[" + fieldValue + "] is not a boolean value, cannot convert to boolean")); + } + } + + public void testConvertString() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map expectedResult = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + Object fieldValue; + String expectedFieldValue; + switch(randomIntBetween(0, 2)) { + case 0: + float randomFloat = randomFloat(); + fieldValue = randomFloat; + expectedFieldValue = Float.toString(randomFloat); + break; + case 1: + int randomInt = randomInt(); + fieldValue = randomInt; + expectedFieldValue = Integer.toString(randomInt); + break; + case 2: + boolean randomBoolean = randomBoolean(); + fieldValue = randomBoolean; + expectedFieldValue = Boolean.toString(randomBoolean); + break; + default: + throw new UnsupportedOperationException(); + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + fields.put(fieldName, Type.STRING); + expectedResult.put(fieldName, expectedFieldValue); + } + + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + } + } + + public void testConvertStringList() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new 
HashMap<>(); + Map> expectedResult = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { + Object randomValue; + String randomValueString; + switch(randomIntBetween(0, 2)) { + case 0: + float randomFloat = randomFloat(); + randomValue = randomFloat; + randomValueString = Float.toString(randomFloat); + break; + case 1: + int randomInt = randomInt(); + randomValue = randomInt; + randomValueString = Integer.toString(randomInt); + break; + case 2: + boolean randomBoolean = randomBoolean(); + randomValue = randomBoolean; + randomValueString = Boolean.toString(randomBoolean); + break; + default: + throw new UnsupportedOperationException(); + } + fieldValue.add(randomValue); + expectedList.add(randomValueString); + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + fields.put(fieldName, Type.STRING); + expectedResult.put(fieldName, expectedList); + } + Processor processor = new ConvertProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry> entry : expectedResult.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + } + } + + public void testConvertNullField() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Type type = randomFrom(Type.values()); + Map convert = Collections.singletonMap(fieldName, type); + Processor processor = new ConvertProcessor(convert); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Field [" + fieldName + "] is null, cannot be converted to type [" + type 
+ "]")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java new file mode 100644 index 00000000000..a66dbb2519c --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java @@ -0,0 +1,114 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.gsub; + +import org.elasticsearch.ingest.processor.join.JoinProcessor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class GsubProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + GsubProcessor.Factory factory = new GsubProcessor.Factory(); + Map config = new HashMap<>(); + List> expressions = new ArrayList<>(); + Map expression = new HashMap<>(); + expression.put("field", "field1"); + expression.put("pattern", "\\."); + expression.put("replacement", "-"); + expressions.add(expression); + config.put("expressions", expressions); + GsubProcessor gsubProcessor = factory.create(config); + assertThat(gsubProcessor.getGsubExpressions().size(), equalTo(1)); + GsubExpression gsubExpression = gsubProcessor.getGsubExpressions().get(0); + assertThat(gsubExpression.getFieldName(), equalTo("field1")); + assertThat(gsubExpression.getPattern().toString(), equalTo("\\.")); + assertThat(gsubExpression.getReplacement(), equalTo("-")); + } + + public void testCreateMissingExpressions() throws IOException { + GsubProcessor.Factory factory = new GsubProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [expressions] is missing")); + } + } + + public void testCreateNoFieldPresent() throws IOException { + GsubProcessor.Factory factory = new GsubProcessor.Factory(); + Map config = new HashMap<>(); + List> expressions = new ArrayList<>(); + Map expression = new HashMap<>(); + expression.put("pattern", "\\."); + expression.put("replacement", "-"); + expressions.add(expression); + config.put("expressions", expressions); + try { + 
factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("no [field] specified for gsub expression")); + } + } + + public void testCreateNoPatternPresent() throws IOException { + GsubProcessor.Factory factory = new GsubProcessor.Factory(); + Map config = new HashMap<>(); + List> expressions = new ArrayList<>(); + Map expression = new HashMap<>(); + expression.put("field", "field1"); + expression.put("replacement", "-"); + expressions.add(expression); + config.put("expressions", expressions); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("no [pattern] specified for gsub expression")); + } + } + + public void testCreateNoReplacementPresent() throws IOException { + GsubProcessor.Factory factory = new GsubProcessor.Factory(); + Map config = new HashMap<>(); + List> expressions = new ArrayList<>(); + Map expression = new HashMap<>(); + expression.put("field", "field1"); + expression.put("pattern", "\\."); + expressions.add(expression); + config.put("expressions", expressions); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("no [replacement] specified for gsub expression")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java new file mode 100644 index 00000000000..9dce4fe48c7 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.gsub; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.regex.Pattern; + +import static org.hamcrest.Matchers.equalTo; + +public class GsubProcessorTests extends ESTestCase { + + public void testGsub() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + int numFields = randomIntBetween(1, 5); + List expressions = new ArrayList<>(); + for (int i = 0; i < numFields; i++) { + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); + expressions.add(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); + } + Processor processor = new GsubProcessor(expressions); + processor.execute(ingestDocument); + for (GsubExpression expression : expressions) { + assertThat(ingestDocument.getPropertyValue(expression.getFieldName(), String.class), equalTo("127-0-0-1")); + } + } + + public void testGsubNotAStringValue() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + 
String fieldName = RandomDocumentPicks.randomFieldName(random()); + ingestDocument.setPropertyValue(fieldName, 123); + List gsubExpressions = Collections.singletonList(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); + Processor processor = new GsubProcessor(gsubExpressions); + try { + processor.execute(ingestDocument); + fail("processor execution should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); + } + } + + public void testGsubNullValue() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + List gsubExpressions = Collections.singletonList(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); + Processor processor = new GsubProcessor(gsubExpressions); + try { + processor.execute(ingestDocument); + fail("processor execution should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot match pattern.")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java new file mode 100644 index 00000000000..9b7aa52b8cd --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.join; + +import org.elasticsearch.ingest.processor.split.SplitProcessor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class JoinProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + JoinProcessor.Factory factory = new JoinProcessor.Factory(); + Map config = new HashMap<>(); + Map fields = Collections.singletonMap("field1", "-"); + config.put("fields", fields); + JoinProcessor joinProcessor = factory.create(config); + assertThat(joinProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + JoinProcessor.Factory factory = new JoinProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java new file mode 100644 index 00000000000..8ad6c7bb3c0 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.join; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; + +public class JoinProcessorTests extends ESTestCase { + + private static final String[] SEPARATORS = new String[]{"-", "_", "."}; + + public void testJoinStrings() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map expectedResultMap = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + int numItems = randomIntBetween(1, 10); + String separator = randomFrom(SEPARATORS); + List fieldValue = new ArrayList<>(numItems); + String expectedResult = ""; + for (int j = 0; j < numItems; j++) { + String value = randomAsciiOfLengthBetween(1, 10); + fieldValue.add(value); + expectedResult += value; + if (j < numItems - 1) { + expectedResult += separator; + } + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + 
expectedResultMap.put(fieldName, expectedResult); + fields.put(fieldName, separator); + } + Processor processor = new JoinProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry entry : expectedResultMap.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + } + } + + public void testJoinIntegers() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + Map expectedResultMap = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + int numItems = randomIntBetween(1, 10); + String separator = randomFrom(SEPARATORS); + List fieldValue = new ArrayList<>(numItems); + String expectedResult = ""; + for (int j = 0; j < numItems; j++) { + int value = randomInt(); + fieldValue.add(value); + expectedResult += value; + if (j < numItems - 1) { + expectedResult += separator; + } + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + expectedResultMap.put(fieldName, expectedResult); + fields.put(fieldName, separator); + } + Processor processor = new JoinProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry entry : expectedResultMap.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + } + } + + public void testJoinNonListField() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + ingestDocument.setPropertyValue(fieldName, randomAsciiOfLengthBetween(1, 10)); + Map join = Collections.singletonMap(fieldName, "-"); + Processor processor = new JoinProcessor(join); + try { + processor.execute(ingestDocument); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + 
"] of type [java.lang.String] cannot be cast to [java.util.List]")); + } + } + + public void testJoinNonExistingField() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = new JoinProcessor(Collections.singletonMap(fieldName, "-")); + try { + processor.execute(ingestDocument); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot join.")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java new file mode 100644 index 00000000000..2c52eaf5572 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.lowercase; + +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class LowercaseProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); + Map config = new HashMap<>(); + List fields = Collections.singletonList("field1"); + config.put("fields", fields); + LowercaseProcessor uppercaseProcessor = factory.create(config); + assertThat(uppercaseProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java new file mode 100644 index 00000000000..07e14062764 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.lowercase; + +import org.elasticsearch.ingest.processor.AbstractStringProcessor; +import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; + +import java.util.Collection; +import java.util.Locale; + +public class LowercaseProcessorTests extends AbstractStringProcessorTestCase { + @Override + protected AbstractStringProcessor newProcessor(Collection fields) { + return new LowercaseProcessor(fields); + } + + @Override + protected String expectedResult(String input) { + return input.toLowerCase(Locale.ROOT); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java deleted file mode 100644 index 3ebd98c10c3..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorFactoryTests.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.mutate; - -import org.elasticsearch.test.ESTestCase; - -import java.util.*; -import java.util.regex.Pattern; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -public class MutateProcessorFactoryTests extends ESTestCase { - - public void testCreateUpdate() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - Map update = new HashMap<>(); - update.put("foo", 123); - config.put("update", update); - MutateProcessor processor = factory.create(config); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getUpdate(), equalTo(update)); - } - - public void testCreateRename() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - Map rename = new HashMap<>(); - rename.put("foo", "bar"); - config.put("rename", rename); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getGsub(), 
nullValue()); - assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getRename(), equalTo(rename)); - } - - public void testCreateRemove() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - List remove = Collections.singletonList("foo"); - config.put("remove", remove); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getRemove(), equalTo(remove)); - } - - public void testCreateConvert() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - Map convert = new HashMap<>(); - convert.put("foo", "integer"); - config.put("convert", convert); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getConvert(), equalTo(convert)); - } - - public 
void testCreateJoin() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - Map join = new HashMap<>(); - join.put("foo", "bar"); - config.put("join", join); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getJoin(), equalTo(join)); - } - - public void testCreateSplit() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - Map split = new HashMap<>(); - split.put("foo", "bar"); - config.put("split", split); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getSplit(), equalTo(split)); - } - - public void testCreateLowercase() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - List lowercase = Collections.singletonList("foo"); - config.put("lowercase", lowercase); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - 
assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getLowercase(), equalTo(lowercase)); - } - - public void testCreateUppercase() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - List uppercase = Collections.singletonList("foo"); - config.put("uppercase", uppercase); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getTrim(), nullValue()); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getUppercase(), equalTo(uppercase)); - } - - public void testCreateTrim() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - List trim = Collections.singletonList("foo"); - config.put("trim", trim); - MutateProcessor processor = factory.create(config); - assertThat(processor.getUpdate(), nullValue()); - assertThat(processor.getGsub(), nullValue()); - assertThat(processor.getConvert(), nullValue()); - assertThat(processor.getJoin(), nullValue()); - assertThat(processor.getRemove(), nullValue()); - assertThat(processor.getUppercase(), nullValue()); - assertThat(processor.getSplit(), nullValue()); - assertThat(processor.getLowercase(), nullValue()); - assertThat(processor.getRename(), nullValue()); - assertThat(processor.getTrim(), 
equalTo(trim)); - } - - public void testCreateGsubPattern() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - Map> gsub = new HashMap<>(); - gsub.put("foo", Arrays.asList("\\s.*e\\s", "")); - config.put("gsub", gsub); - - MutateProcessor processor = factory.create(config); - assertThat(processor.getGsub().size(), equalTo(1)); - assertThat(processor.getGsub().get(0), equalTo(new GsubExpression("foo", Pattern.compile("\\s.*e\\s"), ""))); - } - - public void testCreateGsubPatternInvalidFormat() throws Exception { - MutateProcessor.Factory factory = new MutateProcessor.Factory(); - Map config = new HashMap<>(); - Map> gsub = new HashMap<>(); - gsub.put("foo", Collections.singletonList("only_one")); - config.put("gsub", gsub); - - try { - factory.create(config); - fail("processor creation should have failed"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Invalid search and replace values [only_one] for field: foo")); - } - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java deleted file mode 100644 index 9231a5db2be..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/mutate/MutateProcessorTests.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.mutate; - -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.*; -import java.util.regex.Pattern; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; - - -public class MutateProcessorTests extends ESTestCase { - private IngestDocument ingestDocument; - - @Before - public void setData() { - Map document = new HashMap<>(); - document.put("foo", "bar"); - document.put("alpha", "aBcD"); - document.put("num", "64"); - document.put("to_strip", " clean "); - document.put("arr", Arrays.asList("1", "2", "3")); - document.put("ip", "127.0.0.1"); - Map fizz = new HashMap<>(); - fizz.put("buzz", "hello world"); - document.put("fizz", fizz); - - ingestDocument = new IngestDocument("index", "type", "id", document); - } - - public void testUpdate() throws IOException { - Map update = new HashMap<>(); - update.put("foo", 123); - Processor processor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("foo", Integer.class), equalTo(123)); - } - - public void testRename() throws IOException { - Map rename = new HashMap<>(); - rename.put("foo", "bar"); - Processor processor = new 
MutateProcessor(null, rename, null, null, null, null, null, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("bar", String.class), equalTo("bar")); - assertThat(ingestDocument.hasPropertyValue("foo"), is(false)); - } - - public void testConvert() throws IOException { - Map convert = new HashMap<>(); - convert.put("num", "integer"); - Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("num", Integer.class), equalTo(64)); - } - - public void testConvertNullField() throws IOException { - Map convert = new HashMap<>(); - convert.put("null", "integer"); - Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); - try { - processor.execute(ingestDocument); - fail("processor execute should have failed"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Field \"null\" is null, cannot be converted to a/an integer")); - } - } - - public void testConvertList() throws IOException { - Map convert = new HashMap<>(); - convert.put("arr", "integer"); - Processor processor = new MutateProcessor(null, null, convert, null, null, null, null, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("arr", List.class), equalTo(Arrays.asList(1, 2, 3))); - } - - public void testSplit() throws IOException { - Map split = new HashMap<>(); - split.put("ip", "\\."); - Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - 
assertThat(ingestDocument.getPropertyValue("ip", List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); - } - - public void testSplitNullValue() throws IOException { - Map split = new HashMap<>(); - split.put("not.found", "\\."); - Processor processor = new MutateProcessor(null, null, null, split, null, null, null, null, null, null); - try { - processor.execute(ingestDocument); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Cannot split field. [not.found] is null.")); - } - } - - public void testGsub() throws IOException { - List gsubExpressions = Collections.singletonList(new GsubExpression("ip", Pattern.compile("\\."), "-")); - Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("ip", String.class), equalTo("127-0-0-1")); - } - - public void testGsub_NullValue() throws IOException { - List gsubExpressions = Collections.singletonList(new GsubExpression("null_field", Pattern.compile("\\."), "-")); - Processor processor = new MutateProcessor(null, null, null, null, gsubExpressions, null, null, null, null, null); - try { - processor.execute(ingestDocument); - fail("processor execution should have failed"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Field \"null_field\" is null, cannot match pattern.")); - } - } - - public void testJoin() throws IOException { - HashMap join = new HashMap<>(); - join.put("arr", "-"); - Processor processor = new MutateProcessor(null, null, null, null, null, join, null, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("arr", String.class), equalTo("1-2-3")); - } - - public void testRemove() throws IOException { - List remove = 
Arrays.asList("foo", "ip"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, remove, null, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(5)); - assertThat(ingestDocument.getPropertyValue("foo", Object.class), nullValue()); - assertThat(ingestDocument.getPropertyValue("ip", Object.class), nullValue()); - } - - public void testTrim() throws IOException { - List trim = Arrays.asList("to_strip", "foo"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("foo", String.class), equalTo("bar")); - assertThat(ingestDocument.getPropertyValue("to_strip", String.class), equalTo("clean")); - } - - public void testTrimNullValue() throws IOException { - List trim = Collections.singletonList("not.found"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, null, trim, null, null); - try { - processor.execute(ingestDocument); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Cannot trim field. 
[not.found] is null.")); - } - } - - public void testUppercase() throws IOException { - List uppercase = Collections.singletonList("foo"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("foo", String.class), equalTo("BAR")); - } - - public void testUppercaseNullValue() throws IOException { - List uppercase = Collections.singletonList("not.found"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null); - try { - processor.execute(ingestDocument); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Cannot uppercase field. [not.found] is null.")); - } - } - - public void testLowercase() throws IOException { - List lowercase = Collections.singletonList("alpha"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(7)); - assertThat(ingestDocument.getPropertyValue("alpha", String.class), equalTo("abcd")); - } - - public void testLowercaseNullValue() throws IOException { - List lowercase = Collections.singletonList("not.found"); - Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase); - try { - processor.execute(ingestDocument); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Cannot lowercase field. 
[not.found] is null.")); - } - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java new file mode 100644 index 00000000000..2c6b0ca303c --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.remove; + +import org.elasticsearch.ingest.processor.join.JoinProcessor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class RemoveProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); + Map config = new HashMap<>(); + List fields = Collections.singletonList("field1"); + config.put("fields", fields); + RemoveProcessor removeProcessor = factory.create(config); + assertThat(removeProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java new file mode 100644 index 00000000000..4614ae2f6e1 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.remove; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class RemoveProcessorTests extends ESTestCase { + + public void testRemoveFields() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + int numFields = randomIntBetween(1, 5); + Set fields = new HashSet<>(); + for (int i = 0; i < numFields; i++) { + fields.add(RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument)); + } + Processor processor = new RemoveProcessor(fields); + processor.execute(ingestDocument); + for (String field : fields) { + assertThat(ingestDocument.getPropertyValue(field, Object.class), nullValue()); + assertThat(ingestDocument.hasPropertyValue(field), equalTo(false)); + } + } + + public void testRemoveNonExistingField() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + Processor processor = new RemoveProcessor(Collections.singletonList(RandomDocumentPicks.randomFieldName(random()))); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(0)); + } 
+} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java new file mode 100644 index 00000000000..ca8627cf67d --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.rename; + +import org.elasticsearch.ingest.processor.join.JoinProcessor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class RenameProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + RenameProcessor.Factory factory = new RenameProcessor.Factory(); + Map config = new HashMap<>(); + Map fields = Collections.singletonMap("field1", "value1"); + config.put("fields", fields); + RenameProcessor renameProcessor = factory.create(config); + assertThat(renameProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + RenameProcessor.Factory factory = new RenameProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java new file mode 100644 index 00000000000..65501acb5d3 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.rename; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class RenameProcessorTests extends ESTestCase { + + public void testRename() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + int numFields = randomIntBetween(1, 5); + Map fields = new HashMap<>(); + Map newFields = new HashMap<>(); + for (int i = 0; i < numFields; i++) { + String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + if (fields.containsKey(fieldName)) { + continue; + } + String newFieldName; + do { + newFieldName = RandomDocumentPicks.randomFieldName(random()); + } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFields.containsKey(newFieldName)); + newFields.put(newFieldName, ingestDocument.getPropertyValue(fieldName, Object.class)); + fields.put(fieldName, newFieldName); + } + Processor processor = new RenameProcessor(fields); + processor.execute(ingestDocument); + for (Map.Entry entry : newFields.entrySet()) { + assertThat(ingestDocument.getPropertyValue(entry.getKey(), Object.class), equalTo(entry.getValue())); + } + } + + public void testRenameNonExistingField() throws IOException { + 
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + Processor processor = new RenameProcessor(Collections.singletonMap(RandomDocumentPicks.randomFieldName(random()), RandomDocumentPicks.randomFieldName(random()))); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSource().size(), equalTo(0)); + } + + public void testRenameExistingFieldNullValue() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + ingestDocument.setPropertyValue(fieldName, null); + String newFieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = new RenameProcessor(Collections.singletonMap(fieldName, newFieldName)); + processor.execute(ingestDocument); + assertThat(ingestDocument.hasPropertyValue(fieldName), equalTo(false)); + assertThat(ingestDocument.hasPropertyValue(newFieldName), equalTo(true)); + assertThat(ingestDocument.getPropertyValue(newFieldName, Object.class), nullValue()); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java new file mode 100644 index 00000000000..e0ca9b34d4f --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.split; + +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class SplitProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + SplitProcessor.Factory factory = new SplitProcessor.Factory(); + Map config = new HashMap<>(); + Map fields = Collections.singletonMap("field1", "\\."); + config.put("fields", fields); + SplitProcessor splitProcessor = factory.create(config); + assertThat(splitProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + SplitProcessor.Factory factory = new SplitProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java new file mode 100644 index 00000000000..5d749e77e31 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.split; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; + +public class SplitProcessorTests extends ESTestCase { + + public void testSplit() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Map fields = new HashMap<>(); + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); + fields.put(fieldName, "\\."); + } + Processor processor = new SplitProcessor(fields); + processor.execute(ingestDocument); + for (String field : fields.keySet()) { + assertThat(ingestDocument.getPropertyValue(field, List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); + } + } + + public void testSplitNullValue() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Map split 
= Collections.singletonMap(fieldName, "\\."); + Processor processor = new SplitProcessor(split); + try { + processor.execute(ingestDocument); + fail("split processor should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot split.")); + } + } + + public void testSplitNonStringValue() throws IOException { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + ingestDocument.setPropertyValue(fieldName, randomInt()); + Processor processor = new SplitProcessor(Collections.singletonMap(fieldName, "\\.")); + try { + processor.execute(ingestDocument); + fail("split processor should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java new file mode 100644 index 00000000000..b955a929fe7 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.trim; + +import org.elasticsearch.ingest.processor.lowercase.LowercaseProcessor; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class TrimProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + TrimProcessor.Factory factory = new TrimProcessor.Factory(); + Map config = new HashMap<>(); + List fields = Collections.singletonList("field1"); + config.put("fields", fields); + TrimProcessor uppercaseProcessor = factory.create(config); + assertThat(uppercaseProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + TrimProcessor.Factory factory = new TrimProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java new file mode 100644 index 00000000000..586b9e5b4e3 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.trim; + +import org.elasticsearch.ingest.processor.AbstractStringProcessor; +import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; + +import java.util.Collection; + +public class TrimProcessorTests extends AbstractStringProcessorTestCase { + + @Override + protected AbstractStringProcessor newProcessor(Collection fields) { + return new TrimProcessor(fields); + } + + @Override + protected String modifyInput(String input) { + String updatedFieldValue = ""; + updatedFieldValue = addWhitespaces(updatedFieldValue); + updatedFieldValue += input; + updatedFieldValue = addWhitespaces(updatedFieldValue); + return updatedFieldValue; + } + + @Override + protected String expectedResult(String input) { + return input.trim(); + } + + private static String addWhitespaces(String input) { + int prefixLength = randomIntBetween(0, 10); + for (int i = 0; i < prefixLength; i++) { + input += ' '; + } + return input; + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java new file mode 
100644 index 00000000000..822921ea16c --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.uppercase; + +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class UppercaseProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws IOException { + UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); + Map config = new HashMap<>(); + List fields = Collections.singletonList("field1"); + config.put("fields", fields); + UppercaseProcessor uppercaseProcessor = factory.create(config); + assertThat(uppercaseProcessor.getFields(), equalTo(fields)); + } + + public void testCreateMissingFields() throws IOException { + UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } 
catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java new file mode 100644 index 00000000000..f23f21ece74 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.uppercase; + +import org.elasticsearch.ingest.processor.AbstractStringProcessor; +import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; + +import java.util.Collection; +import java.util.Locale; + +public class UppercaseProcessorTests extends AbstractStringProcessorTestCase { + + @Override + protected AbstractStringProcessor newProcessor(Collection fields) { + return new UppercaseProcessor(fields); + } + + @Override + protected String expectedResult(String input) { + return input.toUpperCase(Locale.ROOT); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 5b7e02bec28..559dd54487d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.env.Environment; -import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.test.ESTestCase; @@ -37,9 +36,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -56,7 +53,7 @@ public class PipelineStoreTests extends ESTestCase { ClusterService clusterService = mock(ClusterService.class); client = mock(PipelineStoreClient.class); 
Environment environment = mock(Environment.class); - store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.singletonMap(MutateProcessor.TYPE, new MutateProcessor.Factory())); + store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.emptyMap()); store.start(); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 7185db944b2..e262c279909 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.mutate.MutateProcessor; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; import org.elasticsearch.plugin.ingest.PipelineStore; @@ -43,7 +42,9 @@ import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.util.*; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; import static org.elasticsearch.plugin.ingest.transport.IngestActionFilter.*; import static org.hamcrest.Matchers.equalTo; @@ -115,7 +116,7 @@ public class IngestActionFilterTests extends ESTestCase { verifyZeroInteractions(executionService, actionListener); } - public void testApply_executed() throws Exception { + public void testApplyExecuted() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); 
indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); @@ -139,7 +140,7 @@ public class IngestActionFilterTests extends ESTestCase { verifyZeroInteractions(actionListener); } - public void testApply_failed() throws Exception { + public void testApplyFailed() throws Exception { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); @@ -163,7 +164,7 @@ public class IngestActionFilterTests extends ESTestCase { verifyZeroInteractions(actionFilterChain); } - public void testApply_withBulkRequest() throws Exception { + public void testApplyWithBulkRequest() throws Exception { ThreadPool threadPool = new ThreadPool( Settings.builder() .put("name", "_name") @@ -172,13 +173,18 @@ public class IngestActionFilterTests extends ESTestCase { ); PipelineStore store = mock(PipelineStore.class); - Map mutateConfig = new HashMap<>(); - Map update = new HashMap<>(); - update.put("field2", "value2"); - mutateConfig.put("update", update); + Processor processor = new Processor() { + @Override + public void execute(IngestDocument ingestDocument) { + ingestDocument.setPropertyValue("field2", "value2"); + } - Processor mutateProcessor = (new MutateProcessor.Factory()).create(mutateConfig); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(mutateProcessor))); + @Override + public String getType() { + return null; + } + }; + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); executionService = new PipelineExecutionService(store, threadPool); filter = new IngestActionFilter(Settings.EMPTY, executionService); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index daf13a34c1b..c76500eda11 100644 --- 
a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -12,8 +12,8 @@ "description": "_description", "processors": [ { - "mutate" : { - "update" : { + "add" : { + "fields" : { "field2": "_value" } } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml new file mode 100644 index 00000000000..850d775fdc9 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml @@ -0,0 +1,132 @@ +--- +"Test mutate processors": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "add" : { + "fields" : { + "new_field": "new_value" + } + } + }, + { + "rename" : { + "fields" : { + "field_to_rename": "renamed_field" + } + } + }, + { + "remove" : { + "fields" : [ + "field_to_remove" + ] + } + }, + { + "lowercase" : { + "fields" : [ + "field_to_lowercase" + ] + } + }, + { + "uppercase" : { + "fields" : [ + "field_to_uppercase" + ] + } + }, + { + "trim" : { + "fields" : [ + "field_to_trim" + ] + } + }, + { + "split" : { + "fields" : { + "field_to_split": "-" + } + } + }, + { + "join" : { + "fields" : { + "field_to_join": "-" + } + } + }, + { + "convert" : { + "fields" : { + "field_to_convert": "integer" + } + } + }, + { + "gsub" : { + "expressions" : [ + { + "field": "field_to_gsub", + "pattern" : "-", + "replacement" : "." 
+ } + ] + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: { + field_to_rename: "value", + field_to_remove: "old_value", + field_to_lowercase: "LOWERCASE", + field_to_uppercase: "uppercase", + field_to_trim: " trimmed ", + field_to_split: "127-0-0-1", + field_to_join: ["127","0","0","1"], + field_to_convert: ["127","0","0","1"], + field_to_gsub: "127-0-0-1" + } + + - do: + get: + index: test + type: test + id: 1 + - is_false: _source.field_to_rename + - is_false: _source.field_to_remove + - match: { _source.renamed_field: "value" } + - match: { _source.field_to_lowercase: "lowercase" } + - match: { _source.field_to_uppercase: "UPPERCASE" } + - match: { _source.field_to_trim: "trimmed" } + - match: { _source.field_to_split: ["127","0","0","1"] } + - match: { _source.field_to_join: "127-0-0-1" } + - match: { _source.field_to_convert: [127,0,0,1] } + - match: { _source.field_to_gsub: "127.0.0.1" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml deleted file mode 100644 index 4ab6ba652ca..00000000000 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate_processor.yaml +++ /dev/null @@ -1,50 +0,0 @@ ---- -"Test mutate processor": - - do: - cluster.health: - wait_for_status: green - - - do: - ingest.put_pipeline: - id: "my_pipeline" - body: > - { - "description": "_description", - "processors": [ - { - "mutate" : { - "rename" : { - "field1": "foo" - }, - "update" : { - "field2": "bar" - } - } - } - ] - } - - match: { _id: "my_pipeline" } - - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: 
- catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - - do: - ingest.index: - index: test - type: test - id: 1 - pipeline_id: "my_pipeline" - body: {field1: "val"} - - - do: - get: - index: test - type: test - id: 1 - - match: { _source.foo: "val" } - - match: { _source.field2: "bar" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index c27d1438030..9aecdaf0ff4 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -12,8 +12,8 @@ "description": "_description", "processors": [ { - "mutate" : { - "update" : { + "add" : { + "fields" : { "field2" : "_value" } } @@ -66,8 +66,8 @@ "description": "_description", "processors": [ { - "mutate" : { - "update" : { + "add" : { + "fields" : { "field2" : "_value" } } @@ -129,15 +129,15 @@ "description": "_description", "processors": [ { - "mutate" : { - "update" : { + "add" : { + "fields" : { "field2" : "_value" } } }, { - "mutate" : { - "update" : { + "add" : { + "fields" : { "field3" : "third_val" } } @@ -157,7 +157,7 @@ } - length: { docs: 1 } - length: { docs.0.processor_results: 2 } - - match: { docs.0.processor_results.0.processor_id: "processor[mutate]-0" } + - match: { docs.0.processor_results.0.processor_id: "processor[add]-0" } - is_true: docs.0.processor_results.0.doc.modified - length: { docs.0.processor_results.0.doc._source: 2 } - match: { docs.0.processor_results.0.doc._source.foo: "bar" } @@ -181,8 +181,8 @@ "description": "_description", "processors": [ { - "mutate" : { - "uppercase" : ["foo"] + "uppercase" : { + "fields" : ["foo"] } } ] @@ -226,15 +226,15 @@ "description": "_description", "processors": [ { - "mutate" : { - "convert" : { + "convert" : { + "fields" : { "foo": "integer" } } }, { - "mutate" : { - 
"uppercase" : ["bar"] + "uppercase" : { + "fields" : ["bar"] } } ] @@ -262,7 +262,7 @@ } - length: { docs: 2 } - length: { docs.0.processor_results: 2 } - - match: { docs.0.processor_results.0.error.type: "number_format_exception" } + - match: { docs.0.processor_results.0.error.type: "illegal_argument_exception" } - match: { docs.0.processor_results.1.doc._index: "index" } - match: { docs.0.processor_results.1.doc._type: "type" } - match: { docs.0.processor_results.1.doc._id: "id" } From 49bfe6410e50621a8be98679aac3bfed7eed6378 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 24 Nov 2015 15:49:25 +0100 Subject: [PATCH 076/347] Rename Data leftovers --- ...Data.java => WriteableIngestDocument.java} | 16 +++++------ .../SimulateDocumentSimpleResult.java | 22 +++++++-------- .../simulate/SimulateProcessorResult.java | 22 +++++++-------- .../elasticsearch/ingest/IngestClientIT.java | 3 +- ...ataTests.java => IngestDocumentTests.java} | 4 +-- ...java => WriteableIngestDocumentTests.java} | 28 +++++++++---------- .../SimulateDocumentSimpleResultTests.java | 2 +- .../SimulateExecutionServiceTests.java | 18 ++++++------ .../SimulatePipelineResponseTests.java | 4 +-- .../SimulateProcessorResultTests.java | 2 +- 10 files changed, 60 insertions(+), 61 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/{TransportData.java => WriteableIngestDocument.java} (84%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/{DataTests.java => IngestDocumentTests.java} (99%) rename plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/{TransportDataTests.java => WriteableIngestDocumentTests.java} (67%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java similarity index 84% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java 
rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java index bb26161582a..ec7ea798a86 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/TransportData.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java @@ -35,31 +35,31 @@ import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; import static org.elasticsearch.ingest.IngestDocument.MetaData.INDEX; import static org.elasticsearch.ingest.IngestDocument.MetaData.TYPE; -public class TransportData implements Writeable, ToXContent { +public class WriteableIngestDocument implements Writeable, ToXContent { - private static final TransportData PROTOTYPE = new TransportData(null); + private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(null); private final IngestDocument ingestDocument; - public TransportData(IngestDocument ingestDocument) { + public WriteableIngestDocument(IngestDocument ingestDocument) { this.ingestDocument = ingestDocument; } - public IngestDocument get() { + public IngestDocument getIngestDocument() { return ingestDocument; } - public static TransportData readTransportDataFrom(StreamInput in) throws IOException { + public static WriteableIngestDocument readWriteableIngestDocumentFrom(StreamInput in) throws IOException { return PROTOTYPE.readFrom(in); } @Override - public TransportData readFrom(StreamInput in) throws IOException { + public WriteableIngestDocument readFrom(StreamInput in) throws IOException { String index = in.readString(); String type = in.readString(); String id = in.readString(); Map doc = in.readMap(); - return new TransportData(new IngestDocument(index, type, id, doc)); + return new WriteableIngestDocument(new IngestDocument(index, type, id, doc)); } @Override @@ -90,7 +90,7 @@ public class TransportData implements Writeable, ToXContent { if (o == null || getClass() != o.getClass()) { return false; } 
- TransportData that = (TransportData) o; + WriteableIngestDocument that = (WriteableIngestDocument) o; return Objects.equals(ingestDocument, that.ingestDocument); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java index 8cf08e3dc61..87f2b42ccaf 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.plugin.ingest.transport.TransportData; +import org.elasticsearch.plugin.ingest.transport.WriteableIngestDocument; import java.io.IOException; @@ -31,26 +31,26 @@ public class SimulateDocumentSimpleResult implements SimulateDocumentResult document = new HashMap<>(); document.put("foo", "bar"); document.put("int", 123); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocumentTests.java similarity index 67% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocumentTests.java index 165ea62e1f2..168afa40ccf 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/TransportDataTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocumentTests.java @@ -31,7 +31,7 @@ 
import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class TransportDataTests extends ESTestCase { +public class WriteableIngestDocumentTests extends ESTestCase { public void testEqualsAndHashcode() throws Exception { String index = randomAsciiOfLengthBetween(1, 10); @@ -39,7 +39,7 @@ public class TransportDataTests extends ESTestCase { String id = randomAsciiOfLengthBetween(1, 10); String fieldName = randomAsciiOfLengthBetween(1, 10); String fieldValue = randomAsciiOfLengthBetween(1, 10); - TransportData transportData = new TransportData(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); + WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); boolean changed = false; String otherIndex; @@ -71,28 +71,28 @@ public class TransportDataTests extends ESTestCase { document = Collections.singletonMap(fieldName, fieldValue); } - TransportData otherTransportData = new TransportData(new IngestDocument(otherIndex, otherType, otherId, document)); + WriteableIngestDocument otherWriteableIngestDocument = new WriteableIngestDocument(new IngestDocument(otherIndex, otherType, otherId, document)); if (changed) { - assertThat(transportData, not(equalTo(otherTransportData))); - assertThat(otherTransportData, not(equalTo(transportData))); + assertThat(writeableIngestDocument, not(equalTo(otherWriteableIngestDocument))); + assertThat(otherWriteableIngestDocument, not(equalTo(writeableIngestDocument))); } else { - assertThat(transportData, equalTo(otherTransportData)); - assertThat(otherTransportData, equalTo(transportData)); - TransportData thirdTransportData = new TransportData(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); - assertThat(thirdTransportData, equalTo(transportData)); - assertThat(transportData, equalTo(thirdTransportData)); + 
assertThat(writeableIngestDocument, equalTo(otherWriteableIngestDocument)); + assertThat(otherWriteableIngestDocument, equalTo(writeableIngestDocument)); + WriteableIngestDocument thirdWriteableIngestDocument = new WriteableIngestDocument(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); + assertThat(thirdWriteableIngestDocument, equalTo(writeableIngestDocument)); + assertThat(writeableIngestDocument, equalTo(thirdWriteableIngestDocument)); } } public void testSerialization() throws IOException { IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); - TransportData transportData = new TransportData(ingestDocument); + WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(ingestDocument); BytesStreamOutput out = new BytesStreamOutput(); - transportData.writeTo(out); + writeableIngestDocument.writeTo(out); StreamInput streamInput = StreamInput.wrap(out.bytes()); - TransportData otherTransportData = TransportData.readTransportDataFrom(streamInput); - assertThat(otherTransportData, equalTo(transportData)); + WriteableIngestDocument otherWriteableIngestDocument = WriteableIngestDocument.readWriteableIngestDocumentFrom(streamInput); + assertThat(otherWriteableIngestDocument, equalTo(writeableIngestDocument)); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java index 183aed0a5a4..7954b4a51a9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java @@ -48,7 +48,7 @@ public class SimulateDocumentSimpleResultTests extends ESTestCase { StreamInput streamInput = StreamInput.wrap(out.bytes()); SimulateDocumentSimpleResult otherSimulateDocumentSimpleResult = SimulateDocumentSimpleResult.readSimulateDocumentSimpleResult(streamInput); - assertThat(otherSimulateDocumentSimpleResult.getData(), equalTo(simulateDocumentSimpleResult.getData())); + assertThat(otherSimulateDocumentSimpleResult.getIngestDocument(), equalTo(simulateDocumentSimpleResult.getIngestDocument())); if (isFailure) { assertThat(otherSimulateDocumentSimpleResult.getFailure(), instanceOf(IllegalArgumentException.class)); IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentSimpleResult.getFailure(); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index 901ecb29dca..bc6de44fd5c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -68,12 +68,12 @@ public class SimulateExecutionServiceTests extends ESTestCase { SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), not(sameInstance(ingestDocument))); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), 
equalTo(ingestDocument)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), not(sameInstance(ingestDocument))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), nullValue()); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(ingestDocument))); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(ingestDocument)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); } @@ -82,7 +82,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { verify(processor, times(2)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; - assertThat(simulateDocumentSimpleResult.getData(), equalTo(ingestDocument)); + assertThat(simulateDocumentSimpleResult.getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } @@ -95,13 +95,13 @@ public class SimulateExecutionServiceTests extends ESTestCase { SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); 
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getData(), nullValue()); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), nullValue()); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), instanceOf(RuntimeException.class)); RuntimeException runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); assertThat(runtimeException.getMessage(), equalTo("processor failed")); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), not(sameInstance(ingestDocument))); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getData(), equalTo(ingestDocument)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); assertThat(runtimeException.getMessage(), equalTo("processor failed")); @@ -114,7 +114,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { verify(processor, times(1)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; - assertThat(simulateDocumentSimpleResult.getData(), nullValue()); + assertThat(simulateDocumentSimpleResult.getIngestDocument(), nullValue()); 
assertThat(simulateDocumentSimpleResult.getFailure(), instanceOf(RuntimeException.class)); RuntimeException runtimeException = (RuntimeException) simulateDocumentSimpleResult.getFailure(); assertThat(runtimeException.getMessage(), equalTo("processor failed")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java index ab7803c719d..1e4ca765970 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java @@ -91,7 +91,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { for (SimulateProcessorResult simulateProcessorResult : simulateDocumentVerboseResult.getProcessorResults()) { SimulateProcessorResult expectedProcessorResult = expectedProcessorResultIterator.next(); assertThat(simulateProcessorResult.getProcessorId(), equalTo(expectedProcessorResult.getProcessorId())); - assertThat(simulateProcessorResult.getData(), equalTo(expectedProcessorResult.getData())); + assertThat(simulateProcessorResult.getIngestDocument(), equalTo(expectedProcessorResult.getIngestDocument())); if (expectedProcessorResult.getFailure() == null) { assertThat(simulateProcessorResult.getFailure(), nullValue()); } else { @@ -104,7 +104,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { SimulateDocumentSimpleResult expectedSimulateDocumentSimpleResult = (SimulateDocumentSimpleResult) expectedResultIterator.next(); assertThat(result, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) result; - assertThat(simulateDocumentSimpleResult.getData(), equalTo(expectedSimulateDocumentSimpleResult.getData())); + 
assertThat(simulateDocumentSimpleResult.getIngestDocument(), equalTo(expectedSimulateDocumentSimpleResult.getIngestDocument())); if (expectedSimulateDocumentSimpleResult.getFailure() == null) { assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } else { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java index d347c6749e3..7f7925f451e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java @@ -49,7 +49,7 @@ public class SimulateProcessorResultTests extends ESTestCase { StreamInput streamInput = StreamInput.wrap(out.bytes()); SimulateProcessorResult otherSimulateProcessorResult = SimulateProcessorResult.readSimulateProcessorResultFrom(streamInput); assertThat(otherSimulateProcessorResult.getProcessorId(), equalTo(simulateProcessorResult.getProcessorId())); - assertThat(otherSimulateProcessorResult.getData(), equalTo(simulateProcessorResult.getData())); + assertThat(otherSimulateProcessorResult.getIngestDocument(), equalTo(simulateProcessorResult.getIngestDocument())); if (isFailure) { assertThat(otherSimulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class)); IllegalArgumentException e = (IllegalArgumentException) otherSimulateProcessorResult.getFailure(); From 388e637fa9aaeb0bca1ca93077b86e790047c907 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 24 Nov 2015 19:43:54 +0100 Subject: [PATCH 077/347] add a few more asserts to IngestActionFilterTests --- .../transport/IngestActionFilterTests.java | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index e262c279909..176dd4530b9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -47,9 +47,7 @@ import java.util.HashSet; import java.util.Set; import static org.elasticsearch.plugin.ingest.transport.IngestActionFilter.*; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.*; @@ -254,13 +252,19 @@ public class IngestActionFilterTests extends ESTestCase { }; doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); - @SuppressWarnings("unchecked") - ActionListener actionListener = mock(ActionListener.class); + CaptureActionListener actionListener = new CaptureActionListener(); RecordRequestAFC actionFilterChain = new RecordRequestAFC(); filter.apply("_action", bulkRequest, actionListener, actionFilterChain); - verify(actionListener, times(1)).onResponse(any()); + assertThat(actionFilterChain.request, nullValue()); + ActionResponse response = actionListener.response; + assertThat(response, instanceOf(BulkResponse.class)); + BulkResponse bulkResponse = (BulkResponse) response; + assertThat(bulkResponse.getItems().length, equalTo(numRequest)); + for (BulkItemResponse bulkItemResponse : bulkResponse) { + assertThat(bulkItemResponse.isFailed(), equalTo(true)); + } } public void testApplyWithBulkRequestWithFailure() throws Exception { @@ -269,20 +273,20 @@ public class IngestActionFilterTests 
extends ESTestCase { int numRequest = scaledRandomIntBetween(8, 64); int numNonIndexRequests = 0; for (int i = 0; i < numRequest; i++) { - if (i % 2 == 0) { + ActionRequest request; + if (randomBoolean()) { numNonIndexRequests++; - ActionRequest request; if (randomBoolean()) { request = new DeleteRequest("_index", "_type", "_id"); } else { request = new UpdateRequest("_index", "_type", "_id"); } - bulkRequest.add(request); } else { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field1", "value1"); - bulkRequest.add(indexRequest); + request = indexRequest; } + bulkRequest.add(request); } RuntimeException exception = new RuntimeException(); @@ -316,7 +320,7 @@ public class IngestActionFilterTests extends ESTestCase { int i = 0; Set failedSlots = new HashSet<>(); while (bulkRequestModifier.hasNext()) { - IndexRequest indexRequest = (IndexRequest) bulkRequestModifier.next(); + bulkRequestModifier.next(); if (randomBoolean()) { bulkRequestModifier.markCurrentItemAsFailed(new RuntimeException()); failedSlots.add(i); @@ -358,6 +362,7 @@ public class IngestActionFilterTests extends ESTestCase { } + @SuppressWarnings("unchecked") public > T getRequest() { return (T) request; } From e0fcee642ef2cd04ce6bf9ffa7a042aaee13451f Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 25 Nov 2015 10:26:46 +0100 Subject: [PATCH 078/347] [TEST] fix locale comparison --- .../ingest/processor/date/DateProcessorFactoryTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java index e637bf19012..a852c597d86 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java @@ -85,7 
+85,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("locale", locale.toLanguageTag()); DateProcessor processor = factory.create(config); - assertThat(processor.getLocale(), equalTo(locale)); + assertThat(processor.getLocale().toLanguageTag(), equalTo(locale.toLanguageTag())); } public void testParseTimezone() throws Exception { From 5daa73b3508c74f27f29f99b666316f31e4c69ee Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 25 Nov 2015 14:53:20 +0100 Subject: [PATCH 079/347] date formats: use a function instead of our own interface Also turn the different date formats into an enum. --- .../ingest/processor/date/DateFormat.java | 99 +++++++++++++++++++ .../ingest/processor/date/DateParser.java | 35 ------- .../processor/date/DateParserFactory.java | 48 --------- .../ingest/processor/date/DateProcessor.java | 21 ++-- .../processor/date/ISO8601DateParser.java | 39 -------- .../processor/date/JodaPatternDateParser.java | 43 -------- .../processor/date/TAI64NDateParser.java | 47 --------- .../ingest/processor/date/UnixDateParser.java | 40 -------- .../processor/date/UnixMsDateParser.java | 36 ------- .../processor/date/DateFormatTests.java | 84 ++++++++++++++++ .../processor/date/DateParserTests.java | 75 -------------- .../processor/date/DateProcessorTests.java | 6 +- 12 files changed, 199 insertions(+), 374 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateFormat.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java delete mode 100644 
plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateFormatTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateFormat.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateFormat.java new file mode 100644 index 00000000000..224d43f238f --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateFormat.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.date; + +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.ISODateTimeFormat; + +import java.util.Locale; +import java.util.Optional; +import java.util.function.Function; + +enum DateFormat { + Iso8601 { + @Override + Function getFunction(DateTimeZone timezone) { + return ISODateTimeFormat.dateTimeParser().withZone(timezone)::parseDateTime; + } + }, + Unix { + @Override + Function getFunction(DateTimeZone timezone) { + return (date) -> new DateTime((long)(Float.parseFloat(date) * 1000), timezone); + } + }, + UnixMs { + @Override + Function getFunction(DateTimeZone timezone) { + return (date) -> new DateTime(Long.parseLong(date), timezone); + } + + @Override + public String toString() { + return "UNIX_MS"; + } + }, + Tai64n { + @Override + Function getFunction(DateTimeZone timezone) { + return (date) -> new DateTime(parseMillis(date), timezone); + } + + private long parseMillis(String date) { + if (date.startsWith("@")) { + date = date.substring(1); + } + long base = Long.parseLong(date.substring(1, 16), 16); + // 1356138046000 + long rest = Long.parseLong(date.substring(16, 24), 16); + return ((base * 1000) - 10000) + (rest/1000000); + } + }; + + abstract Function getFunction(DateTimeZone timezone); + + static Optional fromString(String format) { + switch (format) { + case "ISO8601": + return Optional.of(Iso8601); + case "UNIX": + return Optional.of(Unix); + case "UNIX_MS": + return Optional.of(UnixMs); + case "TAI64N": + return Optional.of(Tai64n); + default: + return Optional.empty(); + } + } + + static Function getJodaFunction(String matchFormat, DateTimeZone timezone, Locale locale) { + return DateTimeFormat.forPattern(matchFormat) + .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) + .withZone(timezone).withLocale(locale)::parseDateTime; + } + + @Override + public String toString() { + return 
name().toUpperCase(Locale.ROOT); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java deleted file mode 100644 index c062dbdac37..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParser.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.joda.time.DateTime; - -/** - * Parser for dates provided as strings. Parses into a joda {@link DateTime} object. - * We use our own joda wrapper as we support some formats that are not supported directly by joda. 
- * - */ -public interface DateParser { - - /** - * Parser the date provided as a string argument into a joda {@link DateTime} object - */ - DateTime parseDateTime(String date); -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java deleted file mode 100644 index c0041c4ca56..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateParserFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ingest.processor.date; - -import org.joda.time.DateTimeZone; - -import java.util.Locale; - -public class DateParserFactory { - - public static final String ISO8601 = "ISO8601"; - public static final String UNIX = "UNIX"; - public static final String UNIX_MS = "UNIX_MS"; - public static final String TAI64N = "TAI64N"; - - public static DateParser createDateParser(String format, DateTimeZone timezone, Locale locale) { - switch(format) { - case ISO8601: - // TODO(talevy): fallback solution for almost ISO8601 - return new ISO8601DateParser(timezone); - case UNIX: - return new UnixDateParser(timezone); - case UNIX_MS: - return new UnixMsDateParser(timezone); - case TAI64N: - return new TAI64NDateParser(timezone); - default: - return new JodaPatternDateParser(format, timezone, locale); - } - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 6f2016fc5c5..8c5340065ba 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -26,10 +26,8 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; -import java.util.Map; +import java.util.*; +import java.util.function.Function; public final class DateProcessor implements Processor { @@ -41,7 +39,7 @@ public final class DateProcessor implements Processor { private final String matchField; private final String targetField; private final List matchFormats; - private final List dateParsers; + private final List> dateParsers; DateProcessor(DateTimeZone timezone, Locale locale, String matchField, List matchFormats, String targetField) { this.timezone = timezone; @@ -51,7 +49,14 
@@ public final class DateProcessor implements Processor { this.matchFormats = matchFormats; this.dateParsers = new ArrayList<>(); for (String matchFormat : matchFormats) { - dateParsers.add(DateParserFactory.createDateParser(matchFormat, timezone, locale)); + Optional dateFormat = DateFormat.fromString(matchFormat); + Function stringToDateFunction; + if (dateFormat.isPresent()) { + stringToDateFunction = dateFormat.get().getFunction(timezone); + } else { + stringToDateFunction = DateFormat.getJodaFunction(matchFormat, timezone, locale); + } + dateParsers.add(stringToDateFunction); } } @@ -62,9 +67,9 @@ public final class DateProcessor implements Processor { DateTime dateTime = null; Exception lastException = null; - for (DateParser dateParser : dateParsers) { + for (Function dateParser : dateParsers) { try { - dateTime = dateParser.parseDateTime(value); + dateTime = dateParser.apply(value); } catch(Exception e) { //try the next parser and keep track of the last exception lastException = e; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java deleted file mode 100644 index f480daba61c..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/ISO8601DateParser.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; - -public class ISO8601DateParser implements DateParser { - - private final DateTimeFormatter formatter; - - public ISO8601DateParser(DateTimeZone timezone) { - this.formatter = ISODateTimeFormat.dateTimeParser().withZone(timezone); - } - - @Override - public DateTime parseDateTime(String date) { - return formatter.parseDateTime(date); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java deleted file mode 100644 index a17c8f1703a..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/JodaPatternDateParser.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; - -import java.util.Locale; - -public class JodaPatternDateParser implements DateParser { - - private final DateTimeFormatter formatter; - - public JodaPatternDateParser(String format, DateTimeZone timezone, Locale locale) { - formatter = DateTimeFormat.forPattern(format) - .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) - .withZone(timezone).withLocale(locale); - } - - @Override - public DateTime parseDateTime(String date) { - return formatter.parseDateTime(date); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java deleted file mode 100644 index 5e199595a7a..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/TAI64NDateParser.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; - -public class TAI64NDateParser implements DateParser { - private DateTimeZone timezone; - - public TAI64NDateParser(DateTimeZone timezone) { - this.timezone = timezone; - } - - @Override - public DateTime parseDateTime(String date) { - return new DateTime(parseMillis(date), timezone); - } - - private static long parseMillis(String date) { - if (date.startsWith("@")) { - date = date.substring(1); - } - long base = Long.parseLong(date.substring(1, 16), 16); - // 1356138046000 - long rest = Long.parseLong(date.substring(16, 24), 16); - - return ((base * 1000) - 10000) + (rest/1000000); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java deleted file mode 100644 index 83b4168b6cc..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixDateParser.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; - -public class UnixDateParser implements DateParser { - private final DateTimeZone timezone; - - public UnixDateParser(DateTimeZone timezone) { - this.timezone = timezone; - } - - @Override - public DateTime parseDateTime(String date) { - return new DateTime(parseMillis(date), timezone); - } - - private static long parseMillis(String date) { - return (long) (Float.parseFloat(date) * 1000); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java deleted file mode 100644 index 8e5e5b167b5..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/UnixMsDateParser.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; - -public class UnixMsDateParser implements DateParser { - private final DateTimeZone timezone; - - public UnixMsDateParser(DateTimeZone timezone) { - this.timezone = timezone; - } - - @Override - public DateTime parseDateTime(String date) { - return new DateTime(Long.parseLong(date), timezone); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateFormatTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateFormatTests.java new file mode 100644 index 00000000000..a3f5c337fca --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateFormatTests.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.date; + +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.Locale; +import java.util.Optional; +import java.util.function.Function; + +import static org.hamcrest.core.IsEqual.equalTo; + +public class DateFormatTests extends ESTestCase { + + public void testParseJoda() { + Function jodaFunction = DateFormat.getJodaFunction("MMM dd HH:mm:ss Z", DateTimeZone.forOffsetHours(-8), Locale.ENGLISH); + assertThat(Instant.ofEpochMilli(jodaFunction.apply("Nov 24 01:29:01 -0800").getMillis()) + .atZone(ZoneId.of("GMT-8")) + .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), + equalTo("11 24 01:29:01")); + } + + public void testParseUnixMs() { + assertThat(DateFormat.UnixMs.getFunction(DateTimeZone.UTC).apply("1000500").getMillis(), equalTo(1000500L)); + } + + public void testParseUnix() { + assertThat(DateFormat.Unix.getFunction(DateTimeZone.UTC).apply("1000.5").getMillis(), equalTo(1000500L)); + } + + public void testParseISO8601() { + assertThat(DateFormat.Iso8601.getFunction(DateTimeZone.UTC).apply("2001-01-01T00:00:00-0800").getMillis(), equalTo(978336000000L)); + } + + public void testParseISO8601Failure() { + Function function = DateFormat.Iso8601.getFunction(DateTimeZone.UTC); + try { + function.apply("2001-01-0:00-0800"); + fail("parse should have failed"); + } catch(IllegalArgumentException e) { + //all good + } + } + + public void testTAI64NParse() { + String input = "4000000050d506482dbdf024"; + String expected = "2012-12-22T03:00:46.767+02:00"; + assertThat(DateFormat.Tai64n.getFunction(DateTimeZone.forOffsetHours(2)).apply((randomBoolean() ? 
"@" : "") + input).toString(), equalTo(expected)); + } + + public void testFromString() { + assertThat(DateFormat.fromString("UNIX_MS"), equalTo(Optional.of(DateFormat.UnixMs))); + assertThat(DateFormat.fromString("unix_ms"), equalTo(Optional.empty())); + assertThat(DateFormat.fromString("UNIX"), equalTo(Optional.of(DateFormat.Unix))); + assertThat(DateFormat.fromString("unix"), equalTo(Optional.empty())); + assertThat(DateFormat.fromString("ISO8601"), equalTo(Optional.of(DateFormat.Iso8601))); + assertThat(DateFormat.fromString("iso8601"), equalTo(Optional.empty())); + assertThat(DateFormat.fromString("TAI64N"), equalTo(Optional.of(DateFormat.Tai64n))); + assertThat(DateFormat.fromString("tai64n"), equalTo(Optional.empty())); + assertThat(DateFormat.fromString("prefix-" + randomAsciiOfLengthBetween(1, 10)), equalTo(Optional.empty())); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java deleted file mode 100644 index 19c0e8dba8e..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateParserTests.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.date; - -import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; - -import java.time.Instant; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.util.Locale; - -import static org.hamcrest.core.IsEqual.equalTo; - -public class DateParserTests extends ESTestCase { - - public void testJodaPatternParse() { - JodaPatternDateParser parser = new JodaPatternDateParser("MMM dd HH:mm:ss Z", - DateTimeZone.forOffsetHours(-8), Locale.ENGLISH); - - assertThat(Instant.ofEpochMilli(parser.parseDateTime("Nov 24 01:29:01 -0800").getMillis()) - .atZone(ZoneId.of("GMT-8")) - .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), - equalTo("11 24 01:29:01")); - } - - public void testParseUnixMs() { - UnixMsDateParser parser = new UnixMsDateParser(DateTimeZone.UTC); - assertThat(parser.parseDateTime("1000500").getMillis(), equalTo(1000500L)); - } - - public void testUnixParse() { - UnixDateParser parser = new UnixDateParser(DateTimeZone.UTC); - assertThat(parser.parseDateTime("1000.5").getMillis(), equalTo(1000500L)); - } - - public void testParseISO8601() { - ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); - assertThat(parser.parseDateTime("2001-01-01T00:00:00-0800").getMillis(), equalTo(978336000000L)); - } - - public void testParseISO8601Failure() { - ISO8601DateParser parser = new ISO8601DateParser(DateTimeZone.UTC); - try { - parser.parseDateTime("2001-01-0:00-0800"); - fail("parse should have failed"); - } catch(IllegalArgumentException e) { - //all good - } - } - - public void testTAI64NParse() { - TAI64NDateParser parser = new TAI64NDateParser(DateTimeZone.forOffsetHours(2)); - String input = "4000000050d506482dbdf024"; - String expected = "2012-12-22T03:00:46.767+02:00"; - assertThat(parser.parseDateTime((randomBoolean() ? 
"@" : "") + input).toString(), equalTo(expected)); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java index 8b8c1da25f1..10514ee46a9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -100,7 +100,7 @@ public class DateProcessorTests extends ESTestCase { public void testTAI64N() { DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forOffsetHours(2), randomLocale(random()), - "date_as_string", Collections.singletonList(DateParserFactory.TAI64N), "date_as_date"); + "date_as_string", Collections.singletonList(DateFormat.Tai64n.toString()), "date_as_date"); Map document = new HashMap<>(); String dateAsString = (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024"; document.put("date_as_string", dateAsString); @@ -111,7 +111,7 @@ public class DateProcessorTests extends ESTestCase { public void testUnixMs() { DateProcessor dateProcessor = new DateProcessor(DateTimeZone.UTC, randomLocale(random()), - "date_as_string", Collections.singletonList(DateParserFactory.UNIX_MS), "date_as_date"); + "date_as_string", Collections.singletonList(DateFormat.UnixMs.toString()), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); @@ -121,7 +121,7 @@ public class DateProcessorTests extends ESTestCase { public void testUnix() { DateProcessor dateProcessor = new DateProcessor(DateTimeZone.UTC, randomLocale(random()), - "date_as_string", Collections.singletonList(DateParserFactory.UNIX), "date_as_date"); + "date_as_string", Collections.singletonList(DateFormat.Unix.toString()), "date_as_date"); Map document = new HashMap<>(); 
document.put("date_as_string", "1000.5"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); From c4cf55c19669023371be74570ff6c939ae0ef1a8 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 25 Nov 2015 15:58:01 +0100 Subject: [PATCH 080/347] [TEST] generate random timezone out of the available ones in joda --- .../processor/date/DateProcessorFactoryTests.java | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java index a852c597d86..62f726cf0cd 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java @@ -94,13 +94,21 @@ public class DateProcessorFactoryTests extends ESTestCase { String sourceField = randomAsciiOfLengthBetween(1, 10); config.put("match_field", sourceField); config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); - DateTimeZone timeZone = DateTimeZone.forTimeZone(randomTimeZone(random())); - config.put("timezone", timeZone.getID()); + DateTimeZone timezone = randomTimezone(); + config.put("timezone", timezone.getID()); DateProcessor processor = factory.create(config); - assertThat(processor.getTimezone(), equalTo(timeZone)); + assertThat(processor.getTimezone(), equalTo(timezone)); } + //we generate a timezone out of the available ones in joda, some available in the jdk are not available in joda by default + private static DateTimeZone randomTimezone() { + List ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); + Collections.sort(ids); + return DateTimeZone.forID(randomFrom(ids)); + } + + public void testParseMatchFormats() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = 
new HashMap<>(); From ec162c458e0a5581b2c79b337d4f7a3b0a600f4a Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 25 Nov 2015 16:08:32 +0100 Subject: [PATCH 081/347] Replace property with field in IngestDocument getPropertyValue => getFieldValue hasPropertyValue => hasFieldValue setPropertyValue => setFieldValue removeProperty => removeField --- .../elasticsearch/ingest/IngestDocument.java | 28 ++-- .../processor/AbstractStringProcessor.java | 4 +- .../ingest/processor/add/AddProcessor.java | 2 +- .../processor/convert/ConvertProcessor.java | 4 +- .../ingest/processor/date/DateProcessor.java | 4 +- .../processor/geoip/GeoIpProcessor.java | 4 +- .../ingest/processor/grok/GrokProcessor.java | 4 +- .../ingest/processor/gsub/GsubProcessor.java | 4 +- .../ingest/processor/join/JoinProcessor.java | 4 +- .../processor/remove/RemoveProcessor.java | 2 +- .../processor/rename/RenameProcessor.java | 10 +- .../processor/split/SplitProcessor.java | 4 +- .../ingest/IngestDocumentTests.java | 124 +++++++++--------- .../ingest/RandomDocumentPicks.java | 4 +- .../AbstractStringProcessorTestCase.java | 4 +- .../processor/add/AddProcessorTests.java | 10 +- .../convert/ConvertProcessorTests.java | 22 ++-- .../processor/date/DateProcessorTests.java | 18 +-- .../processor/gsub/GsubProcessorTests.java | 4 +- .../processor/join/JoinProcessorTests.java | 6 +- .../remove/RemoveProcessorTests.java | 4 +- .../rename/RenameProcessorTests.java | 12 +- .../processor/split/SplitProcessorTests.java | 4 +- .../transport/IngestActionFilterTests.java | 2 +- 24 files changed, 144 insertions(+), 144 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 27606090d29..c9e11253c61 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -53,7 +53,7 @@ public final class 
IngestDocument { * @return the value for the provided path if existing, null otherwise * @throws IllegalArgumentException if the field is present but is not of the type provided as argument. */ - public T getPropertyValue(String path, Class clazz) { + public T getFieldValue(String path, Class clazz) { if (path == null || path.length() == 0) { return null; } @@ -66,22 +66,22 @@ public final class IngestDocument { } String leafKey = pathElements[pathElements.length - 1]; - Object property = innerMap.get(leafKey); - if (property == null) { + Object fieldValue = innerMap.get(leafKey); + if (fieldValue == null) { return null; } - if (clazz.isInstance(property)) { - return clazz.cast(property); + if (clazz.isInstance(fieldValue)) { + return clazz.cast(fieldValue); } - throw new IllegalArgumentException("field [" + path + "] of type [" + property.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); + throw new IllegalArgumentException("field [" + path + "] of type [" + fieldValue.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); } /** * Checks whether the document contains a value for the provided path * @param path The path within the document in dot-notation - * @return true if the document contains a value for the property, false otherwise + * @return true if the document contains a value for the field, false otherwise */ - public boolean hasPropertyValue(String path) { + public boolean hasFieldValue(String path) { if (path == null || path.length() == 0) { return false; } @@ -96,10 +96,10 @@ public final class IngestDocument { } /** - * Removes the property identified by the provided path - * @param path the path of the property to be removed + * Removes the field identified by the provided path + * @param path the path of the field to be removed */ - public void removeProperty(String path) { + public void removeField(String path) { if (path == null || path.length() == 0) { return; } @@ -136,7 +136,7 @@ public final class 
IngestDocument { * @param path The path within the document in dot-notation * @param value The value to put in for the path key */ - public void setPropertyValue(String path, Object value) { + public void setFieldValue(String path, Object value) { if (path == null || path.length() == 0) { throw new IllegalArgumentException("cannot add null or empty field"); } @@ -175,8 +175,8 @@ public final class IngestDocument { /** * Returns the document. Should be used only for reading. Any change made to the returned map will - * not be reflected to the modified flag. Modify the document instead using {@link #setPropertyValue(String, Object)} - * and {@link #removeProperty(String)} + * not be reflected to the modified flag. Modify the document instead using {@link #setFieldValue(String, Object)} + * and {@link #removeField(String)} */ public Map getSource() { return source; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index 409c67924e9..00cbe73e508 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -46,11 +46,11 @@ public abstract class AbstractStringProcessor implements Processor { @Override public final void execute(IngestDocument document) { for(String field : fields) { - String val = document.getPropertyValue(field, String.class); + String val = document.getFieldValue(field, String.class); if (val == null) { throw new IllegalArgumentException("field [" + field + "] is null, cannot process it."); } - document.setPropertyValue(field, process(val)); + document.setFieldValue(field, process(val)); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java index e17fa622070..7bbb33a8f57 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java @@ -48,7 +48,7 @@ public class AddProcessor implements Processor { @Override public void execute(IngestDocument document) { for(Map.Entry entry : fields.entrySet()) { - document.setPropertyValue(entry.getKey(), entry.getValue()); + document.setFieldValue(entry.getKey(), entry.getValue()); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java index 6ea3a955fd7..769ecb90a4b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java @@ -102,7 +102,7 @@ public class ConvertProcessor implements Processor { public void execute(IngestDocument document) { for(Map.Entry entry : fields.entrySet()) { Type type = entry.getValue(); - Object oldValue = document.getPropertyValue(entry.getKey(), Object.class); + Object oldValue = document.getFieldValue(entry.getKey(), Object.class); Object newValue; if (oldValue == null) { throw new IllegalArgumentException("Field [" + entry.getKey() + "] is null, cannot be converted to type [" + type + "]"); @@ -118,7 +118,7 @@ public class ConvertProcessor implements Processor { } else { newValue = type.convert(oldValue); } - document.setPropertyValue(entry.getKey(), newValue); + document.setFieldValue(entry.getKey(), newValue); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 
8c5340065ba..a7cd3eb4fbe 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -62,7 +62,7 @@ public final class DateProcessor implements Processor { @Override public void execute(IngestDocument ingestDocument) { - String value = ingestDocument.getPropertyValue(matchField, String.class); + String value = ingestDocument.getFieldValue(matchField, String.class); // TODO(talevy): handle custom timestamp fields DateTime dateTime = null; @@ -80,7 +80,7 @@ public final class DateProcessor implements Processor { throw new IllegalArgumentException("unable to parse date [" + value + "]", lastException); } - ingestDocument.setPropertyValue(targetField, ISODateTimeFormat.dateTime().print(dateTime)); + ingestDocument.setFieldValue(targetField, ISODateTimeFormat.dateTime().print(dateTime)); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 400632e828c..5187f731494 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -61,7 +61,7 @@ public final class GeoIpProcessor implements Processor { @Override public void execute(IngestDocument ingestDocument) { - String ip = ingestDocument.getPropertyValue(sourceField, String.class); + String ip = ingestDocument.getFieldValue(sourceField, String.class); final InetAddress ipAddress; try { ipAddress = InetAddress.getByName(ip); @@ -88,7 +88,7 @@ public final class GeoIpProcessor implements Processor { default: throw new IllegalStateException("Unsupported database type [" + dbReader.getMetadata().getDatabaseType() + "]"); } - ingestDocument.setPropertyValue(targetField, geoData); + 
ingestDocument.setFieldValue(targetField, geoData); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 31440ad9e53..c2cc009ef81 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -46,12 +46,12 @@ public final class GrokProcessor implements Processor { @Override public void execute(IngestDocument ingestDocument) { - Object field = ingestDocument.getPropertyValue(matchField, Object.class); + Object field = ingestDocument.getFieldValue(matchField, Object.class); // TODO(talevy): handle invalid field types if (field instanceof String) { Map matches = grok.captures((String) field); if (matches != null) { - matches.forEach((k, v) -> ingestDocument.setPropertyValue(k, v)); + matches.forEach((k, v) -> ingestDocument.setFieldValue(k, v)); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java index d436a888fd7..bbbb7b9cfd4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java @@ -51,13 +51,13 @@ public class GsubProcessor implements Processor { @Override public void execute(IngestDocument document) { for (GsubExpression gsubExpression : gsubExpressions) { - String oldVal = document.getPropertyValue(gsubExpression.getFieldName(), String.class); + String oldVal = document.getFieldValue(gsubExpression.getFieldName(), String.class); if (oldVal == null) { throw new IllegalArgumentException("field [" + gsubExpression.getFieldName() + "] is null, cannot match pattern."); } Matcher matcher = 
gsubExpression.getPattern().matcher(oldVal); String newVal = matcher.replaceAll(gsubExpression.getReplacement()); - document.setPropertyValue(gsubExpression.getFieldName(), newVal); + document.setFieldValue(gsubExpression.getFieldName(), newVal); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java index 283a7ee49f8..a30b43ebde7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java @@ -50,14 +50,14 @@ public class JoinProcessor implements Processor { @Override public void execute(IngestDocument document) { for(Map.Entry entry : fields.entrySet()) { - List list = document.getPropertyValue(entry.getKey(), List.class); + List list = document.getFieldValue(entry.getKey(), List.class); if (list == null) { throw new IllegalArgumentException("field [" + entry.getKey() + "] is null, cannot join."); } String joined = list.stream() .map(Object::toString) .collect(Collectors.joining(entry.getValue())); - document.setPropertyValue(entry.getKey(), joined); + document.setFieldValue(entry.getKey(), joined); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java index 744b668a27a..41aba178741 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java @@ -49,7 +49,7 @@ public class RemoveProcessor implements Processor { @Override public void execute(IngestDocument document) { for(String field : fields) { - document.removeProperty(field); + document.removeField(field); } } diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java index f829e7ab0d7..d532564146f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java @@ -47,13 +47,13 @@ public class RenameProcessor implements Processor { @Override public void execute(IngestDocument document) { for(Map.Entry entry : fields.entrySet()) { - if (document.hasPropertyValue(entry.getKey())) { - if (document.hasPropertyValue(entry.getKey()) == false) { + if (document.hasFieldValue(entry.getKey())) { + if (document.hasFieldValue(entry.getKey()) == false) { throw new IllegalArgumentException("field [" + entry.getKey() + "] doesn't exist"); } - Object oldValue = document.getPropertyValue(entry.getKey(), Object.class); - document.removeProperty(entry.getKey()); - document.setPropertyValue(entry.getValue(), oldValue); + Object oldValue = document.getFieldValue(entry.getKey(), Object.class); + document.removeField(entry.getKey()); + document.setFieldValue(entry.getValue(), oldValue); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java index bc83bc9c794..92e87cc0628 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java @@ -50,11 +50,11 @@ public class SplitProcessor implements Processor { @Override public void execute(IngestDocument document) { for(Map.Entry entry : fields.entrySet()) { - String oldVal = document.getPropertyValue(entry.getKey(), String.class); + String oldVal = document.getFieldValue(entry.getKey(), String.class); if 
(oldVal == null) { throw new IllegalArgumentException("field [" + entry.getKey() + "] is null, cannot split."); } - document.setPropertyValue(entry.getKey(), Arrays.asList(oldVal.split(entry.getValue()))); + document.setFieldValue(entry.getKey(), Arrays.asList(oldVal.split(entry.getValue()))); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 24073a85526..375e86f71c6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -44,91 +44,91 @@ public class IngestDocumentTests extends ESTestCase { ingestDocument = new IngestDocument("index", "type", "id", document); } - public void testSimpleGetPropertyValue() { - assertThat(ingestDocument.getPropertyValue("foo", String.class), equalTo("bar")); - assertThat(ingestDocument.getPropertyValue("int", Integer.class), equalTo(123)); + public void testSimpleGetFieldValue() { + assertThat(ingestDocument.getFieldValue("foo", String.class), equalTo("bar")); + assertThat(ingestDocument.getFieldValue("int", Integer.class), equalTo(123)); } - public void testGetPropertyValueNullValue() { - assertThat(ingestDocument.getPropertyValue("fizz.foo_null", Object.class), nullValue()); + public void testGetFieldValueNullValue() { + assertThat(ingestDocument.getFieldValue("fizz.foo_null", Object.class), nullValue()); } - public void testSimpleGetPropertyValueTypeMismatch() { + public void testSimpleGetFieldValueTypeMismatch() { try { - ingestDocument.getPropertyValue("int", String.class); - fail("getProperty should have failed"); + ingestDocument.getFieldValue("int", String.class); + fail("getFieldValue should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [int] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); } try { - 
ingestDocument.getPropertyValue("foo", Integer.class); - fail("getProperty should have failed"); + ingestDocument.getFieldValue("foo", Integer.class); + fail("getFieldValue should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [foo] of type [java.lang.String] cannot be cast to [java.lang.Integer]")); } } - public void testNestedGetPropertyValue() { - assertThat(ingestDocument.getPropertyValue("fizz.buzz", String.class), equalTo("hello world")); + public void testNestedGetFieldValue() { + assertThat(ingestDocument.getFieldValue("fizz.buzz", String.class), equalTo("hello world")); } - public void testGetPropertyValueNotFound() { - assertThat(ingestDocument.getPropertyValue("not.here", String.class), nullValue()); + public void testGetFieldValueNotFound() { + assertThat(ingestDocument.getFieldValue("not.here", String.class), nullValue()); } - public void testGetPropertyValueNull() { - assertNull(ingestDocument.getPropertyValue(null, String.class)); + public void testGetFieldValueNull() { + assertNull(ingestDocument.getFieldValue(null, String.class)); } - public void testGetPropertyValueEmpty() { - assertNull(ingestDocument.getPropertyValue("", String.class)); + public void testGetFieldValueEmpty() { + assertNull(ingestDocument.getFieldValue("", String.class)); } - public void testHasProperty() { - assertTrue(ingestDocument.hasPropertyValue("fizz")); + public void testHasFieldValue() { + assertTrue(ingestDocument.hasFieldValue("fizz")); } - public void testHasPropertyValueNested() { - assertTrue(ingestDocument.hasPropertyValue("fizz.buzz")); + public void testHasFieldValueNested() { + assertTrue(ingestDocument.hasFieldValue("fizz.buzz")); } - public void testHasPropertyValueNotFound() { - assertFalse(ingestDocument.hasPropertyValue("doesnotexist")); + public void testHasFieldValueNotFound() { + assertFalse(ingestDocument.hasFieldValue("doesnotexist")); } - public void testHasPropertyValueNestedNotFound() { - 
assertFalse(ingestDocument.hasPropertyValue("fizz.doesnotexist")); + public void testHasFieldValueNestedNotFound() { + assertFalse(ingestDocument.hasFieldValue("fizz.doesnotexist")); } - public void testHasPropertyValueNull() { - assertFalse(ingestDocument.hasPropertyValue(null)); + public void testHasFieldValueNull() { + assertFalse(ingestDocument.hasFieldValue(null)); } - public void testHasPropertyValueNullValue() { - assertTrue(ingestDocument.hasPropertyValue("fizz.foo_null")); + public void testHasFieldValueNullValue() { + assertTrue(ingestDocument.hasFieldValue("fizz.foo_null")); } - public void testHasPropertyValueEmpty() { - assertFalse(ingestDocument.hasPropertyValue("")); + public void testHasFieldValueEmpty() { + assertFalse(ingestDocument.hasFieldValue("")); } - public void testSimpleSetPropertyValue() { - ingestDocument.setPropertyValue("new_field", "foo"); + public void testSimpleSetFieldValue() { + ingestDocument.setFieldValue("new_field", "foo"); assertThat(ingestDocument.getSource().get("new_field"), equalTo("foo")); assertThat(ingestDocument.isModified(), equalTo(true)); } - public void testSetPropertyValueNullValue() { - ingestDocument.setPropertyValue("new_field", null); + public void testSetFieldValueNullValue() { + ingestDocument.setFieldValue("new_field", null); assertThat(ingestDocument.getSource().containsKey("new_field"), equalTo(true)); assertThat(ingestDocument.getSource().get("new_field"), nullValue()); assertThat(ingestDocument.isModified(), equalTo(true)); } @SuppressWarnings("unchecked") - public void testNestedSetPropertyValue() { - ingestDocument.setPropertyValue("a.b.c.d", "foo"); + public void testNestedSetFieldValue() { + ingestDocument.setFieldValue("a.b.c.d", "foo"); assertThat(ingestDocument.getSource().get("a"), instanceOf(Map.class)); Map a = (Map) ingestDocument.getSource().get("a"); assertThat(a.get("b"), instanceOf(Map.class)); @@ -141,14 +141,14 @@ public class IngestDocumentTests extends ESTestCase { 
assertThat(ingestDocument.isModified(), equalTo(true)); } - public void testSetPropertyValueOnExistingField() { - ingestDocument.setPropertyValue("foo", "newbar"); + public void testSetFieldValueOnExistingField() { + ingestDocument.setFieldValue("foo", "newbar"); assertThat(ingestDocument.getSource().get("foo"), equalTo("newbar")); } @SuppressWarnings("unchecked") - public void testSetPropertyValueOnExistingParent() { - ingestDocument.setPropertyValue("fizz.new", "bar"); + public void testSetFieldValueOnExistingParent() { + ingestDocument.setFieldValue("fizz.new", "bar"); assertThat(ingestDocument.getSource().get("fizz"), instanceOf(Map.class)); Map innerMap = (Map) ingestDocument.getSource().get("fizz"); assertThat(innerMap.get("new"), instanceOf(String.class)); @@ -157,9 +157,9 @@ public class IngestDocumentTests extends ESTestCase { assertThat(ingestDocument.isModified(), equalTo(true)); } - public void testSetPropertyValueOnExistingParentTypeMismatch() { + public void testSetFieldValueOnExistingParentTypeMismatch() { try { - ingestDocument.setPropertyValue("fizz.buzz.new", "bar"); + ingestDocument.setFieldValue("fizz.buzz.new", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], [java.util.Map] expected instead.")); @@ -167,9 +167,9 @@ public class IngestDocumentTests extends ESTestCase { } } - public void testSetPropertyValueOnExistingNullParent() { + public void testSetFieldValueOnExistingNullParent() { try { - ingestDocument.setPropertyValue("fizz.foo_null.test", "bar"); + ingestDocument.setFieldValue("fizz.foo_null.test", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to null parent, [java.util.Map] expected instead.")); @@ -177,9 +177,9 @@ public class IngestDocumentTests extends ESTestCase { } } - public void testSetPropertyValueNullName() 
{ + public void testSetFieldValueNullName() { try { - ingestDocument.setPropertyValue(null, "bar"); + ingestDocument.setFieldValue(null, "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); @@ -187,9 +187,9 @@ public class IngestDocumentTests extends ESTestCase { } } - public void testSetPropertyValueEmptyName() { + public void testSetFieldValueEmptyName() { try { - ingestDocument.setPropertyValue("", "bar"); + ingestDocument.setFieldValue("", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); @@ -197,15 +197,15 @@ public class IngestDocumentTests extends ESTestCase { } } - public void testRemoveProperty() { - ingestDocument.removeProperty("foo"); + public void testRemoveField() { + ingestDocument.removeField("foo"); assertThat(ingestDocument.isModified(), equalTo(true)); assertThat(ingestDocument.getSource().size(), equalTo(2)); assertThat(ingestDocument.getSource().containsKey("foo"), equalTo(false)); } - public void testRemoveInnerProperty() { - ingestDocument.removeProperty("fizz.buzz"); + public void testRemoveInnerField() { + ingestDocument.removeField("fizz.buzz"); assertThat(ingestDocument.getSource().size(), equalTo(3)); assertThat(ingestDocument.getSource().get("fizz"), instanceOf(Map.class)); @SuppressWarnings("unchecked") @@ -213,33 +213,33 @@ public class IngestDocumentTests extends ESTestCase { assertThat(map.size(), equalTo(1)); assertThat(map.containsKey("buzz"), equalTo(false)); - ingestDocument.removeProperty("fizz.foo_null"); + ingestDocument.removeField("fizz.foo_null"); assertThat(map.size(), equalTo(0)); assertThat(ingestDocument.getSource().size(), equalTo(3)); assertThat(ingestDocument.getSource().containsKey("fizz"), equalTo(true)); assertThat(ingestDocument.isModified(), equalTo(true)); } - public void testRemoveNonExistingProperty() { - 
ingestDocument.removeProperty("does_not_exist"); + public void testRemoveNonExistingField() { + ingestDocument.removeField("does_not_exist"); assertThat(ingestDocument.isModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } public void testRemoveExistingParentTypeMismatch() { - ingestDocument.removeProperty("foo.test"); + ingestDocument.removeField("foo.test"); assertThat(ingestDocument.isModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } - public void testRemoveNullProperty() { - ingestDocument.removeProperty(null); + public void testRemoveNullField() { + ingestDocument.removeField(null); assertThat(ingestDocument.isModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } - public void testRemoveEmptyProperty() { - ingestDocument.removeProperty(""); + public void testRemoveEmptyField() { + ingestDocument.removeField(""); assertThat(ingestDocument.isModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java index 5277d6b790d..0443dd0b68f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -74,7 +74,7 @@ public final class RandomDocumentPicks { randomEntry = RandomPicks.randomFrom(random, treeMap.entrySet()); key += "." 
+ randomEntry.getKey(); } - assert ingestDocument.getPropertyValue(key, Object.class) != null; + assert ingestDocument.getFieldValue(key, Object.class) != null; return key; } @@ -88,7 +88,7 @@ public final class RandomDocumentPicks { do { fieldName = randomFieldName(random); } while (canAddField(fieldName, ingestDocument) == false); - ingestDocument.setPropertyValue(fieldName, value); + ingestDocument.setFieldValue(fieldName, value); return fieldName; } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java index 94bdce9f663..ecd9fadcef5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java @@ -53,7 +53,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { Processor processor = newProcessor(expected.keySet()); processor.execute(ingestDocument); for (Map.Entry entry : expected.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); } } @@ -73,7 +73,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = newProcessor(Collections.singletonList(fieldName)); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - ingestDocument.setPropertyValue(fieldName, randomInt()); + ingestDocument.setFieldValue(fieldName, randomInt()); try { processor.execute(ingestDocument); fail("processor should have failed"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java index cecf2c9d9e2..ff32c3e2f05 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java @@ -44,8 +44,8 @@ public class AddProcessorTests extends ESTestCase { processor.execute(ingestDocument); for (Map.Entry field : fields.entrySet()) { - assertThat(ingestDocument.hasPropertyValue(field.getKey()), equalTo(true)); - assertThat(ingestDocument.getPropertyValue(field.getKey(), Object.class), equalTo(field.getValue())); + assertThat(ingestDocument.hasFieldValue(field.getKey()), equalTo(true)); + assertThat(ingestDocument.getFieldValue(field.getKey(), Object.class), equalTo(field.getValue())); } } @@ -63,14 +63,14 @@ public class AddProcessorTests extends ESTestCase { Processor processor = new AddProcessor(fields); processor.execute(ingestDocument); for (Map.Entry field : fields.entrySet()) { - assertThat(ingestDocument.hasPropertyValue(field.getKey()), equalTo(true)); - assertThat(ingestDocument.getPropertyValue(field.getKey(), Object.class), equalTo(field.getValue())); + assertThat(ingestDocument.hasFieldValue(field.getKey()), equalTo(true)); + assertThat(ingestDocument.getFieldValue(field.getKey(), Object.class), equalTo(field.getValue())); } } public void testAddFieldsTypeMismatch() throws IOException { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - ingestDocument.setPropertyValue("field", "value"); + ingestDocument.setFieldValue("field", "value"); Processor processor = new AddProcessor(Collections.singletonMap("field.inner", "value")); try { processor.execute(ingestDocument); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java index b89f51166df..56d74c6f12a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java @@ -46,7 +46,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), Integer.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), Integer.class), equalTo(entry.getValue())); } } @@ -71,7 +71,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); } } @@ -79,7 +79,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); String value = "string-" + randomAsciiOfLengthBetween(1, 10); - ingestDocument.setPropertyValue(fieldName, value); + ingestDocument.setFieldValue(fieldName, value); Map convert = Collections.singletonMap(fieldName, Type.INTEGER); Processor processor = new ConvertProcessor(convert); @@ -106,7 +106,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), 
Float.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), Float.class), equalTo(entry.getValue())); } } @@ -131,7 +131,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); } } @@ -139,7 +139,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); String value = "string-" + randomAsciiOfLengthBetween(1, 10); - ingestDocument.setPropertyValue(fieldName, value); + ingestDocument.setFieldValue(fieldName, value); Map convert = Collections.singletonMap(fieldName, Type.FLOAT); Processor processor = new ConvertProcessor(convert); @@ -170,7 +170,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), Boolean.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), Boolean.class), equalTo(entry.getValue())); } } @@ -199,7 +199,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); } } @@ -213,7 +213,7 @@ public class ConvertProcessorTests 
extends ESTestCase { //verify that only proper boolean values are supported and we are strict about it fieldValue = randomFrom("on", "off", "yes", "no", "0", "1"); } - ingestDocument.setPropertyValue(fieldName, fieldValue); + ingestDocument.setFieldValue(fieldName, fieldValue); Map convert = Collections.singletonMap(fieldName, Type.BOOLEAN); Processor processor = new ConvertProcessor(convert); @@ -260,7 +260,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); } } @@ -305,7 +305,7 @@ public class ConvertProcessorTests extends ESTestCase { Processor processor = new ConvertProcessor(fields); processor.execute(ingestDocument); for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), List.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java index 10514ee46a9..182ab1dbaaf 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -38,7 +38,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 06 11:05:15"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", 
String.class), equalTo("2010-06-12T11:05:15.000+02:00")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T11:05:15.000+02:00")); } public void testJodaPatternMultipleFormats() { @@ -53,19 +53,19 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 06"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12/06/2010"); ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12-06-2010"); ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "2010"); @@ -85,7 +85,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "2010 12 giugno"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); + 
assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); } public void testJodaPatternDefaultYear() { @@ -95,7 +95,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "12/06"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); } public void testTAI64N() { @@ -106,7 +106,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", dateAsString); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); } public void testUnixMs() { @@ -116,7 +116,7 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "1000500"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } public void testUnix() { @@ -126,6 +126,6 @@ public class DateProcessorTests extends ESTestCase { document.put("date_as_string", "1000.5"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); dateProcessor.execute(ingestDocument); - 
assertThat(ingestDocument.getPropertyValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java index 9dce4fe48c7..7f91a8b3a56 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java @@ -46,14 +46,14 @@ public class GsubProcessorTests extends ESTestCase { Processor processor = new GsubProcessor(expressions); processor.execute(ingestDocument); for (GsubExpression expression : expressions) { - assertThat(ingestDocument.getPropertyValue(expression.getFieldName(), String.class), equalTo("127-0-0-1")); + assertThat(ingestDocument.getFieldValue(expression.getFieldName(), String.class), equalTo("127-0-0-1")); } } public void testGsubNotAStringValue() throws IOException { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - ingestDocument.setPropertyValue(fieldName, 123); + ingestDocument.setFieldValue(fieldName, 123); List gsubExpressions = Collections.singletonList(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); Processor processor = new GsubProcessor(gsubExpressions); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java index 8ad6c7bb3c0..391c9a641ea 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java @@ -58,7 +58,7 @@ public class JoinProcessorTests extends ESTestCase { Processor processor = new JoinProcessor(fields); processor.execute(ingestDocument); for (Map.Entry entry : expectedResultMap.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); } } @@ -87,14 +87,14 @@ public class JoinProcessorTests extends ESTestCase { Processor processor = new JoinProcessor(fields); processor.execute(ingestDocument); for (Map.Entry entry : expectedResultMap.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), String.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); } } public void testJoinNonListField() throws IOException { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - ingestDocument.setPropertyValue(fieldName, randomAsciiOfLengthBetween(1, 10)); + ingestDocument.setFieldValue(fieldName, randomAsciiOfLengthBetween(1, 10)); Map join = Collections.singletonMap(fieldName, "-"); Processor processor = new JoinProcessor(join); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java index 4614ae2f6e1..3c110fd7b60 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java @@ -45,8 +45,8 @@ public class RemoveProcessorTests extends ESTestCase { Processor processor = new RemoveProcessor(fields); 
processor.execute(ingestDocument); for (String field : fields) { - assertThat(ingestDocument.getPropertyValue(field, Object.class), nullValue()); - assertThat(ingestDocument.hasPropertyValue(field), equalTo(false)); + assertThat(ingestDocument.getFieldValue(field, Object.class), nullValue()); + assertThat(ingestDocument.hasFieldValue(field), equalTo(false)); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java index 65501acb5d3..51ed5da1750 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -46,13 +46,13 @@ public class RenameProcessorTests extends ESTestCase { do { newFieldName = RandomDocumentPicks.randomFieldName(random()); } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFields.containsKey(newFieldName)); - newFields.put(newFieldName, ingestDocument.getPropertyValue(fieldName, Object.class)); + newFields.put(newFieldName, ingestDocument.getFieldValue(fieldName, Object.class)); fields.put(fieldName, newFieldName); } Processor processor = new RenameProcessor(fields); processor.execute(ingestDocument); for (Map.Entry entry : newFields.entrySet()) { - assertThat(ingestDocument.getPropertyValue(entry.getKey(), Object.class), equalTo(entry.getValue())); + assertThat(ingestDocument.getFieldValue(entry.getKey(), Object.class), equalTo(entry.getValue())); } } @@ -66,12 +66,12 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameExistingFieldNullValue() throws IOException { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - ingestDocument.setPropertyValue(fieldName, null); 
+ ingestDocument.setFieldValue(fieldName, null); String newFieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = new RenameProcessor(Collections.singletonMap(fieldName, newFieldName)); processor.execute(ingestDocument); - assertThat(ingestDocument.hasPropertyValue(fieldName), equalTo(false)); - assertThat(ingestDocument.hasPropertyValue(newFieldName), equalTo(true)); - assertThat(ingestDocument.getPropertyValue(newFieldName, Object.class), nullValue()); + assertThat(ingestDocument.hasFieldValue(fieldName), equalTo(false)); + assertThat(ingestDocument.hasFieldValue(newFieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), nullValue()); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java index 5d749e77e31..14cd6d02a39 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java @@ -42,7 +42,7 @@ public class SplitProcessorTests extends ESTestCase { Processor processor = new SplitProcessor(fields); processor.execute(ingestDocument); for (String field : fields.keySet()) { - assertThat(ingestDocument.getPropertyValue(field, List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); + assertThat(ingestDocument.getFieldValue(field, List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); } } @@ -62,7 +62,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplitNonStringValue() throws IOException { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - ingestDocument.setPropertyValue(fieldName, randomInt()); + ingestDocument.setFieldValue(fieldName, randomInt()); 
Processor processor = new SplitProcessor(Collections.singletonMap(fieldName, "\\.")); try { processor.execute(ingestDocument); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 176dd4530b9..974367aec17 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -174,7 +174,7 @@ public class IngestActionFilterTests extends ESTestCase { Processor processor = new Processor() { @Override public void execute(IngestDocument ingestDocument) { - ingestDocument.setPropertyValue("field2", "value2"); + ingestDocument.setFieldValue("field2", "value2"); } @Override From 5d510b59c828c7442a44d670e6341fa8890babad Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 25 Nov 2015 16:14:52 +0100 Subject: [PATCH 082/347] use MetaData enum for metadata field names Also rename getName to getFieldName in MetaData to prevent confusion with name() enum method. 
--- .../elasticsearch/ingest/IngestDocument.java | 18 +++++++++--------- .../transport/WriteableIngestDocument.java | 6 +++--- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index c9e11253c61..97274d4cffb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -35,9 +35,9 @@ public final class IngestDocument { public IngestDocument(String index, String type, String id, Map source) { this.metaData = new HashMap<>(); - this.metaData.put("_index", index); - this.metaData.put("_type", type); - this.metaData.put("_id", id); + this.metaData.put(MetaData.INDEX.getFieldName(), index); + this.metaData.put(MetaData.TYPE.getFieldName(), type); + this.metaData.put(MetaData.ID.getFieldName(), id); this.source = source; } @@ -170,7 +170,7 @@ public final class IngestDocument { } public String getMetadata(MetaData metaData) { - return this.metaData.get(metaData.getName()); + return this.metaData.get(metaData.getFieldName()); } /** @@ -213,14 +213,14 @@ public final class IngestDocument { TIMESTAMP("_timestamp"), TTL("_ttl"); - private final String name; + private final String fieldName; - MetaData(String name) { - this.name = name; + MetaData(String fieldName) { + this.fieldName = fieldName; } - public String getName() { - return name; + public String getFieldName() { + return fieldName; } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java index ec7ea798a86..4b65a16596d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java @@ -102,9 +102,9 @@ public class WriteableIngestDocument implements Writeable Date: Wed, 25 Nov 2015 18:32:56 +0100 Subject: [PATCH 083/347] made updatePipelines() to not make it prone to race conditions --- .../java/org/elasticsearch/plugin/ingest/PipelineStore.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 3bbddb1b842..fae1457ad12 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -122,7 +122,7 @@ public class PipelineStore extends AbstractLifecycleComponent { return factory.create(id, config, processorFactoryRegistry); } - void updatePipelines() throws IOException { + synchronized void updatePipelines() throws IOException { // note: this process isn't fast or smart, but the idea is that there will not be many pipelines, // so for that reason the goal is to keep the update logic simple. From 1a7391070f877e8418b44148ecc8f3664e8b85f3 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 25 Nov 2015 17:49:02 +0100 Subject: [PATCH 084/347] Simulate api improvements Move ParsedSimulateRequest to SimulatePipelineRequest and remove Parser class in favor of static parse methods. Simplified execute methods in SimulateExecutionService. --- .../simulate/ParsedSimulateRequest.java | 89 ------------------- .../simulate/SimulateExecutionService.java | 71 +++++++-------- .../simulate/SimulatePipelineRequest.java | 65 ++++++++++++++ .../SimulatePipelineTransportAction.java | 7 +- .../SimulateExecutionServiceTests.java | 8 +- ... 
SimulatePipelineRequestParsingTests.java} | 14 +-- 6 files changed, 110 insertions(+), 144 deletions(-) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java rename plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/{ParsedSimulateRequestParserTests.java => SimulatePipelineRequestParsingTests.java} (90%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java deleted file mode 100644 index 4f55ef8424d..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugin.ingest.transport.simulate; - -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.plugin.ingest.PipelineStore; - -import java.io.IOException; -import java.util.*; - -import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; - -public class ParsedSimulateRequest { - private final List documents; - private final Pipeline pipeline; - private final boolean verbose; - - ParsedSimulateRequest(Pipeline pipeline, List documents, boolean verbose) { - this.pipeline = pipeline; - this.documents = Collections.unmodifiableList(documents); - this.verbose = verbose; - } - - public Pipeline getPipeline() { - return pipeline; - } - - public List getDocuments() { - return documents; - } - - public boolean isVerbose() { - return verbose; - } - - public static class Parser { - private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory(); - public static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline"; - - private List parseDocs(Map config) { - List> docs = ConfigurationUtils.readList(config, Fields.DOCS); - List ingestDocumentList = new ArrayList<>(); - for (Map dataMap : docs) { - Map document = ConfigurationUtils.readMap(dataMap, Fields.SOURCE); - IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(dataMap, Fields.INDEX), - ConfigurationUtils.readStringProperty(dataMap, Fields.TYPE), - ConfigurationUtils.readStringProperty(dataMap, Fields.ID), - document); - ingestDocumentList.add(ingestDocument); - } - return ingestDocumentList; - } - - public ParsedSimulateRequest parseWithPipelineId(String pipelineId, Map config, boolean verbose, PipelineStore pipelineStore) { - if (pipelineId == null) { - throw new IllegalArgumentException("param [pipeline] is null"); - } - Pipeline pipeline = pipelineStore.get(pipelineId); - 
List ingestDocumentList = parseDocs(config); - return new ParsedSimulateRequest(pipeline, ingestDocumentList, verbose); - - } - - public ParsedSimulateRequest parse(Map config, boolean verbose, PipelineStore pipelineStore) throws IOException { - Map pipelineConfig = ConfigurationUtils.readMap(config, Fields.PIPELINE); - Pipeline pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); - List ingestDocumentList = parseDocs(config); - return new ParsedSimulateRequest(pipeline, ingestDocumentList, verbose); - } - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java index 4f26562dafc..fcf6e00c657 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java @@ -40,48 +40,39 @@ public class SimulateExecutionService { this.threadPool = threadPool; } - SimulateDocumentResult executeItem(Pipeline pipeline, IngestDocument ingestDocument) { - try { - pipeline.execute(ingestDocument); - return new SimulateDocumentSimpleResult(ingestDocument); - } catch (Exception e) { - return new SimulateDocumentSimpleResult(e); - } - } - - SimulateDocumentVerboseResult executeVerboseItem(Pipeline pipeline, IngestDocument ingestDocument) { - List processorResultList = new ArrayList<>(); - IngestDocument currentIngestDocument = new IngestDocument(ingestDocument); - for (int i = 0; i < pipeline.getProcessors().size(); i++) { - Processor processor = pipeline.getProcessors().get(i); - String processorId = "processor[" + processor.getType() + "]-" + i; - - try { - processor.execute(currentIngestDocument); - processorResultList.add(new SimulateProcessorResult(processorId, 
currentIngestDocument)); - } catch (Exception e) { - processorResultList.add(new SimulateProcessorResult(processorId, e)); - } - - currentIngestDocument = new IngestDocument(currentIngestDocument); - } - return new SimulateDocumentVerboseResult(processorResultList); - } - - public void execute(ParsedSimulateRequest request, ActionListener listener) { - threadPool.executor(THREAD_POOL_NAME).execute(new Runnable() { - @Override - public void run() { - List responses = new ArrayList<>(); - for (IngestDocument ingestDocument : request.getDocuments()) { - if (request.isVerbose()) { - responses.add(executeVerboseItem(request.getPipeline(), ingestDocument)); - } else { - responses.add(executeItem(request.getPipeline(), ingestDocument)); - } + SimulateDocumentResult executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose) { + if (verbose) { + List processorResultList = new ArrayList<>(); + IngestDocument currentIngestDocument = new IngestDocument(ingestDocument); + for (int i = 0; i < pipeline.getProcessors().size(); i++) { + Processor processor = pipeline.getProcessors().get(i); + String processorId = "processor[" + processor.getType() + "]-" + i; + try { + processor.execute(currentIngestDocument); + processorResultList.add(new SimulateProcessorResult(processorId, currentIngestDocument)); + } catch (Exception e) { + processorResultList.add(new SimulateProcessorResult(processorId, e)); } - listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); + currentIngestDocument = new IngestDocument(currentIngestDocument); } + return new SimulateDocumentVerboseResult(processorResultList); + } else { + try { + pipeline.execute(ingestDocument); + return new SimulateDocumentSimpleResult(ingestDocument); + } catch (Exception e) { + return new SimulateDocumentSimpleResult(e); + } + } + } + + public void execute(SimulatePipelineRequest.Parsed request, ActionListener listener) { + 
threadPool.executor(THREAD_POOL_NAME).execute(() -> { + List responses = new ArrayList<>(); + for (IngestDocument ingestDocument : request.getDocuments()) { + responses.add(executeDocument(request.getPipeline(), ingestDocument, request.isVerbose())); + } + listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); }); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java index 3394de413c8..8d89e508386 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -24,8 +24,16 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.plugin.ingest.PipelineStore; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -92,4 +100,61 @@ public class SimulatePipelineRequest extends ActionRequest { static final String TYPE = "_type"; static final String ID = "_id"; } + + static class Parsed { + private final List documents; + private final Pipeline pipeline; + private final boolean verbose; + + Parsed(Pipeline pipeline, List documents, boolean verbose) { + this.pipeline = pipeline; + this.documents = Collections.unmodifiableList(documents); + this.verbose = 
verbose; + } + + public Pipeline getPipeline() { + return pipeline; + } + + public List getDocuments() { + return documents; + } + + public boolean isVerbose() { + return verbose; + } + } + + private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory(); + static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline"; + + static Parsed parseWithPipelineId(String pipelineId, Map config, boolean verbose, PipelineStore pipelineStore) { + if (pipelineId == null) { + throw new IllegalArgumentException("param [pipeline] is null"); + } + Pipeline pipeline = pipelineStore.get(pipelineId); + List ingestDocumentList = parseDocs(config); + return new Parsed(pipeline, ingestDocumentList, verbose); + } + + static Parsed parse(Map config, boolean verbose, PipelineStore pipelineStore) throws IOException { + Map pipelineConfig = ConfigurationUtils.readMap(config, Fields.PIPELINE); + Pipeline pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); + List ingestDocumentList = parseDocs(config); + return new Parsed(pipeline, ingestDocumentList, verbose); + } + + private static List parseDocs(Map config) { + List> docs = ConfigurationUtils.readList(config, Fields.DOCS); + List ingestDocumentList = new ArrayList<>(); + for (Map dataMap : docs) { + Map document = ConfigurationUtils.readMap(dataMap, Fields.SOURCE); + IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(dataMap, Fields.INDEX), + ConfigurationUtils.readStringProperty(dataMap, Fields.TYPE), + ConfigurationUtils.readStringProperty(dataMap, Fields.ID), + document); + ingestDocumentList.add(ingestDocument); + } + return ingestDocumentList; + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index 
305c4122203..f3e327c527f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -48,13 +48,12 @@ public class SimulatePipelineTransportAction extends HandledTransportAction listener) { Map source = XContentHelper.convertToMap(request.getSource(), false).v2(); - ParsedSimulateRequest simulateRequest; - ParsedSimulateRequest.Parser parser = new ParsedSimulateRequest.Parser(); + SimulatePipelineRequest.Parsed simulateRequest; try { if (request.getId() != null) { - simulateRequest = parser.parseWithPipelineId(request.getId(), source, request.isVerbose(), pipelineStore); + simulateRequest = SimulatePipelineRequest.parseWithPipelineId(request.getId(), source, request.isVerbose(), pipelineStore); } else { - simulateRequest = parser.parse(source, request.isVerbose(), pipelineStore); + simulateRequest = SimulatePipelineRequest.parse(source, request.isVerbose(), pipelineStore); } } catch (IOException e) { listener.onFailure(e); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index bc6de44fd5c..42cedf520ad 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -62,7 +62,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItem() throws Exception { - SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, ingestDocument); + SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, 
ingestDocument, true); verify(processor, times(2)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; @@ -78,7 +78,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteItem() throws Exception { - SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, ingestDocument); + SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); verify(processor, times(2)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; @@ -89,7 +89,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteVerboseItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); doThrow(e).doNothing().when(processor).execute(ingestDocument); - SimulateDocumentResult actualItemResponse = executionService.executeVerboseItem(pipeline, ingestDocument); + SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); verify(processor, times(2)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; @@ -110,7 +110,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteItemWithFailure() throws Exception { Exception e = new RuntimeException("processor failed"); doThrow(e).when(processor).execute(ingestDocument); - SimulateDocumentResult actualItemResponse = executionService.executeItem(pipeline, ingestDocument); + SimulateDocumentResult actualItemResponse 
= executionService.executeDocument(pipeline, ingestDocument, false); verify(processor, times(1)).execute(ingestDocument); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java similarity index 90% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java index 29c4faa17f7..e15f2f92915 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/ParsedSimulateRequestParserTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java @@ -38,17 +38,17 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class ParsedSimulateRequestParserTests extends ESTestCase { +public class SimulatePipelineRequestParsingTests extends ESTestCase { private PipelineStore store; @Before public void init() throws IOException { - Pipeline pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Collections.singletonList(mock(Processor.class))); + Pipeline pipeline = new Pipeline(SimulatePipelineRequest.SIMULATED_PIPELINE_ID, null, Collections.singletonList(mock(Processor.class))); Map processorRegistry = new HashMap<>(); processorRegistry.put("mock_processor", mock(Processor.Factory.class)); store = mock(PipelineStore.class); - 
when(store.get(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)).thenReturn(pipeline); + when(store.get(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)).thenReturn(pipeline); when(store.getProcessorFactoryRegistry()).thenReturn(processorRegistry); } @@ -79,7 +79,7 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { expectedDocs.add(expectedDoc); } - ParsedSimulateRequest actualRequest = new ParsedSimulateRequest.Parser().parseWithPipelineId(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, requestContent, false, store); + SimulatePipelineRequest.Parsed actualRequest = SimulatePipelineRequest.parseWithPipelineId(SimulatePipelineRequest.SIMULATED_PIPELINE_ID, requestContent, false, store); assertThat(actualRequest.isVerbose(), equalTo(false)); assertThat(actualRequest.getDocuments().size(), equalTo(numDocs)); Iterator> expectedDocsIterator = expectedDocs.iterator(); @@ -91,7 +91,7 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(Fields.ID))); } - assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)); + assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); assertThat(actualRequest.getPipeline().getDescription(), nullValue()); assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(1)); } @@ -132,7 +132,7 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { pipelineConfig.put("processors", processors); requestContent.put(Fields.PIPELINE, pipelineConfig); - ParsedSimulateRequest actualRequest = new ParsedSimulateRequest.Parser().parse(requestContent, false, store); + SimulatePipelineRequest.Parsed actualRequest = SimulatePipelineRequest.parse(requestContent, false, store); assertThat(actualRequest.isVerbose(), equalTo(false)); assertThat(actualRequest.getDocuments().size(), equalTo(numDocs)); Iterator> 
expectedDocsIterator = expectedDocs.iterator(); @@ -144,7 +144,7 @@ public class ParsedSimulateRequestParserTests extends ESTestCase { assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(Fields.ID))); } - assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)); + assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); assertThat(actualRequest.getPipeline().getDescription(), nullValue()); assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(numProcessors)); } From afc9069c99f81334c6f68c67b5d1b838ce686963 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 25 Nov 2015 18:27:04 +0100 Subject: [PATCH 085/347] * Inlined PipelineStoreClient class into the PipelineStore class * Moved PipelineReference to a top level class and named it PipelineDefinition * Pulled some logic from the crud transport classes to the PipelineStore * Use IOUtils#close(...) 
where appropriate --- .../common/SearchScrollIterator.java | 93 +++++++++++ .../common/SearchScrollIteratorTests.java | 30 ++-- .../plugin/ingest/IngestModule.java | 1 - .../plugin/ingest/IngestPlugin.java | 2 +- .../plugin/ingest/PipelineDefinition.java | 115 ++++++++++++++ .../plugin/ingest/PipelineStore.java | 146 +++++++++++------- .../plugin/ingest/PipelineStoreClient.java | 135 ---------------- .../delete/DeletePipelineTransportAction.java | 13 +- .../transport/get/GetPipelineResponse.java | 37 ++--- .../get/GetPipelineTransportAction.java | 11 +- .../put/PutPipelineTransportAction.java | 21 +-- .../elasticsearch/ingest/IngestClientIT.java | 8 +- .../plugin/ingest/PipelineStoreTests.java | 106 ++++++++++--- 13 files changed, 420 insertions(+), 298 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java rename plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreClientTests.java => core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java (54%) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreClient.java diff --git a/core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java b/core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java new file mode 100644 index 00000000000..18535d1626e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.SearchHit; + +import java.util.Collections; +import java.util.Iterator; + +/** + * An iterator that easily helps to consume all hits from a scroll search. + */ +public final class SearchScrollIterator implements Iterator { + + /** + * Creates an iterator that returns all matching hits of a scroll search via an iterator. + * The iterator will return all hits per scroll search and execute additional scroll searches + * to get more hits until all hits have been returned by the scroll search on the ES side. 
+ */ + public static Iterable createIterator(Client client, TimeValue scrollTimeout, SearchRequest searchRequest) { + searchRequest.scroll(scrollTimeout); + SearchResponse searchResponse = client.search(searchRequest).actionGet(scrollTimeout); + if (searchResponse.getHits().getTotalHits() == 0) { + return Collections.emptyList(); + } else { + return () -> new SearchScrollIterator(client, scrollTimeout, searchResponse); + } + } + + private final Client client; + private final TimeValue scrollTimeout; + + private int currentIndex; + private SearchHit[] currentHits; + private SearchResponse searchResponse; + + private SearchScrollIterator(Client client, TimeValue scrollTimeout, SearchResponse searchResponse) { + this.client = client; + this.scrollTimeout = scrollTimeout; + this.searchResponse = searchResponse; + this.currentHits = searchResponse.getHits().getHits(); + } + + @Override + public boolean hasNext() { + if (currentIndex < currentHits.length) { + return true; + } else { + if (searchResponse == null) { + return false; + } + + SearchScrollRequest request = new SearchScrollRequest(searchResponse.getScrollId()); + request.scroll(scrollTimeout); + searchResponse = client.searchScroll(request).actionGet(scrollTimeout); + if (searchResponse.getHits().getHits().length == 0) { + searchResponse = null; + return false; + } else { + currentHits = searchResponse.getHits().getHits(); + currentIndex = 0; + return true; + } + } + } + + @Override + public SearchHit next() { + return currentHits[currentIndex++]; + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreClientTests.java b/core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java similarity index 54% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreClientTests.java rename to core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java index f670bdab7f9..886d9b94e84 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreClientTests.java +++ b/core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java @@ -17,33 +17,39 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.common; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; -public class PipelineStoreClientTests extends ESSingleNodeTestCase { +// Not a real unit tests with mocks, but with a single node, because we mock the scroll +// search behaviour and it changes then this test will not catch this. +public class SearchScrollIteratorTests extends ESSingleNodeTestCase { - public void testReadAll() { - PipelineStoreClient reader = new PipelineStoreClient(Settings.EMPTY, node().injector()); - reader.start(); - - createIndex(PipelineStore.INDEX); - int numDocs = scaledRandomIntBetween(32, 128); + public void testSearchScrollIterator() { + createIndex("index"); + int numDocs = scaledRandomIntBetween(0, 128); for (int i = 0; i < numDocs; i++) { - client().prepareIndex(PipelineStore.INDEX, PipelineStore.TYPE, Integer.toString(i)) + client().prepareIndex("index", "type", Integer.toString(i)) .setSource("field", "value" + i) .get(); } client().admin().indices().prepareRefresh().get(); int i = 0; - for (SearchHit hit : reader.readAllPipelines()) { + SearchRequest searchRequest = new SearchRequest("index"); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + // randomize size, because that also controls how many actual searches will happen: + sourceBuilder.size(scaledRandomIntBetween(1, 10)); + searchRequest.source(sourceBuilder); + Iterable hits = 
SearchScrollIterator.createIterator(client(), TimeValue.timeValueSeconds(10), searchRequest); + for (SearchHit hit : hits) { assertThat(hit.getId(), equalTo(Integer.toString(i))); - assertThat(hit.getVersion(), equalTo(1l)); assertThat(hit.getSource().get("field"), equalTo("value" + i)); i++; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 5cb4e703ea0..dd30334e422 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -50,7 +50,6 @@ public class IngestModule extends AbstractModule { binder().bind(IngestRestFilter.class).asEagerSingleton(); binder().bind(PipelineExecutionService.class).asEagerSingleton(); binder().bind(PipelineStore.class).asEagerSingleton(); - binder().bind(PipelineStoreClient.class).asEagerSingleton(); binder().bind(SimulateExecutionService.class).asEagerSingleton(); addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 34411fd60e7..5405459b28a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -87,7 +87,7 @@ public class IngestPlugin extends Plugin { if (transportClient) { return Collections.emptyList(); } else { - return Arrays.asList(PipelineStore.class, PipelineStoreClient.class); + return Collections.singletonList(PipelineStore.class); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java new file mode 100644 index 00000000000..d78274ce7f6 --- 
/dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.ingest.Pipeline; + +import java.io.IOException; + +public class PipelineDefinition implements Writeable, ToXContent { + + private static final PipelineDefinition PROTOTYPE = new PipelineDefinition((String) null, -1, null); + + public static PipelineDefinition readPipelineDefinitionFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + private final String id; + private final long version; + private final BytesReference source; + + private final Pipeline pipeline; + + PipelineDefinition(Pipeline pipeline, long version, 
BytesReference source) { + this.id = pipeline.getId(); + this.version = version; + this.source = source; + this.pipeline = pipeline; + } + + PipelineDefinition(String id, long version, BytesReference source) { + this.id = id; + this.version = version; + this.source = source; + this.pipeline = null; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public BytesReference getSource() { + return source; + } + + Pipeline getPipeline() { + return pipeline; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PipelineDefinition holder = (PipelineDefinition) o; + return source.equals(holder.source); + } + + @Override + public int hashCode() { + return source.hashCode(); + } + + @Override + public PipelineDefinition readFrom(StreamInput in) throws IOException { + String id = in.readString(); + long version = in.readLong(); + BytesReference source = in.readBytesReference(); + return new PipelineDefinition(id, version, source); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeLong(version); + out.writeBytesReference(source); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(id); + XContentHelper.writeRawField("_source", source, builder, params); + builder.field("_version", version); + builder.endObject(); + return builder; + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index fae1457ad12..cbfbf269dd3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -19,21 +19,39 @@ package org.elasticsearch.plugin.ingest; +import 
org.apache.lucene.util.IOUtils; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.common.SearchScrollIterator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; +import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -44,22 +62,25 @@ public class PipelineStore extends AbstractLifecycleComponent { public final static String INDEX = ".ingest"; public final static String TYPE = "pipeline"; + private 
final Injector injector; private final ThreadPool threadPool; + private final TimeValue scrollTimeout; private final ClusterService clusterService; private final TimeValue pipelineUpdateInterval; - private final PipelineStoreClient client; private final Pipeline.Factory factory = new Pipeline.Factory(); private final Map processorFactoryRegistry; - private volatile Map pipelines = new HashMap<>(); + private volatile Client client; + private volatile Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, ThreadPool threadPool, Environment environment, ClusterService clusterService, PipelineStoreClient client, Map processors) { + public PipelineStore(Settings settings, Injector injector, ThreadPool threadPool, Environment environment, ClusterService clusterService, Map processors) { super(settings); + this.injector = injector; this.threadPool = threadPool; this.clusterService = clusterService; + this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); - this.client = client; for (Processor.Factory factory : processors.values()) { factory.setConfigDirectory(environment.configFile()); } @@ -69,6 +90,7 @@ public class PipelineStore extends AbstractLifecycleComponent { @Override protected void doStart() { + client = injector.getInstance(Client.class); } @Override @@ -77,17 +99,43 @@ public class PipelineStore extends AbstractLifecycleComponent { @Override protected void doClose() { - for (Processor.Factory factory : processorFactoryRegistry.values()) { - try { - factory.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } + try { + IOUtils.close(processorFactoryRegistry.values()); + } catch (IOException e) { + throw new RuntimeException(e); } } + public void delete(DeletePipelineRequest request, ActionListener listener) { + DeleteRequest 
deleteRequest = new DeleteRequest(request); + deleteRequest.index(PipelineStore.INDEX); + deleteRequest.type(PipelineStore.TYPE); + deleteRequest.id(request.id()); + deleteRequest.refresh(true); + client.delete(deleteRequest, listener); + } + + public void put(PutPipelineRequest request, ActionListener listener) { + // validates the pipeline and processor configuration: + Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); + try { + constructPipeline(request.id(), pipelineConfig); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + IndexRequest indexRequest = new IndexRequest(request); + indexRequest.index(PipelineStore.INDEX); + indexRequest.type(PipelineStore.TYPE); + indexRequest.id(request.id()); + indexRequest.source(request.source()); + indexRequest.refresh(true); + client.index(indexRequest, listener); + } + public Pipeline get(String id) { - PipelineReference ref = pipelines.get(id); + PipelineDefinition ref = pipelines.get(id); if (ref != null) { return ref.getPipeline(); } else { @@ -99,17 +147,17 @@ public class PipelineStore extends AbstractLifecycleComponent { return processorFactoryRegistry; } - public List getReference(String... ids) { - List result = new ArrayList<>(ids.length); + public List getReference(String... 
ids) { + List result = new ArrayList<>(ids.length); for (String id : ids) { if (Regex.isSimpleMatchPattern(id)) { - for (Map.Entry entry : pipelines.entrySet()) { + for (Map.Entry entry : pipelines.entrySet()) { if (Regex.simpleMatch(id, entry.getKey())) { result.add(entry.getValue()); } } } else { - PipelineReference reference = pipelines.get(id); + PipelineDefinition reference = pipelines.get(id); if (reference != null) { result.add(reference); } @@ -118,7 +166,7 @@ public class PipelineStore extends AbstractLifecycleComponent { return result; } - public Pipeline constructPipeline(String id, Map config) throws IOException { + Pipeline constructPipeline(String id, Map config) throws IOException { return factory.create(id, config, processorFactoryRegistry); } @@ -127,11 +175,11 @@ public class PipelineStore extends AbstractLifecycleComponent { // so for that reason the goal is to keep the update logic simple. int changed = 0; - Map newPipelines = new HashMap<>(pipelines); - for (SearchHit hit : client.readAllPipelines()) { + Map newPipelines = new HashMap<>(pipelines); + for (SearchHit hit : readAllPipelines()) { String pipelineId = hit.getId(); BytesReference pipelineSource = hit.getSourceRef(); - PipelineReference previous = newPipelines.get(pipelineId); + PipelineDefinition previous = newPipelines.get(pipelineId); if (previous != null) { if (previous.getSource().equals(pipelineSource)) { continue; @@ -140,12 +188,12 @@ public class PipelineStore extends AbstractLifecycleComponent { changed++; Pipeline pipeline = constructPipeline(hit.getId(), hit.sourceAsMap()); - newPipelines.put(pipelineId, new PipelineReference(pipeline, hit.getVersion(), pipelineSource)); + newPipelines.put(pipelineId, new PipelineDefinition(pipeline, hit.getVersion(), pipelineSource)); } int removed = 0; for (String existingPipelineId : pipelines.keySet()) { - if (!client.existPipeline(existingPipelineId)) { + if (!existPipeline(existingPipelineId)) { 
newPipelines.remove(existingPipelineId); removed++; } @@ -165,6 +213,23 @@ public class PipelineStore extends AbstractLifecycleComponent { } } + boolean existPipeline(String pipelineId) { + GetRequest request = new GetRequest(PipelineStore.INDEX, PipelineStore.TYPE, pipelineId); + GetResponse response = client.get(request).actionGet(); + return response.isExists(); + } + + Iterable readAllPipelines() { + // TODO: the search should be replaced with an ingest API when it is available + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.version(true); + sourceBuilder.sort("_doc", SortOrder.ASC); + SearchRequest searchRequest = new SearchRequest(PipelineStore.INDEX); + searchRequest.source(sourceBuilder); + searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); + return SearchScrollIterator.createIterator(client, scrollTimeout, searchRequest); + } + class Updater implements Runnable { @Override @@ -191,43 +256,4 @@ public class PipelineStore extends AbstractLifecycleComponent { } } - public static class PipelineReference { - - private final Pipeline pipeline; - private final long version; - private final BytesReference source; - - PipelineReference(Pipeline pipeline, long version, BytesReference source) { - this.pipeline = pipeline; - this.version = version; - this.source = source; - } - - public Pipeline getPipeline() { - return pipeline; - } - - public long getVersion() { - return version; - } - - public BytesReference getSource() { - return source; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - PipelineReference holder = (PipelineReference) o; - return source.equals(holder.source); - } - - @Override - public int hashCode() { - return source.hashCode(); - } - } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreClient.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreClient.java deleted file mode 100644 index c11969f840c..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreClient.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plugin.ingest; - -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.sort.SortOrder; - -import java.util.Collections; -import java.util.Iterator; - -public class PipelineStoreClient extends AbstractLifecycleComponent { - - private volatile Client client; - private final Injector injector; - private final TimeValue scrollTimeout; - - @Inject - public PipelineStoreClient(Settings settings, Injector injector) { - super(settings); - this.injector = injector; - this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); - } - - @Override - protected void doStart() { - client = injector.getInstance(Client.class); - } - - @Override - protected void doStop() { - client.close(); - } - - @Override - protected void doClose() { - } - - public Iterable readAllPipelines() { - // TODO: the search should be replaced with an ingest API when it is available - SearchResponse searchResponse = client.prepareSearch(PipelineStore.INDEX) - .setVersion(true) - .setScroll(scrollTimeout) - .addSort("_doc", SortOrder.ASC) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .get(); - - if (searchResponse.getHits().getTotalHits() == 0) { - return Collections.emptyList(); - } - logger.debug("reading [{}] pipeline documents", searchResponse.getHits().totalHits()); - return new Iterable() { - @Override - public Iterator iterator() { - return new SearchScrollIterator(searchResponse); - } - }; - } - - public boolean 
existPipeline(String pipelineId) { - GetResponse response = client.prepareGet(PipelineStore.INDEX, PipelineStore.TYPE, pipelineId).get(); - return response.isExists(); - } - - class SearchScrollIterator implements Iterator { - - private SearchResponse searchResponse; - - private int currentIndex; - private SearchHit[] currentHits; - - SearchScrollIterator(SearchResponse searchResponse) { - this.searchResponse = searchResponse; - this.currentHits = searchResponse.getHits().getHits(); - } - - @Override - public boolean hasNext() { - if (currentIndex < currentHits.length) { - return true; - } else { - if (searchResponse == null) { - return false; - } - - searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) - .setScroll(scrollTimeout) - .get(); - if (searchResponse.getHits().getHits().length == 0) { - searchResponse = null; - return false; - } else { - currentHits = searchResponse.getHits().getHits(); - currentIndex = 0; - return true; - } - } - } - - @Override - public SearchHit next() { - SearchHit hit = currentHits[currentIndex++]; - if (logger.isTraceEnabled()) { - logger.trace("reading pipeline document [{}] with source [{}]", hit.getId(), hit.sourceAsString()); - } - return hit; - } - } - -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java index c8b60a0252f..dff4d4697b0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java @@ -34,22 +34,17 @@ import org.elasticsearch.transport.TransportService; public class DeletePipelineTransportAction extends HandledTransportAction { - private final TransportDeleteAction deleteAction; + private final PipelineStore pipelineStore; 
@Inject - public DeletePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportDeleteAction deleteAction) { + public DeletePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStore pipelineStore) { super(settings, DeletePipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, DeletePipelineRequest::new); - this.deleteAction = deleteAction; + this.pipelineStore = pipelineStore; } @Override protected void doExecute(DeletePipelineRequest request, ActionListener listener) { - DeleteRequest deleteRequest = new DeleteRequest(request); - deleteRequest.index(PipelineStore.INDEX); - deleteRequest.type(PipelineStore.TYPE); - deleteRequest.id(request.id()); - deleteRequest.refresh(true); - deleteAction.execute(deleteRequest, new ActionListener() { + pipelineStore.delete(request, new ActionListener() { @Override public void onResponse(DeleteResponse deleteResponse) { listener.onResponse(new DeletePipelineResponse(deleteResponse.getId(), deleteResponse.isFound())); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java index 020c8004631..8d7bf9cc258 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java @@ -27,26 +27,27 @@ import org.elasticsearch.common.xcontent.StatusToXContent; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; 
+import org.elasticsearch.plugin.ingest.PipelineDefinition; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; public class GetPipelineResponse extends ActionResponse implements StatusToXContent { - private Map pipelines; - private Map versions; + private List pipelines; public GetPipelineResponse() { } - public GetPipelineResponse(Map pipelines, Map versions) { + public GetPipelineResponse(List pipelines) { this.pipelines = pipelines; - this.versions = versions; } - public Map pipelines() { + public List pipelines() { return pipelines; } @@ -54,14 +55,9 @@ public class GetPipelineResponse extends ActionResponse implements StatusToXCont public void readFrom(StreamInput in) throws IOException { super.readFrom(in); int size = in.readVInt(); - pipelines = new HashMap<>(size); + pipelines = new ArrayList<>(size); for (int i = 0; i < size; i++) { - pipelines.put(in.readString(), in.readBytesReference()); - } - size = in.readVInt(); - versions = new HashMap<>(size); - for (int i = 0; i < size; i++) { - versions.put(in.readString(), in.readVLong()); + pipelines.add(PipelineDefinition.readPipelineDefinitionFrom(in)); } } @@ -69,14 +65,8 @@ public class GetPipelineResponse extends ActionResponse implements StatusToXCont public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(pipelines.size()); - for (Map.Entry entry : pipelines.entrySet()) { - out.writeString(entry.getKey()); - out.writeBytesReference(entry.getValue()); - } - out.writeVInt(versions.size()); - for (Map.Entry entry : versions.entrySet()) { - out.writeString(entry.getKey()); - out.writeVLong(entry.getValue()); + for (PipelineDefinition pipeline : pipelines) { + pipeline.writeTo(out); } } @@ -91,11 +81,8 @@ public class GetPipelineResponse extends ActionResponse implements StatusToXCont @Override public XContentBuilder toXContent(XContentBuilder builder, 
Params params) throws IOException { - for (Map.Entry entry : pipelines.entrySet()) { - builder.startObject(entry.getKey()); - XContentHelper.writeRawField("_source", entry.getValue(), builder, params); - builder.field("_version", versions.get(entry.getKey())); - builder.endObject(); + for (PipelineDefinition definition : pipelines) { + definition.toXContent(builder, params); } return builder; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java index 89ba97d81b3..ada112cade8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.PipelineDefinition; import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -46,13 +47,7 @@ public class GetPipelineTransportAction extends HandledTransportAction listener) { - List references = pipelineStore.getReference(request.ids()); - Map result = new HashMap<>(); - Map versions = new HashMap<>(); - for (PipelineStore.PipelineReference reference : references) { - result.put(reference.getPipeline().getId(), reference.getSource()); - versions.put(reference.getPipeline().getId(), reference.getVersion()); - } - listener.onResponse(new GetPipelineResponse(result, versions)); + List references = pipelineStore.getReference(request.ids()); + listener.onResponse(new GetPipelineResponse(references)); } } diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java index b6c59ff8ed8..72e6391167f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java @@ -38,34 +38,17 @@ import java.util.Map; public class PutPipelineTransportAction extends HandledTransportAction { - private final TransportIndexAction indexAction; private final PipelineStore pipelineStore; @Inject - public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportIndexAction indexAction, PipelineStore pipelineStore) { + public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStore pipelineStore) { super(settings, PutPipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); - this.indexAction = indexAction; this.pipelineStore = pipelineStore; } @Override protected void doExecute(PutPipelineRequest request, ActionListener listener) { - // validates the pipeline and processor configuration: - Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); - try { - pipelineStore.constructPipeline(request.id(), pipelineConfig); - } catch (IOException e) { - listener.onFailure(e); - return; - } - - IndexRequest indexRequest = new IndexRequest(request); - indexRequest.index(PipelineStore.INDEX); - indexRequest.type(PipelineStore.TYPE); - indexRequest.id(request.id()); - indexRequest.source(request.source()); - 
indexRequest.refresh(true); - indexAction.execute(indexRequest, new ActionListener() { + pipelineStore.put(request, new ActionListener() { @Override public void onResponse(IndexResponse indexResponse) { PutPipelineResponse response = new PutPipelineResponse(); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 67b484488de..985f91bfb8e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -89,7 +89,8 @@ public class IngestClientIT extends ESIntegTestCase { .setIds("_id") .get(); assertThat(response.isFound(), is(true)); - assertThat(response.pipelines().get("_id"), notNullValue()); + assertThat(response.pipelines().size(), equalTo(1)); + assertThat(response.pipelines().get(0).getId(), equalTo("_id")); } }); @@ -178,7 +179,8 @@ public class IngestClientIT extends ESIntegTestCase { .setIds("_id") .get(); assertThat(response.isFound(), is(true)); - assertThat(response.pipelines().get("_id"), notNullValue()); + assertThat(response.pipelines().size(), equalTo(1)); + assertThat(response.pipelines().get(0).getId(), equalTo("_id")); } }); @@ -232,7 +234,7 @@ public class IngestClientIT extends ESIntegTestCase { .setIds("_id") .get(); assertThat(response.isFound(), is(false)); - assertThat(response.pipelines().get("_id"), nullValue()); + assertThat(response.pipelines().size(), equalTo(0)); } }); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 559dd54487d..1dfc64f65b5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -19,41 +19,59 @@ package 
org.elasticsearch.plugin.ingest; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.get.GetResult; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.InternalSearchHit; +import org.elasticsearch.search.internal.InternalSearchHits; +import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import org.mockito.ArgumentMatcher; +import org.mockito.Matchers; import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; import java.util.List; +import java.util.Objects; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.*; -import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import static org.mockito.Matchers.any; public class PipelineStoreTests extends ESTestCase { - private PipelineStore store; private ThreadPool threadPool; - private PipelineStoreClient client; + private PipelineStore store; + private Client client; @Before public void init() { threadPool = new ThreadPool("test"); + client = mock(Client.class); + Injector injector = mock(Injector.class); + when(injector.getInstance(Client.class)).thenReturn(client); + ClusterService clusterService = 
mock(ClusterService.class); - client = mock(PipelineStoreClient.class); + when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); Environment environment = mock(Environment.class); - store = new PipelineStore(Settings.EMPTY, threadPool, environment, clusterService, client, Collections.emptyMap()); + store = new PipelineStore(Settings.EMPTY, injector, threadPool, environment, clusterService, Collections.emptyMap()); store.start(); } @@ -63,22 +81,21 @@ public class PipelineStoreTests extends ESTestCase { threadPool.shutdown(); } - public void testUpdatePipeline() throws Exception { List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(client.readAllPipelines()).thenReturn(hits); - when(client.existPipeline("1")).thenReturn(true); + when(client.search(any())).thenReturn(expectedSearchReponse(hits)); + when(client.get(any())).thenReturn(expectedGetResponse(true)); assertThat(store.get("1"), nullValue()); store.updatePipelines(); assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); - when(client.existPipeline("2")).thenReturn(true); + when(client.get(any())).thenReturn(expectedGetResponse(true)); hits.add(new InternalSearchHit(0, "2", new StringText("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); @@ -89,7 +106,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(store.get("2").getDescription(), equalTo("_description2")); hits.remove(1); - when(client.existPipeline("2")).thenReturn(false); + when(client.get(eqGetRequest(PipelineStore.INDEX, PipelineStore.TYPE, "2"))).thenReturn(expectedGetResponse(false)); store.updatePipelines(); assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); @@ -101,8 
+118,8 @@ public class PipelineStoreTests extends ESTestCase { hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(client.readAllPipelines()).thenReturn(hits); - when(client.existPipeline(anyString())).thenReturn(true); + when(client.search(any())).thenReturn(expectedSearchReponse(hits)); + when(client.get(any())).thenReturn(expectedGetResponse(true)); assertThat(store.get("1"), nullValue()); store.startUpdateWorker(); @@ -131,20 +148,17 @@ public class PipelineStoreTests extends ESTestCase { hits.add(new InternalSearchHit(0, "foo", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "bar", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "foobar", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - when(client.readAllPipelines()).thenReturn(hits); + when(client.search(any())).thenReturn(expectedSearchReponse(hits)); store.updatePipelines(); - List result = store.getReference("foo"); + List result = store.getReference("foo"); assertThat(result.size(), equalTo(1)); assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); result = store.getReference("foo*"); // to make sure the order is consistent in the test: - Collections.sort(result, new Comparator() { - @Override - public int compare(PipelineStore.PipelineReference first, PipelineStore.PipelineReference second) { - return first.getPipeline().getId().compareTo(second.getPipeline().getId()); - } + result.sort((first, second) -> { + return first.getPipeline().getId().compareTo(second.getPipeline().getId()); }); assertThat(result.size(), equalTo(2)); assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); @@ -156,11 +170,8 @@ public 
class PipelineStoreTests extends ESTestCase { result = store.getReference("*"); // to make sure the order is consistent in the test: - Collections.sort(result, new Comparator() { - @Override - public int compare(PipelineStore.PipelineReference first, PipelineStore.PipelineReference second) { - return first.getPipeline().getId().compareTo(second.getPipeline().getId()); - } + result.sort((first, second) -> { + return first.getPipeline().getId().compareTo(second.getPipeline().getId()); }); assertThat(result.size(), equalTo(3)); assertThat(result.get(0).getPipeline().getId(), equalTo("bar")); @@ -173,4 +184,49 @@ public class PipelineStoreTests extends ESTestCase { assertThat(result.get(1).getPipeline().getId(), equalTo("bar")); } + ActionFuture expectedSearchReponse(List hits) { + return new PlainActionFuture() { + + @Override + public SearchResponse get(long timeout, TimeUnit unit) { + InternalSearchHits hits1 = new InternalSearchHits(hits.toArray(new InternalSearchHit[0]), hits.size(), 1f); + return new SearchResponse(new InternalSearchResponse(hits1, null, null, false, null), "_scrollId", 1, 1, 1, null); + } + }; + } + + ActionFuture expectedGetResponse(boolean exists) { + return new PlainActionFuture() { + @Override + public GetResponse get() throws InterruptedException, ExecutionException { + return new GetResponse(new GetResult("_index", "_type", "_id", 1, exists, null, null)); + } + }; + } + + GetRequest eqGetRequest(String index, String type, String id) { + return Matchers.argThat(new GetRequestMatcher(index, type, id)); + } + + static class GetRequestMatcher extends ArgumentMatcher { + + private final String index; + private final String type; + private final String id; + + public GetRequestMatcher(String index, String type, String id) { + this.index = index; + this.type = type; + this.id = id; + } + + @Override + public boolean matches(Object o) { + GetRequest getRequest = (GetRequest) o; + return Objects.equals(getRequest.index(), index) && + 
Objects.equals(getRequest.type(), type) && + Objects.equals(getRequest.id(), id); + } + } + } From 9d1fa0d6da58688def216a6ee6b03b3757b0ab6a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 24 Nov 2015 23:59:44 +0100 Subject: [PATCH 086/347] ingest: Add `meta` processor that allows to modify the metadata attributes of document being processed --- docs/plugins/ingest.asciidoc | 49 ++++++ .../elasticsearch/ingest/IngestDocument.java | 60 ++++++- .../ingest/processor/Processor.java | 5 +- .../processor/meta/MetaDataProcessor.java | 71 ++++++++ .../plugin/ingest/IngestModule.java | 2 + .../ingest/PipelineExecutionService.java | 48 ++++-- .../ingest/transport/IngestActionFilter.java | 15 +- .../transport/WriteableIngestDocument.java | 24 ++- .../simulate/SimulatePipelineRequest.java | 14 +- .../ingest/IngestDocumentTests.java | 28 +-- .../meta/MetaDataProcessorFactoryTests.java | 65 +++++++ .../meta/MetaDataProcessorTests.java | 33 ++++ .../ingest/PipelineExecutionServiceTests.java | 159 +++++++++++++----- .../transport/IngestActionFilterTests.java | 28 ++- .../test/ingest/70_meta_processor.yaml | 45 +++++ 15 files changed, 538 insertions(+), 108 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 3532204fee2..9c1877f2a49 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -396,6 +396,55 @@ An example that adds the parsed date to the `timestamp` field based on the `init } -------------------------------------------------- +==== Meta processor + 
+The `meta` processor allows to modify metadata properties of a document being processed. + +The following example changes the index of a document to `alternative_index` instead of indexing it into an index +that was specified in the index or bulk request: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "meta" : { + "_index" : "alternative_index" + } + } + ] +} +-------------------------------------------------- + +The following metadata attributes can be modified in this processor: `_index`, `_type`, `_id`, `_routing`, `_parent`, +`_timestamp` and `_ttl`. All these metadata attributes can be specified in the body of the `meta` processor. + +Also the metadata settings in this processor are templatable which allows metadata field values to be replaced with +field values in the source of the document being indexed. The mustache template language is used and anything between +`{{` and `}}` can contain a template and point to any field in the source of the document. + +The following example documents being processed end up being indexed into an index based on the resolved city name by +the `geoip` processor. 
(for example `city-amsterdam`) + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "geoip" : { + "source" : "ip" + } + }, + { + "meta" : { + "_index" : "city-{{geoip.city_name}}" + } + } + ] +} +-------------------------------------------------- === Put pipeline API diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 97274d4cffb..bbb47aed8b1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -31,13 +31,29 @@ public final class IngestDocument { private final Map metaData; private final Map source; - private boolean modified = false; + private boolean sourceModified = false; public IngestDocument(String index, String type, String id, Map source) { + this(index, type, id, null, null, null, null, source); + } + + public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp, String ttl, Map source) { this.metaData = new HashMap<>(); this.metaData.put(MetaData.INDEX.getFieldName(), index); this.metaData.put(MetaData.TYPE.getFieldName(), type); this.metaData.put(MetaData.ID.getFieldName(), id); + if (routing != null) { + this.metaData.put(MetaData.ROUTING.getFieldName(), routing); + } + if (parent != null) { + this.metaData.put(MetaData.PARENT.getFieldName(), parent); + } + if (timestamp != null) { + this.metaData.put(MetaData.TIMESTAMP.getFieldName(), timestamp); + } + if (ttl != null) { + this.metaData.put(MetaData.TTL.getFieldName(), ttl); + } this.source = source; } @@ -109,7 +125,7 @@ public final class IngestDocument { if (parent != null) { String leafKey = pathElements[pathElements.length - 1]; if (parent.containsKey(leafKey)) { - modified = true; + sourceModified = true; parent.remove(leafKey); } } @@ -166,13 +182,17 @@ 
public final class IngestDocument { String leafKey = pathElements[pathElements.length - 1]; inner.put(leafKey, value); - modified = true; + sourceModified = true; } public String getMetadata(MetaData metaData) { return this.metaData.get(metaData.getFieldName()); } + public void setMetaData(MetaData metaData, String value) { + this.metaData.put(metaData.getFieldName(), value); + } + /** * Returns the document. Should be used only for reading. Any change made to the returned map will * not be reflected to the modified flag. Modify the document instead using {@link #setFieldValue(String, Object)} @@ -182,8 +202,8 @@ public final class IngestDocument { return source; } - public boolean isModified() { - return modified; + public boolean isSourceModified() { + return sourceModified; } @Override @@ -203,6 +223,14 @@ public final class IngestDocument { return Objects.hash(metaData, source); } + @Override + public String toString() { + return "IngestDocument{" + + "metaData=" + metaData + + ", source=" + source + + '}'; + } + public enum MetaData { INDEX("_index"), @@ -222,6 +250,28 @@ public final class IngestDocument { public String getFieldName() { return fieldName; } + + public static MetaData fromString(String value) { + switch (value) { + case "_index": + return INDEX; + case "_type": + return TYPE; + case "_id": + return ID; + case "_routing": + return ROUTING; + case "_parent": + return PARENT; + case "_timestamp": + return TIMESTAMP; + case "_ttl": + return TTL; + default: + throw new IllegalArgumentException("no valid metadata field name [" + value + "]"); + } + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 6e7d276876c..67a1cad45a7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -49,7 +49,10 @@ 
public interface Processor { interface Factory

extends Closeable { /** - * Creates a processor based on the specified map of maps config + * Creates a processor based on the specified map of maps config. + * + * Implementations are responsible for removing the used keys, so that after creating a pipeline ingest can + * verify if all configurations settings have been used. */ P create(Map config) throws IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java new file mode 100644 index 00000000000..e2b3b31498d --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java @@ -0,0 +1,71 @@ +package org.elasticsearch.ingest.processor.meta; + +import com.github.mustachejava.DefaultMustacheFactory; +import com.github.mustachejava.Mustache; +import com.github.mustachejava.MustacheFactory; +import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.IngestDocument.MetaData; +import org.elasticsearch.ingest.processor.Processor; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +public final class MetaDataProcessor implements Processor { + + public final static String TYPE = "meta"; + + private final Map templates; + + public MetaDataProcessor(Map templates) { + this.templates = templates; + } + + @Override + public void execute(IngestDocument ingestDocument) { + Map model = ingestDocument.getSource(); + for (Map.Entry entry : templates.entrySet()) { + StringWriter writer = new StringWriter(); + entry.getValue().execute(writer, model); + ingestDocument.setMetaData(entry.getKey(), writer.toString()); + } + } + + @Override + public String getType() { + return TYPE; + } + + Map getTemplates() { + return templates; + } + + public 
final static class Factory implements Processor.Factory { + + private final MustacheFactory mustacheFactory = new DefaultMustacheFactory(); + + @Override + public MetaDataProcessor create(Map config) throws IOException { + Map templates = new HashMap<>(); + Iterator> iterator = config.entrySet().iterator(); + while (iterator.hasNext()) { + Map.Entry entry = iterator.next(); + MetaData metaData = MetaData.fromString(entry.getKey()); + Mustache mustache = mustacheFactory.compile(new FastStringReader(entry.getValue().toString()), ""); + templates.put(metaData, mustache); + iterator.remove(); + } + + if (templates.isEmpty()) { + throw new IllegalArgumentException("no meta fields specified"); + } + + return new MetaDataProcessor(Collections.unmodifiableMap(templates)); + } + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index dd30334e422..75a962ba272 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -35,6 +35,7 @@ import org.elasticsearch.ingest.processor.rename.RenameProcessor; import org.elasticsearch.ingest.processor.split.SplitProcessor; import org.elasticsearch.ingest.processor.trim.TrimProcessor; import org.elasticsearch.ingest.processor.uppercase.UppercaseProcessor; +import org.elasticsearch.ingest.processor.meta.MetaDataProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import org.elasticsearch.plugin.ingest.transport.simulate.SimulateExecutionService; @@ -65,6 +66,7 @@ public class IngestModule extends AbstractModule { addProcessor(TrimProcessor.TYPE, new TrimProcessor.Factory()); addProcessor(ConvertProcessor.TYPE, new ConvertProcessor.Factory()); addProcessor(GsubProcessor.TYPE, new GsubProcessor.Factory()); + addProcessor(MetaDataProcessor.TYPE, new 
MetaDataProcessor.Factory()); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Factory.class); for (Map.Entry entry : processors.entrySet()) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 4a963beecc8..e66697f02d6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -19,14 +19,17 @@ package org.elasticsearch.plugin.ingest; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.logging.support.LoggerMessageFormat; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Map; + public class PipelineExecutionService { static final String THREAD_POOL_NAME = IngestPlugin.NAME; @@ -40,22 +43,47 @@ public class PipelineExecutionService { this.threadPool = threadPool; } - public void execute(IngestDocument ingestDocument, String pipelineId, Listener listener) { + public void execute(IndexRequest indexRequest, String pipelineId, Listener listener) { Pipeline pipeline = store.get(pipelineId); if (pipeline == null) { listener.failed(new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist")); return; } - threadPool.executor(THREAD_POOL_NAME).execute(new Runnable() { - @Override - public void run() { - try { - pipeline.execute(ingestDocument); - listener.executed(ingestDocument); - } catch (Throwable e) { - listener.failed(e); + threadPool.executor(THREAD_POOL_NAME).execute(() -> { + 
String index = indexRequest.index(); + String type = indexRequest.type(); + String id = indexRequest.id(); + String routing = indexRequest.routing(); + String parent = indexRequest.parent(); + String timestamp = indexRequest.timestamp(); + String ttl = null; + if (indexRequest.ttl() != -1) { + // At this point we don't know the original string ttl that was specified, + // so we convert the ttl which is a long to a string using 'ms' as unit: + ttl = TimeValue.timeValueMillis(indexRequest.ttl()).toString(); + } + Map sourceAsMap = indexRequest.sourceAsMap(); + IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); + try { + pipeline.execute(ingestDocument); + if (ingestDocument.isSourceModified()) { + indexRequest.source(ingestDocument.getSource()); } + indexRequest.index(ingestDocument.getMetadata(IngestDocument.MetaData.INDEX)); + indexRequest.type(ingestDocument.getMetadata(IngestDocument.MetaData.TYPE)); + indexRequest.id(ingestDocument.getMetadata(IngestDocument.MetaData.ID)); + indexRequest.routing(ingestDocument.getMetadata(IngestDocument.MetaData.ROUTING)); + indexRequest.parent(ingestDocument.getMetadata(IngestDocument.MetaData.PARENT)); + indexRequest.timestamp(ingestDocument.getMetadata(IngestDocument.MetaData.TIMESTAMP)); + String ttlStr = ingestDocument.getMetadata(IngestDocument.MetaData.TTL); + if (ttlStr != null) { + TimeValue timeValue = TimeValue.parseTimeValue(ttlStr, null, "ttl"); + indexRequest.ttl(timeValue.millis()); + } + listener.executed(ingestDocument); + } catch (Throwable e) { + listener.failed(e); } }); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index a80d10a18e4..910f26341e7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -84,15 +84,9 @@ public final class IngestActionFilter extends AbstractComponent implements Actio chain.proceed(action, indexRequest, listener); return; } - - Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); - executionService.execute(ingestDocument, pipelineId, new PipelineExecutionService.Listener() { + executionService.execute(indexRequest, pipelineId, new PipelineExecutionService.Listener() { @Override public void executed(IngestDocument ingestDocument) { - if (ingestDocument.isModified()) { - indexRequest.source(ingestDocument.getSource()); - } indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); chain.proceed(action, indexRequest, listener); } @@ -127,14 +121,9 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } IndexRequest indexRequest = (IndexRequest) actionRequest; - Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), sourceAsMap); - executionService.execute(ingestDocument, pipelineId, new PipelineExecutionService.Listener() { + executionService.execute(indexRequest, pipelineId, new PipelineExecutionService.Listener() { @Override public void executed(IngestDocument ingestDocument) { - if (ingestDocument.isModified()) { - indexRequest.source(ingestDocument.getSource()); - } processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java index 4b65a16596d..3a75218857f 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java @@ -31,9 +31,7 @@ import java.io.IOException; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; -import static org.elasticsearch.ingest.IngestDocument.MetaData.INDEX; -import static org.elasticsearch.ingest.IngestDocument.MetaData.TYPE; +import static org.elasticsearch.ingest.IngestDocument.MetaData.*; public class WriteableIngestDocument implements Writeable, ToXContent { @@ -58,8 +56,12 @@ public class WriteableIngestDocument implements Writeable doc = in.readMap(); - return new WriteableIngestDocument(new IngestDocument(index, type, id, doc)); + return new WriteableIngestDocument(new IngestDocument(index, type, id, routing, parent, timestamp, ttl, doc)); } @Override @@ -67,16 +69,24 @@ public class WriteableIngestDocument implements Writeable ingestDocumentList = new ArrayList<>(); for (Map dataMap : docs) { Map document = ConfigurationUtils.readMap(dataMap, Fields.SOURCE); - IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(dataMap, Fields.INDEX), - ConfigurationUtils.readStringProperty(dataMap, Fields.TYPE), - ConfigurationUtils.readStringProperty(dataMap, Fields.ID), + IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(dataMap, MetaData.INDEX.getFieldName()), + ConfigurationUtils.readStringProperty(dataMap, MetaData.TYPE.getFieldName()), + ConfigurationUtils.readStringProperty(dataMap, MetaData.ID.getFieldName()), + ConfigurationUtils.readOptionalStringProperty(dataMap, MetaData.ROUTING.getFieldName()), + ConfigurationUtils.readOptionalStringProperty(dataMap, MetaData.PARENT.getFieldName()), + ConfigurationUtils.readOptionalStringProperty(dataMap, MetaData.TIMESTAMP.getFieldName()), + 
ConfigurationUtils.readOptionalStringProperty(dataMap, MetaData.TTL.getFieldName()), document); ingestDocumentList.add(ingestDocument); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 375e86f71c6..ad28fd663dc 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -116,14 +116,14 @@ public class IngestDocumentTests extends ESTestCase { public void testSimpleSetFieldValue() { ingestDocument.setFieldValue("new_field", "foo"); assertThat(ingestDocument.getSource().get("new_field"), equalTo("foo")); - assertThat(ingestDocument.isModified(), equalTo(true)); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); } public void testSetFieldValueNullValue() { ingestDocument.setFieldValue("new_field", null); assertThat(ingestDocument.getSource().containsKey("new_field"), equalTo(true)); assertThat(ingestDocument.getSource().get("new_field"), nullValue()); - assertThat(ingestDocument.isModified(), equalTo(true)); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); } @SuppressWarnings("unchecked") @@ -138,7 +138,7 @@ public class IngestDocumentTests extends ESTestCase { assertThat(c.get("d"), instanceOf(String.class)); String d = (String) c.get("d"); assertThat(d, equalTo("foo")); - assertThat(ingestDocument.isModified(), equalTo(true)); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); } public void testSetFieldValueOnExistingField() { @@ -154,7 +154,7 @@ public class IngestDocumentTests extends ESTestCase { assertThat(innerMap.get("new"), instanceOf(String.class)); String value = (String) innerMap.get("new"); assertThat(value, equalTo("bar")); - assertThat(ingestDocument.isModified(), equalTo(true)); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); } public void 
testSetFieldValueOnExistingParentTypeMismatch() { @@ -163,7 +163,7 @@ public class IngestDocumentTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], [java.util.Map] expected instead.")); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } @@ -173,7 +173,7 @@ public class IngestDocumentTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add field to null parent, [java.util.Map] expected instead.")); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } @@ -183,7 +183,7 @@ public class IngestDocumentTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } @@ -193,13 +193,13 @@ public class IngestDocumentTests extends ESTestCase { fail("add field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("cannot add null or empty field")); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } public void testRemoveField() { ingestDocument.removeField("foo"); - assertThat(ingestDocument.isModified(), equalTo(true)); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); assertThat(ingestDocument.getSource().size(), equalTo(2)); assertThat(ingestDocument.getSource().containsKey("foo"), equalTo(false)); } @@ -217,30 +217,30 @@ public class IngestDocumentTests extends ESTestCase { assertThat(map.size(), equalTo(0)); 
assertThat(ingestDocument.getSource().size(), equalTo(3)); assertThat(ingestDocument.getSource().containsKey("fizz"), equalTo(true)); - assertThat(ingestDocument.isModified(), equalTo(true)); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); } public void testRemoveNonExistingField() { ingestDocument.removeField("does_not_exist"); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } public void testRemoveExistingParentTypeMismatch() { ingestDocument.removeField("foo.test"); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } public void testRemoveNullField() { ingestDocument.removeField(null); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } public void testRemoveEmptyField() { ingestDocument.removeField(""); - assertThat(ingestDocument.isModified(), equalTo(false)); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); assertThat(ingestDocument.getSource().size(), equalTo(3)); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java new file mode 100644 index 00000000000..ee4cb0228a8 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java @@ -0,0 +1,65 @@ +package org.elasticsearch.ingest.processor.meta; + +import com.github.mustachejava.DefaultMustacheFactory; +import com.github.mustachejava.Mustache; +import com.github.mustachejava.MustacheException; +import org.elasticsearch.common.io.FastStringReader; +import 
org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.ingest.IngestDocument.MetaData; + +public class MetaDataProcessorFactoryTests extends ESTestCase { + + public void testCreate() throws Exception { + MetaDataProcessor.Factory factory = new MetaDataProcessor.Factory(); + Map config = new HashMap<>(); + for (MetaData metaData : MetaData.values()) { + config.put(metaData.getFieldName(), randomBoolean() ? "static text" : "{{expression}}"); + } + MetaDataProcessor processor = factory.create(config); + assertThat(processor.getTemplates().size(), Matchers.equalTo(7)); + assertThat(processor.getTemplates().get(MetaData.INDEX), Matchers.notNullValue()); + assertThat(processor.getTemplates().get(MetaData.TIMESTAMP), Matchers.notNullValue()); + assertThat(processor.getTemplates().get(MetaData.ID), Matchers.notNullValue()); + assertThat(processor.getTemplates().get(MetaData.ROUTING), Matchers.notNullValue()); + assertThat(processor.getTemplates().get(MetaData.PARENT), Matchers.notNullValue()); + assertThat(processor.getTemplates().get(MetaData.TIMESTAMP), Matchers.notNullValue()); + assertThat(processor.getTemplates().get(MetaData.TTL), Matchers.notNullValue()); + } + + public void testCreateIllegalMetaData() throws Exception { + MetaDataProcessor.Factory factory = new MetaDataProcessor.Factory(); + try { + factory.create(Collections.singletonMap("_field", "text {{expression}}")); + fail("exception should have been thrown"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), Matchers.equalTo("no valid metadata field name [_field]")); + } + } + + public void testCreateIllegalEmpty() throws Exception { + MetaDataProcessor.Factory factory = new MetaDataProcessor.Factory(); + try { + factory.create(Collections.emptyMap()); + fail("exception should have been thrown"); + } catch 
(IllegalArgumentException e) { + assertThat(e.getMessage(), Matchers.equalTo("no meta fields specified")); + } + } + + public void testIlegalMustacheExpression() throws Exception { + try { + new MetaDataProcessor.Factory().create(Collections.singletonMap("_index", "text {{var")); + fail("exception expected"); + } catch (MustacheException e) { + assertThat(e.getMessage(), Matchers.equalTo("Improperly closed variable in :1")); + } + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java new file mode 100644 index 00000000000..13d56017d60 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java @@ -0,0 +1,33 @@ +package org.elasticsearch.ingest.processor.meta; + +import com.github.mustachejava.DefaultMustacheFactory; +import com.github.mustachejava.Mustache; +import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.ingest.IngestDocument.*; + +public class MetaDataProcessorTests extends ESTestCase { + + public void testExecute() throws Exception { + Map templates = new HashMap<>(); + for (MetaData metaData : MetaData.values()) { + templates.put(metaData, new DefaultMustacheFactory().compile(new FastStringReader("some {{field}}"), "noname")); + } + + MetaDataProcessor processor = new MetaDataProcessor(templates); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.singletonMap("field", "value")); + processor.execute(ingestDocument); + + for (MetaData metaData : MetaData.values()) { + assertThat(ingestDocument.getMetadata(metaData), Matchers.equalTo("some value")); + } + } + +} diff 
--git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 3f9ec3517e6..917d4b1815c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -19,85 +19,168 @@ package org.elasticsearch.plugin.ingest; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.meta.MetaDataProcessor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import org.mockito.ArgumentMatcher; +import org.mockito.Matchers; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; -import java.util.Arrays; -import java.util.Collections; +import java.util.*; +import java.util.concurrent.Executor; +import static org.hamcrest.Matchers.*; import static org.mockito.Matchers.any; import static org.mockito.Mockito.*; public class PipelineExecutionServiceTests extends ESTestCase { private PipelineStore store; - private ThreadPool threadPool; private PipelineExecutionService executionService; @Before public void setup() { store = mock(PipelineStore.class); - threadPool = new ThreadPool( - Settings.builder() - .put("name", "_name") - .put(PipelineExecutionService.additionalSettings(Settings.EMPTY)) - .build() - ); + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.executor(anyString())).thenReturn(Runnable::run); executionService = new 
PipelineExecutionService(store, threadPool); } - @After - public void destroy() { - threadPool.shutdown(); - } - public void testExecute_pipelineDoesNotExist() { when(store.get("_id")).thenReturn(null); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); - executionService.execute(ingestDocument, "_id", listener); + executionService.execute(indexRequest, "_id", listener); verify(listener).failed(any(IllegalArgumentException.class)); - verify(listener, times(0)).executed(ingestDocument); + verify(listener, times(0)).executed(any()); } - public void testExecute_success() throws Exception { + public void testExecuteSuccess() throws Exception { Processor processor = mock(Processor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); - executionService.execute(ingestDocument, "_id", listener); - assertBusy(new Runnable() { - @Override - public void run() { - verify(processor).execute(ingestDocument); - verify(listener).executed(ingestDocument); - verify(listener, times(0)).failed(any(Exception.class)); + executionService.execute(indexRequest, "_id", listener); + verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener).executed(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener, times(0)).failed(any(Exception.class)); + } + + public void testExecutePropagateAllMetaDataUpdates() throws Exception { + Processor 
processor = mock(Processor.class); + doAnswer((InvocationOnMock invocationOnMock) -> { + IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; + for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) { + if (metaData == IngestDocument.MetaData.TTL) { + ingestDocument.setMetaData(IngestDocument.MetaData.TTL, "5w"); + } else { + ingestDocument.setMetaData(metaData, "update" + metaData.getFieldName()); + } + } - }); + return null; + }).when(processor).execute(any()); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); + + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + executionService.execute(indexRequest, "_id", listener); + verify(processor).execute(any()); + verify(listener).executed(any()); + verify(listener, times(0)).failed(any(Exception.class)); + + assertThat(indexRequest.index(), equalTo("update_index")); + assertThat(indexRequest.type(), equalTo("update_type")); + assertThat(indexRequest.id(), equalTo("update_id")); + assertThat(indexRequest.routing(), equalTo("update_routing")); + assertThat(indexRequest.parent(), equalTo("update_parent")); + assertThat(indexRequest.timestamp(), equalTo("update_timestamp")); + assertThat(indexRequest.ttl(), equalTo(3024000000l)); } public void testExecute_failure() throws Exception { Processor processor = mock(Processor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.emptyMap()); - doThrow(new RuntimeException()).when(processor).execute(ingestDocument); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + doThrow(new RuntimeException()).when(processor).execute(eqID("_index", 
"_type", "_id", Collections.emptyMap())); PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); - executionService.execute(ingestDocument, "_id", listener); - assertBusy(new Runnable() { - @Override - public void run() { - verify(processor).execute(ingestDocument); - verify(listener, times(0)).executed(ingestDocument); - verify(listener).failed(any(RuntimeException.class)); - } - }); + executionService.execute(indexRequest, "_id", listener); + verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener, times(0)).executed(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener).failed(any(RuntimeException.class)); + } + + public void testExecuteTTL() throws Exception { + // test with valid ttl + MetaDataProcessor.Factory metaProcessorFactory = new MetaDataProcessor.Factory(); + Map config = new HashMap<>(); + config.put("_ttl", "5d"); + MetaDataProcessor processor = metaProcessorFactory.create(config); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + executionService.execute(indexRequest, "_id", listener); + + assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl").millis())); + verify(listener, times(1)).executed(any()); + verify(listener, never()).failed(any()); + + // test with invalid ttl + metaProcessorFactory = new MetaDataProcessor.Factory(); + config = new HashMap<>(); + config.put("_ttl", "abc"); + processor = metaProcessorFactory.create(config); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + + indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + listener = 
mock(PipelineExecutionService.Listener.class); + executionService.execute(indexRequest, "_id", listener); + + verify(listener, never()).executed(any()); + verify(listener, times(1)).failed(any(ElasticsearchParseException.class)); + + // test with provided ttl + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.emptyList())); + + indexRequest = new IndexRequest("_index", "_type", "_id") + .source(Collections.emptyMap()) + .ttl(1000l); + listener = mock(PipelineExecutionService.Listener.class); + executionService.execute(indexRequest, "_id", listener); + + assertThat(indexRequest.ttl(), equalTo(1000l)); + verify(listener, times(1)).executed(any()); + verify(listener, never()).failed(any(Throwable.class)); + } + + private IngestDocument eqID(String index, String type, String id, Map source) { + return Matchers.argThat(new IngestDocumentMatcher(index, type, id, source)); + } + + private class IngestDocumentMatcher extends ArgumentMatcher { + + private final IngestDocument ingestDocument; + + public IngestDocumentMatcher(String index, String type, String id, Map source) { + this.ingestDocument = new IngestDocument(index, type, id, source); + } + + @Override + public boolean matches(Object o) { + return Objects.equals(ingestDocument, o); + } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 974367aec17..67ea1b32d3e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -83,7 +83,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IngestDocument.class), eq("_id"), 
any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verifyZeroInteractions(actionFilterChain); } @@ -96,7 +96,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verifyZeroInteractions(actionFilterChain); } @@ -121,19 +121,15 @@ public class IngestActionFilterTests extends ESTestCase { ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - Answer answer = new Answer() { - @Override - public Object answer(InvocationOnMock invocationOnMock) throws Throwable { - IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; - PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; - listener.executed(ingestDocument); - return null; - } + Answer answer = invocationOnMock -> { + PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; + listener.executed(new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), indexRequest.sourceAsMap())); + return null; }; - doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IngestDocument.class), eq("_id"), 
any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verify(actionFilterChain).proceed("_action", indexRequest, actionListener); verifyZeroInteractions(actionListener); } @@ -154,10 +150,10 @@ public class IngestActionFilterTests extends ESTestCase { return null; } }; - doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); verify(actionListener).onFailure(exception); verifyZeroInteractions(actionFilterChain); } @@ -250,7 +246,7 @@ public class IngestActionFilterTests extends ESTestCase { listener.failed(exception); return null; }; - doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); CaptureActionListener actionListener = new CaptureActionListener(); RecordRequestAFC actionFilterChain = new RecordRequestAFC(); @@ -295,7 +291,7 @@ public class IngestActionFilterTests extends ESTestCase { listener.failed(exception); return null; }; - doAnswer(answer).when(executionService).execute(any(IngestDocument.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); ActionListener actionListener = 
mock(ActionListener.class); RecordRequestAFC actionFilterChain = new RecordRequestAFC(); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml new file mode 100644 index 00000000000..be13146fb63 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml @@ -0,0 +1,45 @@ +--- +"Test meta processor": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "meta" : { + "_index" : "surprise" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field: "value"} + + - do: + get: + index: surprise + type: test + id: 1 + - length: { _source: 1 } + - match: { _source.field: "value" } From 9aff8c6352704c2671cdc5e5e6925f5e5e0d25b5 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 26 Nov 2015 17:00:16 +0100 Subject: [PATCH 087/347] fix compile errors --- .../SimulatePipelineRequestParsingTests.java | 40 +++++++++---------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java index e15f2f92915..1bedcc456af 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java @@ -29,9 +29,7 @@ import org.junit.Before; import java.io.IOException; import java.util.*; -import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; -import static org.elasticsearch.ingest.IngestDocument.MetaData.INDEX; -import static org.elasticsearch.ingest.IngestDocument.MetaData.TYPE; +import static org.elasticsearch.ingest.IngestDocument.MetaData.*; import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -64,17 +62,17 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { String index = randomAsciiOfLengthBetween(1, 10); String type = randomAsciiOfLengthBetween(1, 10); String id = randomAsciiOfLengthBetween(1, 10); - doc.put(Fields.INDEX, index); - doc.put(Fields.TYPE, type); - doc.put(Fields.ID, id); + doc.put(INDEX.getFieldName(), index); + doc.put(TYPE.getFieldName(), type); + doc.put(ID.getFieldName(), id); String fieldName = randomAsciiOfLengthBetween(1, 10); String fieldValue = randomAsciiOfLengthBetween(1, 10); doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); docs.add(doc); Map expectedDoc = new HashMap<>(); - expectedDoc.put(Fields.INDEX, index); - expectedDoc.put(Fields.TYPE, type); - expectedDoc.put(Fields.ID, id); + expectedDoc.put(INDEX.getFieldName(), index); + expectedDoc.put(TYPE.getFieldName(), type); + expectedDoc.put(ID.getFieldName(), id); expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); expectedDocs.add(expectedDoc); } @@ -86,9 +84,9 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { for (IngestDocument ingestDocument : actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); assertThat(ingestDocument.getSource(), 
equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(Fields.INDEX))); - assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(Fields.TYPE))); - assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(Fields.ID))); + assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); + assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); } assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); @@ -108,17 +106,17 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { String index = randomAsciiOfLengthBetween(1, 10); String type = randomAsciiOfLengthBetween(1, 10); String id = randomAsciiOfLengthBetween(1, 10); - doc.put(Fields.INDEX, index); - doc.put(Fields.TYPE, type); - doc.put(Fields.ID, id); + doc.put(INDEX.getFieldName(), index); + doc.put(TYPE.getFieldName(), type); + doc.put(ID.getFieldName(), id); String fieldName = randomAsciiOfLengthBetween(1, 10); String fieldValue = randomAsciiOfLengthBetween(1, 10); doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); docs.add(doc); Map expectedDoc = new HashMap<>(); - expectedDoc.put(Fields.INDEX, index); - expectedDoc.put(Fields.TYPE, type); - expectedDoc.put(Fields.ID, id); + expectedDoc.put(INDEX.getFieldName(), index); + expectedDoc.put(TYPE.getFieldName(), type); + expectedDoc.put(ID.getFieldName(), id); expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); expectedDocs.add(expectedDoc); } @@ -139,9 +137,9 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { for (IngestDocument ingestDocument : actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); 
assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(Fields.INDEX))); - assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(Fields.TYPE))); - assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(Fields.ID))); + assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); + assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); } assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); From 0fe1b4eab1a6240edeea7bc41f107cc849d90fb6 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 26 Nov 2015 18:12:15 +0100 Subject: [PATCH 088/347] PipelineStore no longer is a lifecycle component Client in PipelineStore gets provided via a guice provider Processor and Factory throw Exception instead of IOException Removed PipelineExecutionService.Listener with ActionListener --- .../org/elasticsearch/ingest/Pipeline.java | 4 +- .../ingest/processor/Processor.java | 4 +- .../plugin/ingest/IngestPlugin.java | 9 -- .../ingest/PipelineExecutionService.java | 17 ++-- .../plugin/ingest/PipelineStore.java | 89 ++++++++++--------- .../ingest/transport/IngestActionFilter.java | 12 +-- .../simulate/SimulatePipelineRequest.java | 2 +- .../SimulatePipelineTransportAction.java | 2 +- .../AbstractStringProcessorTestCase.java | 6 +- .../processor/add/AddProcessorTests.java | 6 +- .../convert/ConvertProcessorTests.java | 24 ++--- .../processor/gsub/GsubProcessorTests.java | 6 +- .../processor/join/JoinProcessorTests.java | 8 +- .../remove/RemoveProcessorTests.java | 4 +- .../rename/RenameProcessorTests.java | 6 +- .../processor/split/SplitProcessorTests.java | 6 +- .../ingest/PipelineExecutionServiceTests.java | 43 
++++----- .../plugin/ingest/PipelineStoreTests.java | 7 +- .../transport/IngestActionFilterTests.java | 32 +++---- 19 files changed, 139 insertions(+), 148 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index b2ba34cfd54..4fb26bc7ab0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -44,7 +44,7 @@ public final class Pipeline { /** * Modifies the data of a document to be indexed based on the processor this pipeline holds */ - public void execute(IngestDocument ingestDocument) { + public void execute(IngestDocument ingestDocument) throws Exception { for (Processor processor : processors) { processor.execute(ingestDocument); } @@ -73,7 +73,7 @@ public final class Pipeline { public final static class Factory { - public Pipeline create(String id, Map config, Map processorRegistry) throws IOException { + public Pipeline create(String id, Map config, Map processorRegistry) throws Exception { String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); List processors = new ArrayList<>(); @SuppressWarnings("unchecked") diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 67a1cad45a7..1795d2f5c70 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -36,7 +36,7 @@ public interface Processor { /** * Introspect and potentially modify the incoming data. 
*/ - void execute(IngestDocument ingestDocument); + void execute(IngestDocument ingestDocument) throws Exception; /** * Gets the type of a processor @@ -54,7 +54,7 @@ public interface Processor { * Implementations are responsible for removing the used keys, so that after creating a pipeline ingest can * verify if all configurations settings have been used. */ - P create(Map config) throws IOException; + P create(Map config) throws Exception; /** * Sets the configuration directory when needed to read additional config files diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 5405459b28a..77bac7df2bb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -82,15 +82,6 @@ public class IngestPlugin extends Plugin { } } - @Override - public Collection> nodeServices() { - if (transportClient) { - return Collections.emptyList(); - } else { - return Collections.singletonList(PipelineStore.class); - } - } - @Override public Settings additionalSettings() { return settingsBuilder() diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index e66697f02d6..d717174c05d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -19,6 +19,7 @@ package org.elasticsearch.plugin.ingest; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -43,10 +44,10 @@ public class PipelineExecutionService { this.threadPool 
= threadPool; } - public void execute(IndexRequest indexRequest, String pipelineId, Listener listener) { + public void execute(IndexRequest indexRequest, String pipelineId, ActionListener listener) { Pipeline pipeline = store.get(pipelineId); if (pipeline == null) { - listener.failed(new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist")); + listener.onFailure(new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist")); return; } @@ -81,21 +82,13 @@ public class PipelineExecutionService { TimeValue timeValue = TimeValue.parseTimeValue(ttlStr, null, "ttl"); indexRequest.ttl(timeValue.millis()); } - listener.executed(ingestDocument); + listener.onResponse(ingestDocument); } catch (Throwable e) { - listener.failed(e); + listener.onFailure(e); } }); } - public interface Listener { - - void executed(IngestDocument ingestDocument); - - void failed(Throwable e); - - } - public static Settings additionalSettings(Settings nodeSettings) { Settings settings = nodeSettings.getAsSettings("threadpool." 
+ THREAD_POOL_NAME); if (!settings.names().isEmpty()) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index cbfbf269dd3..133b40b9cc7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -35,10 +35,10 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.common.SearchScrollIterator; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Provider; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -57,15 +57,15 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.util.*; -public class PipelineStore extends AbstractLifecycleComponent { +public class PipelineStore extends AbstractComponent { public final static String INDEX = ".ingest"; public final static String TYPE = "pipeline"; - private final Injector injector; private final ThreadPool threadPool; private final TimeValue scrollTimeout; private final ClusterService clusterService; + private final Provider clientProvider; private final TimeValue pipelineUpdateInterval; private final Pipeline.Factory factory = new Pipeline.Factory(); private final Map processorFactoryRegistry; @@ -74,11 +74,11 @@ public class PipelineStore extends AbstractLifecycleComponent { private volatile 
Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, Injector injector, ThreadPool threadPool, Environment environment, ClusterService clusterService, Map processors) { + public PipelineStore(Settings settings, Provider clientProvider, ThreadPool threadPool, Environment environment, ClusterService clusterService, Map processors) { super(settings); - this.injector = injector; this.threadPool = threadPool; this.clusterService = clusterService; + this.clientProvider = clientProvider; this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); for (Processor.Factory factory : processors.values()) { @@ -86,43 +86,43 @@ public class PipelineStore extends AbstractLifecycleComponent { } this.processorFactoryRegistry = Collections.unmodifiableMap(processors); clusterService.add(new PipelineStoreListener()); + clusterService.addLifecycleListener(new LifecycleListener() { + @Override + public void beforeClose() { + // Ideally we would implement Closeable, but when a node is stopped this doesn't get invoked: + try { + IOUtils.close(processorFactoryRegistry.values()); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + }); } - @Override - protected void doStart() { - client = injector.getInstance(Client.class); - } - - @Override - protected void doStop() { - } - - @Override - protected void doClose() { - try { - IOUtils.close(processorFactoryRegistry.values()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - + /** + * Deletes the pipeline specified by id in the request. 
+ */ public void delete(DeletePipelineRequest request, ActionListener listener) { DeleteRequest deleteRequest = new DeleteRequest(request); deleteRequest.index(PipelineStore.INDEX); deleteRequest.type(PipelineStore.TYPE); deleteRequest.id(request.id()); deleteRequest.refresh(true); - client.delete(deleteRequest, listener); + client().delete(deleteRequest, listener); } - public void put(PutPipelineRequest request, ActionListener listener) { - // validates the pipeline and processor configuration: - Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); + /** + * Stores the specified pipeline definition in the request. + * + * @throws IllegalArgumentException If the pipeline holds incorrect configuration + */ + public void put(PutPipelineRequest request, ActionListener listener) throws IllegalArgumentException { try { + // validates the pipeline and processor configuration: + Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); constructPipeline(request.id(), pipelineConfig); - } catch (IOException e) { - listener.onFailure(e); - return; + } catch (Exception e) { + throw new IllegalArgumentException("Invalid pipeline configuration", e); } IndexRequest indexRequest = new IndexRequest(request); @@ -131,9 +131,12 @@ public class PipelineStore extends AbstractLifecycleComponent { indexRequest.id(request.id()); indexRequest.source(request.source()); indexRequest.refresh(true); - client.index(indexRequest, listener); + client().index(indexRequest, listener); } + /** + * Returns the pipeline by the specified id + */ public Pipeline get(String id) { PipelineDefinition ref = pipelines.get(id); if (ref != null) { @@ -166,11 +169,11 @@ public class PipelineStore extends AbstractLifecycleComponent { return result; } - Pipeline constructPipeline(String id, Map config) throws IOException { + Pipeline constructPipeline(String id, Map config) throws Exception { return factory.create(id, config, processorFactoryRegistry); } - 
synchronized void updatePipelines() throws IOException { + synchronized void updatePipelines() throws Exception { // note: this process isn't fast or smart, but the idea is that there will not be many pipelines, // so for that reason the goal is to keep the update logic simple. @@ -208,14 +211,12 @@ public class PipelineStore extends AbstractLifecycleComponent { } void startUpdateWorker() { - if (lifecycleState() == Lifecycle.State.STARTED) { - threadPool.schedule(pipelineUpdateInterval, ThreadPool.Names.GENERIC, new Updater()); - } + threadPool.schedule(pipelineUpdateInterval, ThreadPool.Names.GENERIC, new Updater()); } boolean existPipeline(String pipelineId) { GetRequest request = new GetRequest(PipelineStore.INDEX, PipelineStore.TYPE, pipelineId); - GetResponse response = client.get(request).actionGet(); + GetResponse response = client().get(request).actionGet(); return response.isExists(); } @@ -227,7 +228,15 @@ public class PipelineStore extends AbstractLifecycleComponent { SearchRequest searchRequest = new SearchRequest(PipelineStore.INDEX); searchRequest.source(sourceBuilder); searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); - return SearchScrollIterator.createIterator(client, scrollTimeout, searchRequest); + return SearchScrollIterator.createIterator(client(), scrollTimeout, searchRequest); + } + + + private Client client() { + if (client == null) { + client = clientProvider.get(); + } + return client; } class Updater implements Runnable { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 910f26341e7..91453270803 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -84,15 +84,15 @@ public final class IngestActionFilter 
extends AbstractComponent implements Actio chain.proceed(action, indexRequest, listener); return; } - executionService.execute(indexRequest, pipelineId, new PipelineExecutionService.Listener() { + executionService.execute(indexRequest, pipelineId, new ActionListener() { @Override - public void executed(IngestDocument ingestDocument) { + public void onResponse(IngestDocument ingestDocument) { indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); chain.proceed(action, indexRequest, listener); } @Override - public void failed(Throwable e) { + public void onFailure(Throwable e) { logger.error("failed to execute pipeline [{}]", e, pipelineId); listener.onFailure(e); } @@ -121,14 +121,14 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } IndexRequest indexRequest = (IndexRequest) actionRequest; - executionService.execute(indexRequest, pipelineId, new PipelineExecutionService.Listener() { + executionService.execute(indexRequest, pipelineId, new ActionListener() { @Override - public void executed(IngestDocument ingestDocument) { + public void onResponse(IngestDocument ingestDocument) { processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); } @Override - public void failed(Throwable e) { + public void onFailure(Throwable e) { logger.debug("failed to execute pipeline [{}]", e, pipelineId); bulkRequestModifier.markCurrentItemAsFailed(e); processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java index c1c134c5a1b..8e48a5e057a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -135,7 +135,7 @@ public class SimulatePipelineRequest extends ActionRequest { return new Parsed(pipeline, ingestDocumentList, verbose); } - static Parsed parse(Map config, boolean verbose, PipelineStore pipelineStore) throws IOException { + static Parsed parse(Map config, boolean verbose, PipelineStore pipelineStore) throws Exception { Map pipelineConfig = ConfigurationUtils.readMap(config, Fields.PIPELINE); Pipeline pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry()); List ingestDocumentList = parseDocs(config); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index f3e327c527f..b4036c5bac3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -55,7 +55,7 @@ public class SimulatePipelineTransportAction extends HandledTransportAction expected = new HashMap<>(); @@ -57,7 +57,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { } } - public void testNullValue() throws IOException { + public void testNullValue() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = newProcessor(Collections.singletonList(fieldName)); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); @@ -69,7 +69,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { } } - public void testNonStringValue() throws IOException { + public void testNonStringValue() throws Exception { 
String fieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = newProcessor(Collections.singletonList(fieldName)); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java index ff32c3e2f05..2ae98b163f0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.equalTo; public class AddProcessorTests extends ESTestCase { - public void testAddExistingFields() throws IOException { + public void testAddExistingFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int numFields = randomIntBetween(1, 5); Map fields = new HashMap<>(); @@ -49,7 +49,7 @@ public class AddProcessorTests extends ESTestCase { } } - public void testAddNewFields() throws IOException { + public void testAddNewFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); //used to verify that there are no conflicts between subsequent fields going to be added IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); @@ -68,7 +68,7 @@ public class AddProcessorTests extends ESTestCase { } } - public void testAddFieldsTypeMismatch() throws IOException { + public void testAddFieldsTypeMismatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue("field", "value"); Processor processor = new AddProcessor(Collections.singletonMap("field.inner", "value")); diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java index 56d74c6f12a..3b089f05782 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.equalTo; public class ConvertProcessorTests extends ESTestCase { - public void testConvertInt() throws IOException { + public void testConvertInt() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map expectedResult = new HashMap<>(); @@ -50,7 +50,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertIntList() throws IOException { + public void testConvertIntList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map> expectedResult = new HashMap<>(); @@ -75,7 +75,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertIntError() throws IOException { + public void testConvertIntError() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); String value = "string-" + randomAsciiOfLengthBetween(1, 10); @@ -91,7 +91,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertFloat() throws IOException { + public void testConvertFloat() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map expectedResult = new HashMap<>(); @@ -110,7 +110,7 @@ public class ConvertProcessorTests extends ESTestCase { } } 
- public void testConvertFloatList() throws IOException { + public void testConvertFloatList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map> expectedResult = new HashMap<>(); @@ -135,7 +135,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertFloatError() throws IOException { + public void testConvertFloatError() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); String value = "string-" + randomAsciiOfLengthBetween(1, 10); @@ -151,7 +151,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertBoolean() throws IOException { + public void testConvertBoolean() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map expectedResult = new HashMap<>(); @@ -174,7 +174,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertBooleanList() throws IOException { + public void testConvertBooleanList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map> expectedResult = new HashMap<>(); @@ -203,7 +203,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertBooleanError() throws IOException { + public void testConvertBooleanError() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); String fieldValue; @@ -225,7 +225,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertString() throws IOException { + public void testConvertString() throws Exception { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map expectedResult = new HashMap<>(); @@ -264,7 +264,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertStringList() throws IOException { + public void testConvertStringList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map> expectedResult = new HashMap<>(); @@ -309,7 +309,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertNullField() throws IOException { + public void testConvertNullField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Type type = randomFrom(Type.values()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java index 7f91a8b3a56..0bcb11f58b5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java @@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.equalTo; public class GsubProcessorTests extends ESTestCase { - public void testGsub() throws IOException { + public void testGsub() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int numFields = randomIntBetween(1, 5); List expressions = new ArrayList<>(); @@ -50,7 +50,7 @@ public class GsubProcessorTests extends ESTestCase { } } - public void testGsubNotAStringValue() throws IOException { + public void testGsubNotAStringValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = 
RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, 123); @@ -64,7 +64,7 @@ public class GsubProcessorTests extends ESTestCase { } } - public void testGsubNullValue() throws IOException { + public void testGsubNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); List gsubExpressions = Collections.singletonList(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java index 391c9a641ea..a75aac5253a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java @@ -33,7 +33,7 @@ public class JoinProcessorTests extends ESTestCase { private static final String[] SEPARATORS = new String[]{"-", "_", "."}; - public void testJoinStrings() throws IOException { + public void testJoinStrings() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map expectedResultMap = new HashMap<>(); @@ -62,7 +62,7 @@ public class JoinProcessorTests extends ESTestCase { } } - public void testJoinIntegers() throws IOException { + public void testJoinIntegers() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map expectedResultMap = new HashMap<>(); @@ -91,7 +91,7 @@ public class JoinProcessorTests extends ESTestCase { } } - public void testJoinNonListField() throws IOException { + public void testJoinNonListField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), 
new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, randomAsciiOfLengthBetween(1, 10)); @@ -104,7 +104,7 @@ public class JoinProcessorTests extends ESTestCase { } } - public void testJoinNonExistingField() throws IOException { + public void testJoinNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = new JoinProcessor(Collections.singletonMap(fieldName, "-")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java index 3c110fd7b60..610b4c8a5cb 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java @@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.nullValue; public class RemoveProcessorTests extends ESTestCase { - public void testRemoveFields() throws IOException { + public void testRemoveFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int numFields = randomIntBetween(1, 5); Set fields = new HashSet<>(); @@ -50,7 +50,7 @@ public class RemoveProcessorTests extends ESTestCase { } } - public void testRemoveNonExistingField() throws IOException { + public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); Processor processor = new RemoveProcessor(Collections.singletonList(RandomDocumentPicks.randomFieldName(random()))); processor.execute(ingestDocument); diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java index 51ed5da1750..332d3957369 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.nullValue; public class RenameProcessorTests extends ESTestCase { - public void testRename() throws IOException { + public void testRename() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int numFields = randomIntBetween(1, 5); Map fields = new HashMap<>(); @@ -56,14 +56,14 @@ public class RenameProcessorTests extends ESTestCase { } } - public void testRenameNonExistingField() throws IOException { + public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); Processor processor = new RenameProcessor(Collections.singletonMap(RandomDocumentPicks.randomFieldName(random()), RandomDocumentPicks.randomFieldName(random()))); processor.execute(ingestDocument); assertThat(ingestDocument.getSource().size(), equalTo(0)); } - public void testRenameExistingFieldNullValue() throws IOException { + public void testRenameExistingFieldNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, null); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java index 14cd6d02a39..799e9c41c33 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.equalTo; public class SplitProcessorTests extends ESTestCase { - public void testSplit() throws IOException { + public void testSplit() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); int numFields = randomIntBetween(1, 5); @@ -46,7 +46,7 @@ public class SplitProcessorTests extends ESTestCase { } } - public void testSplitNullValue() throws IOException { + public void testSplitNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Map split = Collections.singletonMap(fieldName, "\\."); @@ -59,7 +59,7 @@ public class SplitProcessorTests extends ESTestCase { } } - public void testSplitNonStringValue() throws IOException { + public void testSplitNonStringValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, randomInt()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 917d4b1815c..62a9bbd60b7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionListener; 
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -59,10 +60,10 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecute_pipelineDoesNotExist() { when(store.get("_id")).thenReturn(null); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + ActionListener listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); - verify(listener).failed(any(IllegalArgumentException.class)); - verify(listener, times(0)).executed(any()); + verify(listener).onFailure(any(IllegalArgumentException.class)); + verify(listener, times(0)).onResponse(any()); } public void testExecuteSuccess() throws Exception { @@ -70,11 +71,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + ActionListener listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener).executed(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener, times(0)).failed(any(Exception.class)); + verify(listener).onResponse(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener, times(0)).onFailure(any(Exception.class)); } public void testExecutePropagateAllMetaDataUpdates() throws Exception { @@ -94,11 +95,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", 
"_description", Arrays.asList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + ActionListener listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(any()); - verify(listener).executed(any()); - verify(listener, times(0)).failed(any(Exception.class)); + verify(listener).onResponse(any()); + verify(listener, times(0)).onFailure(any(Exception.class)); assertThat(indexRequest.index(), equalTo("update_index")); assertThat(indexRequest.type(), equalTo("update_type")); @@ -114,11 +115,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + ActionListener listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener, times(0)).executed(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener).failed(any(RuntimeException.class)); + verify(listener, times(0)).onResponse(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener).onFailure(any(RuntimeException.class)); } public void testExecuteTTL() throws Exception { @@ -130,12 +131,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", 
"_type", "_id").source(Collections.emptyMap()); - PipelineExecutionService.Listener listener = mock(PipelineExecutionService.Listener.class); + ActionListener listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl").millis())); - verify(listener, times(1)).executed(any()); - verify(listener, never()).failed(any()); + verify(listener, times(1)).onResponse(any()); + verify(listener, never()).onFailure(any()); // test with invalid ttl metaProcessorFactory = new MetaDataProcessor.Factory(); @@ -145,11 +146,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - listener = mock(PipelineExecutionService.Listener.class); + listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); - verify(listener, never()).executed(any()); - verify(listener, times(1)).failed(any(ElasticsearchParseException.class)); + verify(listener, never()).onResponse(any()); + verify(listener, times(1)).onFailure(any(ElasticsearchParseException.class)); // test with provided ttl when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.emptyList())); @@ -157,12 +158,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { indexRequest = new IndexRequest("_index", "_type", "_id") .source(Collections.emptyMap()) .ttl(1000l); - listener = mock(PipelineExecutionService.Listener.class); + listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); assertThat(indexRequest.ttl(), equalTo(1000l)); - verify(listener, times(1)).executed(any()); - verify(listener, never()).failed(any(Throwable.class)); + verify(listener, times(1)).onResponse(any()); + verify(listener, 
never()).onFailure(any(Throwable.class)); } private IngestDocument eqID(String index, String type, String id, Map source) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 1dfc64f65b5..e67262b3df0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Provider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.env.Environment; @@ -65,19 +66,15 @@ public class PipelineStoreTests extends ESTestCase { public void init() { threadPool = new ThreadPool("test"); client = mock(Client.class); - Injector injector = mock(Injector.class); - when(injector.getInstance(Client.class)).thenReturn(client); ClusterService clusterService = mock(ClusterService.class); when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); Environment environment = mock(Environment.class); - store = new PipelineStore(Settings.EMPTY, injector, threadPool, environment, clusterService, Collections.emptyMap()); - store.start(); + store = new PipelineStore(Settings.EMPTY, () -> client, threadPool, environment, clusterService, Collections.emptyMap()); } @After public void cleanup() { - store.stop(); threadPool.shutdown(); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 67ea1b32d3e..534f76ecc72 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -83,7 +83,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); verifyZeroInteractions(actionFilterChain); } @@ -96,7 +96,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); verifyZeroInteractions(actionFilterChain); } @@ -122,14 +122,14 @@ public class IngestActionFilterTests extends ESTestCase { ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); Answer answer = invocationOnMock -> { - PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; - listener.executed(new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), indexRequest.sourceAsMap())); + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), indexRequest.sourceAsMap())); return null; }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); filter.apply("_action", indexRequest, actionListener, 
actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); verify(actionFilterChain).proceed("_action", indexRequest, actionListener); verifyZeroInteractions(actionListener); } @@ -145,15 +145,15 @@ public class IngestActionFilterTests extends ESTestCase { Answer answer = new Answer() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { - PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; - listener.failed(exception); + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onFailure(exception); return null; } }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); verify(actionListener).onFailure(exception); verifyZeroInteractions(actionFilterChain); } @@ -242,11 +242,11 @@ public class IngestActionFilterTests extends ESTestCase { RuntimeException exception = new RuntimeException(); Answer answer = (invocationOnMock) -> { - PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; - listener.failed(exception); + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onFailure(exception); return null; }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), 
any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); CaptureActionListener actionListener = new CaptureActionListener(); RecordRequestAFC actionFilterChain = new RecordRequestAFC(); @@ -287,11 +287,11 @@ public class IngestActionFilterTests extends ESTestCase { RuntimeException exception = new RuntimeException(); Answer answer = (invocationOnMock) -> { - PipelineExecutionService.Listener listener = (PipelineExecutionService.Listener) invocationOnMock.getArguments()[2]; - listener.failed(exception); + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onFailure(exception); return null; }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(PipelineExecutionService.Listener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); ActionListener actionListener = mock(ActionListener.class); RecordRequestAFC actionFilterChain = new RecordRequestAFC(); From 43b861b07616c91fc06db43a2190717ffcc0e512 Mon Sep 17 00:00:00 2001 From: javanna Date: Sat, 28 Nov 2015 12:25:12 +0100 Subject: [PATCH 089/347] IngestDocument to support accessing and modifying list items When reading, through #getFieldValue and #hasField, and a list is encountered, the next element in the path is treated as the index of the item that the path points to (e.g. `list.0.key`). If the index is not a number or out of bounds, an exception gets thrown. Added #appendFieldValue method that has the same behaviour as setFieldValue, but when a list is the last element in the path, instead of replacing the whole list it will simply add a new element to the existing list. This method is currently unused, we have to decide whether the set processor or a new processor should use it. 
A few other changes made: - Renamed hasFieldValue to hasField, as this method is not really about values but only keys. It will return true if a key is there but its value is null, while it returns false only when a field is not there at all. - Changed null semantic in getFieldValue. null gets returned only when it was an actual value in the source, an exception is thrown otherwise when trying to access a non existing field, so that null != field not present. - Made remove stricter about non existing fields. Throws error when trying to remove a non existing field. This is more consistent with the other methods in IngestDocument which are strict about fields that are not present. Relates to #14324 --- .../elasticsearch/ingest/IngestDocument.java | 264 ++++++++++---- .../processor/rename/RenameProcessor.java | 4 +- .../elasticsearch/ingest/IngestClientIT.java | 35 +- .../ingest/IngestDocumentTests.java | 326 +++++++++++++++--- .../AbstractStringProcessorTestCase.java | 17 +- .../processor/add/AddProcessorTests.java | 11 +- .../convert/ConvertProcessorTests.java | 21 +- .../processor/gsub/GsubProcessorTests.java | 18 +- .../processor/join/JoinProcessorTests.java | 14 +- .../remove/RemoveProcessorTests.java | 17 +- .../rename/RenameProcessorTests.java | 4 +- .../processor/split/SplitProcessorTests.java | 18 +- 12 files changed, 616 insertions(+), 133 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index bbb47aed8b1..5629e25e4b8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -67,29 +67,28 @@ public final class IngestDocument { * @param path The path within the document in dot-notation * @param clazz The expected class of the field value * @return the value for the provided path if existing, null otherwise - * @throws 
IllegalArgumentException if the field is present but is not of the type provided as argument. + * @throws IllegalArgumentException if the field is null, empty, or if the source contains a field within the path + * which is not of the expected type */ public T getFieldValue(String path, Class clazz) { - if (path == null || path.length() == 0) { - return null; + if (Strings.isEmpty(path)) { + throw new IllegalArgumentException("path cannot be null nor empty"); } String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - Map innerMap = getParent(pathElements); - if (innerMap == null) { - return null; + Object context = source; + for (String pathElement : pathElements) { + context = resolve(pathElement, path, context); } - String leafKey = pathElements[pathElements.length - 1]; - Object fieldValue = innerMap.get(leafKey); - if (fieldValue == null) { + if (context == null) { return null; } - if (clazz.isInstance(fieldValue)) { - return clazz.cast(fieldValue); + if (clazz.isInstance(context)) { + return clazz.cast(context); } - throw new IllegalArgumentException("field [" + path + "] of type [" + fieldValue.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); + throw new IllegalArgumentException("field [" + path + "] of type [" + context.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); } /** @@ -97,18 +96,58 @@ public final class IngestDocument { * @param path The path within the document in dot-notation * @return true if the document contains a value for the field, false otherwise */ - public boolean hasFieldValue(String path) { - if (path == null || path.length() == 0) { + public boolean hasField(String path) { + if (Strings.isEmpty(path)) { return false; } String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - Map innerMap = getParent(pathElements); - if (innerMap == null) { - return false; + + Object context = source; + for (int i = 0; i < 
pathElements.length - 1; i++) { + String pathElement = pathElements[i]; + if (context == null) { + return false; + } + if (context instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) context; + context = map.get(pathElement); + } else if (context instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) context; + try { + int index = Integer.parseInt(pathElement); + if (index < 0 || index >= list.size()) { + return false; + } + context = list.get(index); + } catch (NumberFormatException e) { + return false; + } + + } else { + return false; + } } + String leafKey = pathElements[pathElements.length - 1]; - return innerMap.containsKey(leafKey); + if (context instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) context; + return map.containsKey(leafKey); + } + if (context instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) context; + try { + int index = Integer.parseInt(leafKey); + return index >= 0 && index < list.size(); + } catch (NumberFormatException e) { + return false; + } + } + return false; } /** @@ -116,73 +155,182 @@ public final class IngestDocument { * @param path the path of the field to be removed */ public void removeField(String path) { - if (path == null || path.length() == 0) { - return; + if (Strings.isEmpty(path)) { + throw new IllegalArgumentException("path cannot be null nor empty"); } String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - Map parent = getParent(pathElements); - if (parent != null) { - String leafKey = pathElements[pathElements.length - 1]; - if (parent.containsKey(leafKey)) { - sourceModified = true; - parent.remove(leafKey); - } + + Object context = source; + for (int i = 0; i < pathElements.length - 1; i++) { + context = resolve(pathElements[i], path, context); } + + String leafKey = pathElements[pathElements.length - 1]; + if (context instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) context; + if 
(map.containsKey(leafKey)) { + map.remove(leafKey); + this.sourceModified = true; + return; + } + throw new IllegalArgumentException("field [" + leafKey + "] not present as part of path [" + path + "]"); + } + if (context instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) context; + int index; + try { + index = Integer.parseInt(leafKey); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e); + } + if (index < 0 || index >= list.size()) { + throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); + } + list.remove(index); + this.sourceModified = true; + return; + } + + if (context == null) { + throw new IllegalArgumentException("cannot remove [" + leafKey + "] from null as part of path [" + path + "]"); + } + throw new IllegalArgumentException("cannot remove [" + leafKey + "] from object of type [" + context.getClass().getName() + "] as part of path [" + path + "]"); } - private Map getParent(String[] pathElements) { - Map innerMap = source; - for (int i = 0; i < pathElements.length - 1; i++) { - Object obj = innerMap.get(pathElements[i]); - if (obj instanceof Map) { - @SuppressWarnings("unchecked") - Map stringObjectMap = (Map) obj; - innerMap = stringObjectMap; - } else { - return null; - } + private static Object resolve(String pathElement, String fullPath, Object context) { + if (context == null) { + throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + fullPath + "]"); } - return innerMap; + if (context instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) context; + if (map.containsKey(pathElement)) { + return map.get(pathElement); + } + throw new IllegalArgumentException("field [" + pathElement + "] not present as part of path [" + fullPath + "]"); + } + if (context 
instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) context; + int index; + try { + index = Integer.parseInt(pathElement); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("[" + pathElement + "] is not an integer, cannot be used as an index as part of path [" + fullPath + "]", e); + } + if (index < 0 || index >= list.size()) { + throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + fullPath + "]"); + } + return list.get(index); + } + throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" + context.getClass().getName() + "] as part of path [" + fullPath + "]"); + } + + /** + * Appends the provided value to the provided path in the document. + * Any non existing path element will be created. Same as {@link #setFieldValue(String, Object)} + * but if the last element is a list, the value will be appended to the existing list. + * @param path The path within the document in dot-notation + * @param value The value to put in for the path key + */ + public void appendFieldValue(String path, Object value) { + setFieldValue(path, value, true); } /** * Sets the provided value to the provided path in the document. - * Any non existing path element will be created. + * Any non existing path element will be created. If the last element is a list, + * the value will replace the existing list. 
* @param path The path within the document in dot-notation * @param value The value to put in for the path key */ public void setFieldValue(String path, Object value) { - if (path == null || path.length() == 0) { - throw new IllegalArgumentException("cannot add null or empty field"); + setFieldValue(path, value, false); + } + + private void setFieldValue(String path, Object value, boolean append) { + if (Strings.isEmpty(path)) { + throw new IllegalArgumentException("path cannot be null nor empty"); } String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; - Map inner = source; + Object context = source; for (int i = 0; i < pathElements.length - 1; i++) { String pathElement = pathElements[i]; - if (inner.containsKey(pathElement)) { - Object object = inner.get(pathElement); - if (object instanceof Map) { - @SuppressWarnings("unchecked") - Map stringObjectMap = (Map) object; - inner = stringObjectMap; - } else if (object == null ) { - throw new IllegalArgumentException("cannot add field to null parent, [" + Map.class.getName() + "] expected instead."); + if (context == null) { + throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + path + "]"); + } + if (context instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) context; + if (map.containsKey(pathElement)) { + context = map.get(pathElement); } else { - throw new IllegalArgumentException("cannot add field to parent [" + pathElement + "] of type [" + object.getClass().getName() + "], [" + Map.class.getName() + "] expected instead."); + HashMap newMap = new HashMap<>(); + map.put(pathElement, newMap); + sourceModified = true; + context = newMap; } + } else if (context instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) context; + int index; + try { + index = Integer.parseInt(pathElement); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("[" + pathElement + "] is not an 
integer, cannot be used as an index as part of path [" + path + "]", e); + } + if (index < 0 || index >= list.size()) { + throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); + } + context = list.get(index); } else { - Map newInnerMap = new HashMap<>(); - inner.put(pathElement, newInnerMap); - inner = newInnerMap; + throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" + context.getClass().getName() + "] as part of path [" + path + "]"); } } String leafKey = pathElements[pathElements.length - 1]; - inner.put(leafKey, value); - sourceModified = true; + if (context == null) { + throw new IllegalArgumentException("cannot set [" + leafKey + "] with null parent as part of path [" + path + "]"); + } + if (context instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) context; + if (append) { + if (map.containsKey(leafKey)) { + Object object = map.get(leafKey); + if (object instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) object; + list.add(value); + sourceModified = true; + return; + } + } + } + map.put(leafKey, value); + sourceModified = true; + } else if (context instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) context; + int index; + try { + index = Integer.parseInt(leafKey); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e); + } + if (index < 0 || index >= list.size()) { + throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); + } + list.add(index, value); + this.sourceModified = true; + } else { + throw new IllegalArgumentException("cannot set [" + leafKey + "] with parent object of type [" + context.getClass().getName() + "] as part of path [" + path + 
"]"); + } } public String getMetadata(MetaData metaData) { @@ -195,7 +343,7 @@ public final class IngestDocument { /** * Returns the document. Should be used only for reading. Any change made to the returned map will - * not be reflected to the modified flag. Modify the document instead using {@link #setFieldValue(String, Object)} + * not be reflected to the sourceModified flag. Modify the document instead using {@link #setFieldValue(String, Object)} * and {@link #removeField(String)} */ public Map getSource() { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java index d532564146f..479a4941879 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java @@ -47,8 +47,8 @@ public class RenameProcessor implements Processor { @Override public void execute(IngestDocument document) { for(Map.Entry entry : fields.entrySet()) { - if (document.hasFieldValue(entry.getKey())) { - if (document.hasFieldValue(entry.getKey()) == false) { + if (document.hasField(entry.getKey())) { + if (document.hasField(entry.getKey()) == false) { throw new IllegalArgumentException("field [" + entry.getKey() + "] doesn't exist"); } Object oldValue = document.getFieldValue(entry.getKey(), Object.class); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 985f91bfb8e..01d7baa4ee3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -45,7 +45,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; -import 
java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -53,7 +52,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.*; import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsNull.notNullValue; public class IngestClientIT extends ESIntegTestCase { @@ -115,8 +113,37 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(response.getResults().size(), equalTo(1)); assertThat(response.getResults().get(0), instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); - IngestDocument expectedIngestDocument = new IngestDocument("index", "type", "id", Collections.singletonMap("foo", "bar")); - assertThat(simulateDocumentSimpleResult.getIngestDocument(), equalTo(expectedIngestDocument)); + assertThat(simulateDocumentSimpleResult.getIngestDocument(), nullValue()); + assertThat(simulateDocumentSimpleResult.getFailure(), notNullValue()); + + response = new SimulatePipelineRequestBuilder(client(), SimulatePipelineAction.INSTANCE) + .setId("_id") + .setSource(jsonBuilder().startObject() + .startArray("docs") + .startObject() + .field("_index", "index") + .field("_type", "type") + .field("_id", "id") + .startObject("_source") + .field("field1", "123.42 400 ") + .endObject() + .endObject() + .endArray() + .endObject().bytes()) + .get(); + + assertThat(response.isVerbose(), equalTo(false)); + assertThat(response.getPipelineId(), equalTo("_id")); + assertThat(response.getResults().size(), equalTo(1)); + assertThat(response.getResults().get(0), instanceOf(SimulateDocumentSimpleResult.class)); + simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); + Map source = new HashMap<>(); + source.put("field1", "123.42 400 "); + source.put("val", 
123.42f); + source.put("status", 400); + source.put("msg", "foo"); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", source); + assertThat(simulateDocumentSimpleResult.getIngestDocument().getSource(), equalTo(ingestDocument.getSource())); assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index ad28fd663dc..fb5966e76db 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -22,9 +22,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; +import java.util.*; import static org.hamcrest.Matchers.*; @@ -40,8 +38,16 @@ public class IngestDocumentTests extends ESTestCase { Map innerObject = new HashMap<>(); innerObject.put("buzz", "hello world"); innerObject.put("foo_null", null); + innerObject.put("1", "bar"); document.put("fizz", innerObject); + List> list = new ArrayList<>(); + Map value = new HashMap<>(); + value.put("field", "value"); + list.add(value); + list.add(null); + document.put("list", list); ingestDocument = new IngestDocument("index", "type", "id", document); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); } public void testSimpleGetFieldValue() { @@ -71,46 +77,127 @@ public class IngestDocumentTests extends ESTestCase { public void testNestedGetFieldValue() { assertThat(ingestDocument.getFieldValue("fizz.buzz", String.class), equalTo("hello world")); + assertThat(ingestDocument.getFieldValue("fizz.1", String.class), equalTo("bar")); + } + + public void testNestedGetFieldValueTypeMismatch() { + try { + ingestDocument.getFieldValue("foo.foo.bar", String.class); + } 
catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot resolve [foo] from object of type [java.lang.String] as part of path [foo.foo.bar]")); + } + } + + public void testListGetFieldValue() { + assertThat(ingestDocument.getFieldValue("list.0.field", String.class), equalTo("value")); + } + + public void testListGetFieldValueNull() { + assertThat(ingestDocument.getFieldValue("list.1", String.class), nullValue()); + } + + public void testListGetFieldValueIndexNotNumeric() { + try { + ingestDocument.getFieldValue("list.test.field", String.class); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test.field]")); + } + } + + public void testListGetFieldValueIndexOutOfBounds() { + try { + ingestDocument.getFieldValue("list.10.field", String.class); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10.field]")); + } } public void testGetFieldValueNotFound() { - assertThat(ingestDocument.getFieldValue("not.here", String.class), nullValue()); + try { + ingestDocument.getFieldValue("not.here", String.class); + fail("get field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [not] not present as part of path [not.here]")); + } + } + + public void testGetFieldValueNotFoundNullParent() { + try { + ingestDocument.getFieldValue("fizz.foo_null.not_there", String.class); + fail("get field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot resolve [not_there] from null as part of path [fizz.foo_null.not_there]")); + } } public void testGetFieldValueNull() { - assertNull(ingestDocument.getFieldValue(null, String.class)); + try { + ingestDocument.getFieldValue(null, String.class); + fail("get field value should have failed"); + } 
catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); + } } public void testGetFieldValueEmpty() { - assertNull(ingestDocument.getFieldValue("", String.class)); + try { + ingestDocument.getFieldValue("", String.class); + fail("get field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); + } } - public void testHasFieldValue() { - assertTrue(ingestDocument.hasFieldValue("fizz")); + public void testHasField() { + assertTrue(ingestDocument.hasField("fizz")); } - public void testHasFieldValueNested() { - assertTrue(ingestDocument.hasFieldValue("fizz.buzz")); + public void testHasFieldNested() { + assertTrue(ingestDocument.hasField("fizz.buzz")); } - public void testHasFieldValueNotFound() { - assertFalse(ingestDocument.hasFieldValue("doesnotexist")); + public void testListHasField() { + assertTrue(ingestDocument.hasField("list.0.field")); } - public void testHasFieldValueNestedNotFound() { - assertFalse(ingestDocument.hasFieldValue("fizz.doesnotexist")); + public void testListHasFieldNull() { + assertTrue(ingestDocument.hasField("list.1")); } - public void testHasFieldValueNull() { - assertFalse(ingestDocument.hasFieldValue(null)); + public void testListHasFieldIndexOutOfBounds() { + assertFalse(ingestDocument.hasField("list.10")); } - public void testHasFieldValueNullValue() { - assertTrue(ingestDocument.hasFieldValue("fizz.foo_null")); + public void testListHasFieldIndexNotNumeric() { + assertFalse(ingestDocument.hasField("list.test")); } - public void testHasFieldValueEmpty() { - assertFalse(ingestDocument.hasFieldValue("")); + public void testNestedHasFieldTypeMismatch() { + assertFalse(ingestDocument.hasField("foo.foo.bar")); + } + + public void testHasFieldNotFound() { + assertFalse(ingestDocument.hasField("not.here")); + } + + public void testHasFieldNotFoundNullParent() { + 
assertFalse(ingestDocument.hasField("fizz.foo_null.not_there")); + } + + public void testHasFieldNestedNotFound() { + assertFalse(ingestDocument.hasField("fizz.doesnotexist")); + } + + public void testHasFieldNull() { + assertFalse(ingestDocument.hasField(null)); + } + + public void testHasFieldNullValue() { + assertTrue(ingestDocument.hasField("fizz.foo_null")); + } + + public void testHasFieldEmpty() { + assertFalse(ingestDocument.hasField("")); } public void testSimpleSetFieldValue() { @@ -162,7 +249,7 @@ public class IngestDocumentTests extends ESTestCase { ingestDocument.setFieldValue("fizz.buzz.new", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("cannot add field to parent [buzz] of type [java.lang.String], [java.util.Map] expected instead.")); + assertThat(e.getMessage(), equalTo("cannot set [new] with parent object of type [java.lang.String] as part of path [fizz.buzz.new]")); assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } @@ -172,7 +259,7 @@ public class IngestDocumentTests extends ESTestCase { ingestDocument.setFieldValue("fizz.foo_null.test", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("cannot add field to null parent, [java.util.Map] expected instead.")); + assertThat(e.getMessage(), equalTo("cannot set [test] with null parent as part of path [fizz.foo_null.test]")); assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } @@ -182,7 +269,85 @@ public class IngestDocumentTests extends ESTestCase { ingestDocument.setFieldValue(null, "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("cannot add null or empty field")); + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + } + + public void 
testListSetFieldValueNoIndexProvided() { + ingestDocument.setFieldValue("list", "value"); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); + Object object = ingestDocument.getSource().get("list"); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("value")); + } + + public void testListAppendFieldValue() { + ingestDocument.appendFieldValue("list", "new_value"); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); + Object object = ingestDocument.getSource().get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(3)); + assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value"))); + assertThat(list.get(1), nullValue()); + assertThat(list.get(2), equalTo("new_value")); + } + + public void testListSetFieldValueIndexProvided() { + ingestDocument.setFieldValue("list.1", "value"); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); + Object object = ingestDocument.getSource().get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(3)); + assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value"))); + assertThat(list.get(1), equalTo("value")); + assertThat(list.get(2), nullValue()); + } + + public void testSetFieldValueListAsPartOfPath() { + ingestDocument.setFieldValue("list.0.field", "new_value"); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); + Object object = ingestDocument.getSource().get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), equalTo(Collections.singletonMap("field", "new_value"))); + assertThat(list.get(1), nullValue()); + } + + public void testListSetFieldValueIndexNotNumeric() { + try { + 
ingestDocument.setFieldValue("list.test", "value"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + + try { + ingestDocument.setFieldValue("list.test.field", "new_value"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test.field]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + } + + public void testListSetFieldValueIndexOutOfBounds() { + try { + ingestDocument.setFieldValue("list.10", "value"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + + try { + ingestDocument.setFieldValue("list.10.field", "value"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10.field]")); assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } @@ -192,7 +357,7 @@ public class IngestDocumentTests extends ESTestCase { ingestDocument.setFieldValue("", "bar"); fail("add field should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("cannot add null or empty field")); + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); assertThat(ingestDocument.isSourceModified(), equalTo(false)); } } @@ -200,48 +365,127 @@ public class IngestDocumentTests extends ESTestCase { public void testRemoveField() { ingestDocument.removeField("foo"); assertThat(ingestDocument.isSourceModified(), equalTo(true)); - assertThat(ingestDocument.getSource().size(), equalTo(2)); + assertThat(ingestDocument.getSource().size(), equalTo(3)); 
assertThat(ingestDocument.getSource().containsKey("foo"), equalTo(false)); } public void testRemoveInnerField() { ingestDocument.removeField("fizz.buzz"); - assertThat(ingestDocument.getSource().size(), equalTo(3)); + assertThat(ingestDocument.getSource().size(), equalTo(4)); assertThat(ingestDocument.getSource().get("fizz"), instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) ingestDocument.getSource().get("fizz"); - assertThat(map.size(), equalTo(1)); + assertThat(map.size(), equalTo(2)); assertThat(map.containsKey("buzz"), equalTo(false)); ingestDocument.removeField("fizz.foo_null"); + assertThat(map.size(), equalTo(1)); + assertThat(ingestDocument.getSource().size(), equalTo(4)); + assertThat(ingestDocument.getSource().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); + + ingestDocument.removeField("fizz.1"); assertThat(map.size(), equalTo(0)); - assertThat(ingestDocument.getSource().size(), equalTo(3)); + assertThat(ingestDocument.getSource().size(), equalTo(4)); assertThat(ingestDocument.getSource().containsKey("fizz"), equalTo(true)); assertThat(ingestDocument.isSourceModified(), equalTo(true)); } public void testRemoveNonExistingField() { - ingestDocument.removeField("does_not_exist"); - assertThat(ingestDocument.isSourceModified(), equalTo(false)); - assertThat(ingestDocument.getSource().size(), equalTo(3)); + try { + ingestDocument.removeField("does_not_exist"); + fail("remove field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [does_not_exist] not present as part of path [does_not_exist]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } } public void testRemoveExistingParentTypeMismatch() { - ingestDocument.removeField("foo.test"); - assertThat(ingestDocument.isSourceModified(), equalTo(false)); - assertThat(ingestDocument.getSource().size(), equalTo(3)); + try { + 
ingestDocument.removeField("foo.foo.bar"); + fail("remove field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot resolve [foo] from object of type [java.lang.String] as part of path [foo.foo.bar]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + } + + public void testListRemoveField() { + ingestDocument.removeField("list.0.field"); + assertThat(ingestDocument.isSourceModified(), equalTo(true)); + assertThat(ingestDocument.getSource().size(), equalTo(4)); + assertThat(ingestDocument.getSource().containsKey("list"), equalTo(true)); + Object object = ingestDocument.getSource().get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(2)); + object = list.get(0); + assertThat(object, instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) object; + assertThat(map.size(), equalTo(0)); + ingestDocument.removeField("list.0"); + assertThat(list.size(), equalTo(1)); + assertThat(list.get(0), nullValue()); + } + + public void testRemoveFieldValueNotFoundNullParent() { + try { + ingestDocument.removeField("fizz.foo_null.not_there"); + fail("get field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot remove [not_there] from null as part of path [fizz.foo_null.not_there]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + } + + public void testNestedRemoveFieldTypeMismatch() { + try { + ingestDocument.removeField("fizz.1.bar"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("cannot remove [bar] from object of type [java.lang.String] as part of path [fizz.1.bar]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + } + + public void testListRemoveFieldIndexNotNumeric() { + try { + ingestDocument.removeField("list.test"); + } 
catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } + } + + public void testListRemoveFieldIndexOutOfBounds() { + try { + ingestDocument.removeField("list.10"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10]")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } } public void testRemoveNullField() { - ingestDocument.removeField(null); - assertThat(ingestDocument.isSourceModified(), equalTo(false)); - assertThat(ingestDocument.getSource().size(), equalTo(3)); + try { + ingestDocument.removeField(null); + fail("remove field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } } public void testRemoveEmptyField() { - ingestDocument.removeField(""); - assertThat(ingestDocument.isSourceModified(), equalTo(false)); - assertThat(ingestDocument.getSource().size(), equalTo(3)); + try { + ingestDocument.removeField(""); + fail("remove field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); + assertThat(ingestDocument.isSourceModified(), equalTo(false)); + } } public void testEqualsAndHashcode() throws Exception { @@ -289,9 +533,11 @@ public class IngestDocumentTests extends ESTestCase { } else { assertThat(ingestDocument, equalTo(otherIngestDocument)); assertThat(otherIngestDocument, equalTo(ingestDocument)); + assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode())); IngestDocument thirdIngestDocument = new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue)); assertThat(thirdIngestDocument, 
equalTo(ingestDocument)); assertThat(ingestDocument, equalTo(thirdIngestDocument)); + assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode())); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java index a3ce7b7aab2..3b80b5a38c6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java @@ -23,12 +23,12 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public abstract class AbstractStringProcessorTestCase extends ESTestCase { @@ -57,7 +57,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { } } - public void testNullValue() throws Exception { + public void testFieldNotFound() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = newProcessor(Collections.singletonList(fieldName)); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); @@ -65,7 +65,18 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { processor.execute(ingestDocument); fail("processor should have failed"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot process it.")); + assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); + } + } + + public void testNullValue() throws 
Exception { + Processor processor = newProcessor(Collections.singletonList("field")); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); + try { + processor.execute(ingestDocument); + fail("processor should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [field] is null, cannot process it.")); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java index 2ae98b163f0..5e9756adf9a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java @@ -24,8 +24,9 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -44,7 +45,7 @@ public class AddProcessorTests extends ESTestCase { processor.execute(ingestDocument); for (Map.Entry field : fields.entrySet()) { - assertThat(ingestDocument.hasFieldValue(field.getKey()), equalTo(true)); + assertThat(ingestDocument.hasField(field.getKey()), equalTo(true)); assertThat(ingestDocument.getFieldValue(field.getKey(), Object.class), equalTo(field.getValue())); } } @@ -63,7 +64,7 @@ public class AddProcessorTests extends ESTestCase { Processor processor = new AddProcessor(fields); processor.execute(ingestDocument); for (Map.Entry field : fields.entrySet()) { - assertThat(ingestDocument.hasFieldValue(field.getKey()), equalTo(true)); + assertThat(ingestDocument.hasField(field.getKey()), equalTo(true)); assertThat(ingestDocument.getFieldValue(field.getKey(), 
Object.class), equalTo(field.getValue())); } } @@ -76,7 +77,7 @@ public class AddProcessorTests extends ESTestCase { processor.execute(ingestDocument); fail("processor execute should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("cannot add field to parent [field] of type [java.lang.String], [java.util.Map] expected instead.")); + assertThat(e.getMessage(), equalTo("cannot set [inner] with parent object of type [java.lang.String] as part of path [field.inner]")); } } } \ No newline at end of file diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java index 3b089f05782..85dcf860e20 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java @@ -24,10 +24,10 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.*; -import static org.elasticsearch.ingest.processor.convert.ConvertProcessor.*; +import static org.elasticsearch.ingest.processor.convert.ConvertProcessor.Type; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class ConvertProcessorTests extends ESTestCase { @@ -309,7 +309,7 @@ public class ConvertProcessorTests extends ESTestCase { } } - public void testConvertNullField() throws Exception { + public void testConvertNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Type type = randomFrom(Type.values()); @@ -319,7 +319,20 @@ public class ConvertProcessorTests 
extends ESTestCase { processor.execute(ingestDocument); fail("processor execute should have failed"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Field [" + fieldName + "] is null, cannot be converted to type [" + type + "]")); + assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); + } + } + + public void testConvertNullField() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); + Type type = randomFrom(Type.values()); + Map convert = Collections.singletonMap("field", type); + Processor processor = new ConvertProcessor(convert); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Field [field] is null, cannot be converted to type [" + type + "]")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java index 0bcb11f58b5..89c77135687 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java @@ -24,13 +24,13 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.regex.Pattern; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class GsubProcessorTests extends ESTestCase { @@ -64,7 +64,7 @@ public class GsubProcessorTests extends ESTestCase { } } - public void testGsubNullValue() throws 
Exception { + public void testGsubFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); List gsubExpressions = Collections.singletonList(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); @@ -73,7 +73,19 @@ public class GsubProcessorTests extends ESTestCase { processor.execute(ingestDocument); fail("processor execution should have failed"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot match pattern.")); + assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); + } + } + + public void testGsubNullValue() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); + List gsubExpressions = Collections.singletonList(new GsubExpression("field", Pattern.compile("\\."), "-")); + Processor processor = new GsubProcessor(gsubExpressions); + try { + processor.execute(ingestDocument); + fail("processor execution should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [field] is null, cannot match pattern.")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java index a75aac5253a..4df240ece45 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java @@ -24,9 +24,9 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.*; +import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class JoinProcessorTests extends ESTestCase { @@ -111,7 +111,17 @@ public class JoinProcessorTests extends ESTestCase { try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot join.")); + assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); + } + } + + public void testJoinNullValue() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); + Processor processor = new JoinProcessor(Collections.singletonMap("field", "-")); + try { + processor.execute(ingestDocument); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [field] is null, cannot join.")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java index 610b4c8a5cb..50b1ee198f9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java @@ -24,14 +24,13 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Set; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; public class RemoveProcessorTests extends ESTestCase { @@ -45,15 +44,19 @@ public class RemoveProcessorTests extends ESTestCase { Processor processor = new RemoveProcessor(fields); 
processor.execute(ingestDocument); for (String field : fields) { - assertThat(ingestDocument.getFieldValue(field, Object.class), nullValue()); - assertThat(ingestDocument.hasFieldValue(field), equalTo(false)); + assertThat(ingestDocument.hasField(field), equalTo(false)); } } public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - Processor processor = new RemoveProcessor(Collections.singletonList(RandomDocumentPicks.randomFieldName(random()))); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(0)); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = new RemoveProcessor(Collections.singletonList(fieldName)); + try { + processor.execute(ingestDocument); + fail("remove field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); + } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java index 332d3957369..2acd0542b16 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -70,8 +70,8 @@ public class RenameProcessorTests extends ESTestCase { String newFieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = new RenameProcessor(Collections.singletonMap(fieldName, newFieldName)); processor.execute(ingestDocument); - assertThat(ingestDocument.hasFieldValue(fieldName), equalTo(false)); - assertThat(ingestDocument.hasFieldValue(newFieldName), equalTo(true)); + assertThat(ingestDocument.hasField(fieldName), equalTo(false)); + 
assertThat(ingestDocument.hasField(newFieldName), equalTo(true)); assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), nullValue()); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java index 799e9c41c33..a190dddc791 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java @@ -24,9 +24,9 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.*; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class SplitProcessorTests extends ESTestCase { @@ -46,7 +46,7 @@ public class SplitProcessorTests extends ESTestCase { } } - public void testSplitNullValue() throws Exception { + public void testSplitFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Map split = Collections.singletonMap(fieldName, "\\."); @@ -55,7 +55,19 @@ public class SplitProcessorTests extends ESTestCase { processor.execute(ingestDocument); fail("split processor should have failed"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot split.")); + assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); + } + } + + public void testSplitNullValue() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); + Map split = Collections.singletonMap("field", 
"\\."); + Processor processor = new SplitProcessor(split); + try { + processor.execute(ingestDocument); + fail("split processor should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [field] is null, cannot split.")); } } From fdf4543b8eaa40297a63cf03db99c9193158e84e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 27 Nov 2015 16:39:41 +0100 Subject: [PATCH 090/347] Renamed `add` processor to `set` processor. This name makes more sense, because if a field already exists it overwrites it. --- docs/plugins/ingest.asciidoc | 6 +++--- .../SetProcessor.java} | 14 ++++++------- .../plugin/ingest/IngestModule.java | 4 ++-- .../SetProcessorFactoryTests.java} | 13 ++++++------ .../SetProcessorTests.java} | 20 +++++++++---------- .../rest-api-spec/test/ingest/20_crud.yaml | 2 +- .../rest-api-spec/test/ingest/60_mutate.yaml | 2 +- .../test/ingest/80_simulate.yaml | 10 +++++----- 8 files changed, 34 insertions(+), 37 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{add/AddProcessor.java => set/SetProcessor.java} (84%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{add/AddProcessorFactoryTests.java => set/SetProcessorFactoryTests.java} (79%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{add/AddProcessorTests.java => set/SetProcessorTests.java} (86%) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 9c1877f2a49..f5c5af22f8e 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -3,14 +3,14 @@ === Processors -==== Add processor -Adds one or more fields and associates them with the specified values. If a field already exists, +==== Set processor +Sets one or more fields and associates them with the specified values. If a field already exists, its value will be replaced with the provided one. 
[source,js] -------------------------------------------------- { - "add": { + "set": { "fields": { "field": 582.1 } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java similarity index 84% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java index 7bbb33a8f57..1b25b7c5981 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/add/AddProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.add; +package org.elasticsearch.ingest.processor.set; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; @@ -31,13 +31,13 @@ import java.util.Map; * Processor that adds new fields with their corresponding values. If the field is already present, its value * will be replaced with the provided one. 
*/ -public class AddProcessor implements Processor { +public class SetProcessor implements Processor { - public static final String TYPE = "add"; + public static final String TYPE = "set"; private final Map fields; - AddProcessor(Map fields) { + SetProcessor(Map fields) { this.fields = fields; } @@ -57,11 +57,11 @@ public class AddProcessor implements Processor { return TYPE; } - public static final class Factory implements Processor.Factory { + public static final class Factory implements Processor.Factory { @Override - public AddProcessor create(Map config) throws IOException { + public SetProcessor create(Map config) throws IOException { Map fields = ConfigurationUtils.readMap(config, "fields"); - return new AddProcessor(Collections.unmodifiableMap(fields)); + return new SetProcessor(Collections.unmodifiableMap(fields)); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 75a962ba272..898af1a0f3e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -22,7 +22,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.add.AddProcessor; +import org.elasticsearch.ingest.processor.set.SetProcessor; import org.elasticsearch.ingest.processor.convert.ConvertProcessor; import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; @@ -56,7 +56,7 @@ public class IngestModule extends AbstractModule { addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); addProcessor(DateProcessor.TYPE, 
new DateProcessor.Factory()); - addProcessor(AddProcessor.TYPE, new AddProcessor.Factory()); + addProcessor(SetProcessor.TYPE, new SetProcessor.Factory()); addProcessor(RenameProcessor.TYPE, new RenameProcessor.Factory()); addProcessor(RemoveProcessor.TYPE, new RemoveProcessor.Factory()); addProcessor(SplitProcessor.TYPE, new SplitProcessor.Factory()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java similarity index 79% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java index 8acbed60541..dbe5b875d3f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java @@ -17,9 +17,8 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.add; +package org.elasticsearch.ingest.processor.set; -import org.elasticsearch.ingest.processor.join.JoinProcessor; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -29,19 +28,19 @@ import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; -public class AddProcessorFactoryTests extends ESTestCase { +public class SetProcessorFactoryTests extends ESTestCase { public void testCreate() throws IOException { - AddProcessor.Factory factory = new AddProcessor.Factory(); + SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); Map fields = Collections.singletonMap("field1", "value1"); config.put("fields", fields); - AddProcessor addProcessor = factory.create(config); - assertThat(addProcessor.getFields(), equalTo(fields)); + SetProcessor setProcessor = factory.create(config); + assertThat(setProcessor.getFields(), equalTo(fields)); } public void testCreateMissingFields() throws IOException { - AddProcessor.Factory factory = new AddProcessor.Factory(); + SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); try { factory.create(config); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java similarity index 86% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java index 5e9756adf9a..c675c9c51eb 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/add/AddProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java @@ -17,22 +17,20 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.add; +package org.elasticsearch.ingest.processor.set; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; +import java.util.*; import static org.hamcrest.Matchers.equalTo; -public class AddProcessorTests extends ESTestCase { +public class SetProcessorTests extends ESTestCase { - public void testAddExistingFields() throws Exception { + public void testSetExistingFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int numFields = randomIntBetween(1, 5); Map fields = new HashMap<>(); @@ -41,7 +39,7 @@ public class AddProcessorTests extends ESTestCase { Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); fields.put(fieldName, fieldValue); } - Processor processor = new AddProcessor(fields); + Processor processor = new SetProcessor(fields); processor.execute(ingestDocument); for (Map.Entry field : fields.entrySet()) { @@ -50,7 +48,7 @@ public class AddProcessorTests extends ESTestCase { } } - public void testAddNewFields() throws Exception { + public void testSetNewFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); //used to verify that there are no conflicts between subsequent fields going to be added IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); @@ -61,7 +59,7 @@ public class AddProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), testIngestDocument, fieldValue); fields.put(fieldName, fieldValue); } - Processor processor = new AddProcessor(fields); + Processor processor = new SetProcessor(fields); processor.execute(ingestDocument); for (Map.Entry 
field : fields.entrySet()) { assertThat(ingestDocument.hasField(field.getKey()), equalTo(true)); @@ -69,10 +67,10 @@ public class AddProcessorTests extends ESTestCase { } } - public void testAddFieldsTypeMismatch() throws Exception { + public void testSetFieldsTypeMismatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue("field", "value"); - Processor processor = new AddProcessor(Collections.singletonMap("field.inner", "value")); + Processor processor = new SetProcessor(Collections.singletonMap("field.inner", "value")); try { processor.execute(ingestDocument); fail("processor execute should have failed"); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index c76500eda11..cc2cee0c742 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -12,7 +12,7 @@ "description": "_description", "processors": [ { - "add" : { + "set" : { "fields" : { "field2": "_value" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml index 850d775fdc9..a6126ddca45 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml @@ -12,7 +12,7 @@ "description": "_description", "processors": [ { - "add" : { + "set" : { "fields" : { "new_field": "new_value" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index 9aecdaf0ff4..c84f525af28 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ 
b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -12,7 +12,7 @@ "description": "_description", "processors": [ { - "add" : { + "set" : { "fields" : { "field2" : "_value" } @@ -66,7 +66,7 @@ "description": "_description", "processors": [ { - "add" : { + "set" : { "fields" : { "field2" : "_value" } @@ -129,14 +129,14 @@ "description": "_description", "processors": [ { - "add" : { + "set" : { "fields" : { "field2" : "_value" } } }, { - "add" : { + "set" : { "fields" : { "field3" : "third_val" } @@ -157,7 +157,7 @@ } - length: { docs: 1 } - length: { docs.0.processor_results: 2 } - - match: { docs.0.processor_results.0.processor_id: "processor[add]-0" } + - match: { docs.0.processor_results.0.processor_id: "processor[set]-0" } - is_true: docs.0.processor_results.0.doc.modified - length: { docs.0.processor_results.0.doc._source: 2 } - match: { docs.0.processor_results.0.doc._source.foo: "bar" } From dde274d944b2a0b9e0deb051738e715a0630e692 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 30 Nov 2015 15:37:16 +0100 Subject: [PATCH 091/347] Replaced IOException with Exception on factory implementations' `Processor.Factory#create(Map)` method. 
--- .../ingest/processor/AbstractStringProcessor.java | 3 +-- .../ingest/processor/convert/ConvertProcessor.java | 3 +-- .../elasticsearch/ingest/processor/date/DateProcessor.java | 2 +- .../elasticsearch/ingest/processor/geoip/GeoIpProcessor.java | 2 +- .../elasticsearch/ingest/processor/grok/GrokProcessor.java | 5 ++--- .../elasticsearch/ingest/processor/gsub/GsubProcessor.java | 3 +-- .../elasticsearch/ingest/processor/join/JoinProcessor.java | 3 +-- .../ingest/processor/meta/MetaDataProcessor.java | 3 +-- .../ingest/processor/remove/RemoveProcessor.java | 3 +-- .../ingest/processor/rename/RenameProcessor.java | 3 +-- .../org/elasticsearch/ingest/processor/set/SetProcessor.java | 3 +-- .../elasticsearch/ingest/processor/split/SplitProcessor.java | 3 +-- 12 files changed, 13 insertions(+), 23 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index 00cbe73e508..21d2c6c7d14 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -58,7 +57,7 @@ public abstract class AbstractStringProcessor implements Processor { public static abstract class Factory implements Processor.Factory { @Override - public T create(Map config) throws IOException { + public T create(Map config) throws Exception { List fields = ConfigurationUtils.readList(config, "fields"); return newProcessor(Collections.unmodifiableList(fields)); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java index 769ecb90a4b..d86fea0485f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.*; /** @@ -129,7 +128,7 @@ public class ConvertProcessor implements Processor { public static class Factory implements Processor.Factory { @Override - public ConvertProcessor create(Map config) throws IOException { + public ConvertProcessor create(Map config) throws Exception { Map fields = ConfigurationUtils.readMap(config, "fields"); Map convertFields = new HashMap<>(); for (Map.Entry entry : fields.entrySet()) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index a7cd3eb4fbe..35fcc044f42 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -111,7 +111,7 @@ public final class DateProcessor implements Processor { public static class Factory implements Processor.Factory { @SuppressWarnings("unchecked") - public DateProcessor create(Map config) { + public DateProcessor create(Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "match_field"); String targetField = ConfigurationUtils.readStringProperty(config, "target_field", DEFAULT_TARGET_FIELD); String timezoneString = ConfigurationUtils.readOptionalStringProperty(config, "timezone"); diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 5187f731494..eb90c4acfa5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -220,7 +220,7 @@ public final class GeoIpProcessor implements Processor { private Path geoIpConfigDirectory; private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); - public GeoIpProcessor create(Map config) throws IOException { + public GeoIpProcessor create(Map config) throws Exception { String ipField = readStringProperty(config, "source_field"); String targetField = readStringProperty(config, "target_field", "geoip"); String databaseFile = readStringProperty(config, "database_file", "GeoLite2-City.mmdb"); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index c2cc009ef81..f78db7e2d8e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.io.InputStream; import java.nio.file.DirectoryStream; import java.nio.file.Files; @@ -39,7 +38,7 @@ public final class GrokProcessor implements Processor { private final String matchField; private final Grok grok; - public GrokProcessor(Grok grok, String matchField) throws IOException { + public GrokProcessor(Grok grok, String matchField) { this.matchField = matchField; this.grok = grok; } @@ 
-72,7 +71,7 @@ public final class GrokProcessor implements Processor { public static class Factory implements Processor.Factory { private Path grokConfigDirectory; - public GrokProcessor create(Map config) throws IOException { + public GrokProcessor create(Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "field"); String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern"); Map patternBank = new HashMap<>(); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java index bbbb7b9cfd4..8eb6c7dbc89 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -68,7 +67,7 @@ public class GsubProcessor implements Processor { public static class Factory implements Processor.Factory { @Override - public GsubProcessor create(Map config) throws IOException { + public GsubProcessor create(Map config) throws Exception { List> gsubConfig = ConfigurationUtils.readList(config, "expressions"); List gsubExpressions = new ArrayList<>(); for (Map stringObjectMap : gsubConfig) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java index a30b43ebde7..40016b489fd 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -68,7 +67,7 @@ public class JoinProcessor implements Processor { public static class Factory implements Processor.Factory { @Override - public JoinProcessor create(Map config) throws IOException { + public JoinProcessor create(Map config) throws Exception { Map fields = ConfigurationUtils.readMap(config, "fields"); return new JoinProcessor(Collections.unmodifiableMap(fields)); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java index e2b3b31498d..1abdd995662 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java @@ -8,7 +8,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.IngestDocument.MetaData; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.io.StringWriter; import java.util.Collections; import java.util.HashMap; @@ -49,7 +48,7 @@ public final class MetaDataProcessor implements Processor { private final MustacheFactory mustacheFactory = new DefaultMustacheFactory(); @Override - public MetaDataProcessor create(Map config) throws IOException { + public MetaDataProcessor create(Map config) throws Exception { Map templates = new HashMap<>(); Iterator> iterator = config.entrySet().iterator(); while (iterator.hasNext()) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java index 41aba178741..f04c407b14a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -60,7 +59,7 @@ public class RemoveProcessor implements Processor { public static class Factory implements Processor.Factory { @Override - public RemoveProcessor create(Map config) throws IOException { + public RemoveProcessor create(Map config) throws Exception { List fields = ConfigurationUtils.readList(config, "fields"); return new RemoveProcessor(Collections.unmodifiableList(fields)); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java index 479a4941879..f9d729c954d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -65,7 +64,7 @@ public class RenameProcessor implements Processor { public static class Factory implements Processor.Factory { @Override - public RenameProcessor create(Map config) throws IOException { + public RenameProcessor create(Map config) throws Exception { Map fields = 
ConfigurationUtils.readMap(config, "fields"); return new RenameProcessor(Collections.unmodifiableMap(fields)); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java index 1b25b7c5981..f46fe5052f5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -59,7 +58,7 @@ public class SetProcessor implements Processor { public static final class Factory implements Processor.Factory { @Override - public SetProcessor create(Map config) throws IOException { + public SetProcessor create(Map config) throws Exception { Map fields = ConfigurationUtils.readMap(config, "fields"); return new SetProcessor(Collections.unmodifiableMap(fields)); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java index 92e87cc0628..6d9dea24947 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Map; @@ -65,7 +64,7 @@ public class SplitProcessor implements Processor { public static class Factory implements 
Processor.Factory { @Override - public SplitProcessor create(Map config) throws IOException { + public SplitProcessor create(Map config) throws Exception { Map fields = ConfigurationUtils.readMap(config, "fields"); return new SplitProcessor(Collections.unmodifiableMap(fields)); } From 4402da1af0b70a361cab6d415aa5e8d260ea74ad Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 30 Nov 2015 15:45:40 +0100 Subject: [PATCH 092/347] also change the tests to deal with Exception instead of IOException --- .../convert/ConvertProcessorFactoryTests.java | 7 +++---- .../processor/gsub/GsubProcessorFactoryTests.java | 12 +++++------- .../processor/join/JoinProcessorFactoryTests.java | 6 ++---- .../lowercase/LowercaseProcessorFactoryTests.java | 5 ++--- .../remove/RemoveProcessorFactoryTests.java | 6 ++---- .../rename/RenameProcessorFactoryTests.java | 6 ++---- .../processor/set/SetProcessorFactoryTests.java | 5 ++--- .../processor/split/SplitProcessorFactoryTests.java | 5 ++--- .../processor/trim/TrimProcessorFactoryTests.java | 6 ++---- .../uppercase/UppercaseProcessorFactoryTests.java | 5 ++--- 10 files changed, 24 insertions(+), 39 deletions(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java index ae20d647df9..108d7104a0c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.ingest.processor.convert; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,7 +30,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class 
ConvertProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); Map config = new HashMap<>(); ConvertProcessor.Type type = randomFrom(ConvertProcessor.Type.values()); @@ -42,7 +41,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { assertThat(convertProcessor.getFields().get("field1"), equalTo(type)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); Map config = new HashMap<>(); try { @@ -53,7 +52,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { } } - public void testCreateUnsupportedType() throws IOException { + public void testCreateUnsupportedType() throws Exception { ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); Map config = new HashMap<>(); String type = "type-" + randomAsciiOfLengthBetween(1, 10); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java index a66dbb2519c..c032a19347a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.ingest.processor.gsub; -import org.elasticsearch.ingest.processor.join.JoinProcessor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -32,7 +30,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class GsubProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void 
testCreate() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); List> expressions = new ArrayList<>(); @@ -50,7 +48,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { assertThat(gsubExpression.getReplacement(), equalTo("-")); } - public void testCreateMissingExpressions() throws IOException { + public void testCreateMissingExpressions() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); try { @@ -61,7 +59,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { } } - public void testCreateNoFieldPresent() throws IOException { + public void testCreateNoFieldPresent() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); List> expressions = new ArrayList<>(); @@ -78,7 +76,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { } } - public void testCreateNoPatternPresent() throws IOException { + public void testCreateNoPatternPresent() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); List> expressions = new ArrayList<>(); @@ -95,7 +93,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { } } - public void testCreateNoReplacementPresent() throws IOException { + public void testCreateNoReplacementPresent() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); List> expressions = new ArrayList<>(); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java index 9b7aa52b8cd..8ad05eec6f8 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.ingest.processor.join; -import org.elasticsearch.ingest.processor.split.SplitProcessor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,7 +29,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class JoinProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { JoinProcessor.Factory factory = new JoinProcessor.Factory(); Map config = new HashMap<>(); Map fields = Collections.singletonMap("field1", "-"); @@ -40,7 +38,7 @@ public class JoinProcessorFactoryTests extends ESTestCase { assertThat(joinProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { JoinProcessor.Factory factory = new JoinProcessor.Factory(); Map config = new HashMap<>(); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java index 2c52eaf5572..2a18eddf64c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.lowercase; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -31,7 +30,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class LowercaseProcessorFactoryTests extends ESTestCase { - public void testCreate() throws 
IOException { + public void testCreate() throws Exception { LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); Map config = new HashMap<>(); List fields = Collections.singletonList("field1"); @@ -40,7 +39,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase { assertThat(uppercaseProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); Map config = new HashMap<>(); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java index 2c6b0ca303c..27933ea66e3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.ingest.processor.remove; -import org.elasticsearch.ingest.processor.join.JoinProcessor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -32,7 +30,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class RemoveProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); Map config = new HashMap<>(); List fields = Collections.singletonList("field1"); @@ -41,7 +39,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase { assertThat(removeProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { 
RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); Map config = new HashMap<>(); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java index ca8627cf67d..d858b4aea07 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.ingest.processor.rename; -import org.elasticsearch.ingest.processor.join.JoinProcessor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,7 +29,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class RenameProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); Map fields = Collections.singletonMap("field1", "value1"); @@ -40,7 +38,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { assertThat(renameProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java index dbe5b875d3f..1f3c345b1db 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.set; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -30,7 +29,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class SetProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); Map fields = Collections.singletonMap("field1", "value1"); @@ -39,7 +38,7 @@ public class SetProcessorFactoryTests extends ESTestCase { assertThat(setProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java index e0ca9b34d4f..e1c80859ac0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.split; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -30,7 +29,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class SplitProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { SplitProcessor.Factory factory = new 
SplitProcessor.Factory(); Map config = new HashMap<>(); Map fields = Collections.singletonMap("field1", "\\."); @@ -39,7 +38,7 @@ public class SplitProcessorFactoryTests extends ESTestCase { assertThat(splitProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { SplitProcessor.Factory factory = new SplitProcessor.Factory(); Map config = new HashMap<>(); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java index b955a929fe7..2475b04db77 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.ingest.processor.trim; -import org.elasticsearch.ingest.processor.lowercase.LowercaseProcessor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -32,7 +30,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class TrimProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { TrimProcessor.Factory factory = new TrimProcessor.Factory(); Map config = new HashMap<>(); List fields = Collections.singletonList("field1"); @@ -41,7 +39,7 @@ public class TrimProcessorFactoryTests extends ESTestCase { assertThat(uppercaseProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { TrimProcessor.Factory factory = new TrimProcessor.Factory(); Map config = new HashMap<>(); try { diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java index 822921ea16c..ec38b65d86a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.uppercase; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -31,7 +30,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class UppercaseProcessorFactoryTests extends ESTestCase { - public void testCreate() throws IOException { + public void testCreate() throws Exception { UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); Map config = new HashMap<>(); List fields = Collections.singletonList("field1"); @@ -40,7 +39,7 @@ public class UppercaseProcessorFactoryTests extends ESTestCase { assertThat(uppercaseProcessor.getFields(), equalTo(fields)); } - public void testCreateMissingFields() throws IOException { + public void testCreateMissingFields() throws Exception { UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); Map config = new HashMap<>(); try { From 99a42953307b6e64fed3f2cac7cb7273fd9d3994 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 30 Nov 2015 14:48:54 +0100 Subject: [PATCH 093/347] If a list or map value gets set on ingest document a deep copy needs to be made. If this is not done this can lead to processor configuration being changed by an bulk or index request. 
--- .../elasticsearch/ingest/IngestDocument.java | 30 +++++++++++ .../ingest/IngestDocumentTests.java | 11 ++++ .../elasticsearch/ingest/PipelineTests.java | 52 +++++++++++++++++++ .../ingest/RandomDocumentPicks.java | 2 +- 4 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 5629e25e4b8..fbb68ab812b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.HppcMaps; +import org.elasticsearch.search.aggregations.support.format.ValueParser; import java.util.*; @@ -258,6 +260,8 @@ public final class IngestDocument { String[] pathElements = Strings.splitStringToArray(path, '.'); assert pathElements.length > 0; + value = deepCopy(value); + Object context = source; for (int i = 0; i < pathElements.length - 1; i++) { String pathElement = pathElements[i]; @@ -354,6 +358,32 @@ public final class IngestDocument { return sourceModified; } + static Object deepCopy(Object value) { + if (value instanceof Map) { + @SuppressWarnings("unchecked") + Map mapValue = (Map) value; + Map copy = new HashMap<>(mapValue.size()); + for (Map.Entry entry : mapValue.entrySet()) { + copy.put(entry.getKey(), deepCopy(entry.getValue())); + } + return copy; + } else if (value instanceof List) { + @SuppressWarnings("unchecked") + List listValue = (List) value; + List copy = new ArrayList<>(listValue.size()); + for (Object itemValue : listValue) { + copy.add(deepCopy(itemValue)); + } + return copy; + } else if (value == null || value instanceof String || value instanceof Integer || + value instanceof Long || 
value instanceof Float || + value instanceof Double || value instanceof Boolean) { + return value; + } else { + throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]"); + } + } + @Override public boolean equals(Object obj) { if (obj == this) { return true; } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index fb5966e76db..b3bd8aaf827 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -540,4 +541,14 @@ public class IngestDocumentTests extends ESTestCase { assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode())); } } + + public void testDeepCopy() { + int iterations = scaledRandomIntBetween(8, 64); + for (int i = 0; i < iterations; i++) { + Map map = RandomDocumentPicks.randomDocument(random()); + Object copy = IngestDocument.deepCopy(map); + assertThat("iteration: " + i, copy, equalTo(map)); + assertThat("iteration: " + i, copy, not(sameInstance(map))); + } + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java new file mode 100644 index 00000000000..ae23df8e71d --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java @@ -0,0 +1,52 @@ +package org.elasticsearch.ingest; + +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.set.SetProcessor; +import org.elasticsearch.ingest.processor.remove.RemoveProcessor; +import 
org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class PipelineTests extends ESTestCase { + + public void testProcessorSettingsRemainUntouched() throws Exception { + Map subField = new HashMap<>(); + subField.put("_subfield", "value"); + Map fieldSettings = new HashMap<>(); + fieldSettings.put("_field", subField); + Map addSettings = new HashMap<>(); + addSettings.put("fields", fieldSettings); + Map removeSettings = new HashMap<>(); + removeSettings.put("fields", Collections.singletonList("_field._subfield")); + Pipeline pipeline = createPipeline(processorConfig(SetProcessor.TYPE, addSettings), processorConfig(RemoveProcessor.TYPE, removeSettings)); + + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", new HashMap<>()); + pipeline.execute(ingestDocument); + + assertThat(ingestDocument.getSource().get("_field"), Matchers.notNullValue()); + assertThat(((Map) ingestDocument.getSource().get("_field")).get("_subfield"), Matchers.nullValue()); + assertThat(((Map) fieldSettings.get("_field")).get("_subfield"), Matchers.equalTo("value")); + } + + private Pipeline createPipeline(Map... 
processorConfigs) throws Exception { + Map config = new HashMap<>(); + config.put("processors", Arrays.asList(processorConfigs)); + Map factoryRegistry = new HashMap<>(); + factoryRegistry.put(SetProcessor.TYPE, new SetProcessor.Factory()); + factoryRegistry.put(RemoveProcessor.TYPE, new RemoveProcessor.Factory()); + Pipeline.Factory factory = new Pipeline.Factory(); + return factory.create("_id", config, factoryRegistry); + } + + private Map processorConfig(String type, Map settings) { + Map processorConfig = new HashMap<>(); + processorConfig.put(type, settings); + return processorConfig; + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java index 0443dd0b68f..e83a66a38d6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -135,7 +135,7 @@ public final class RandomDocumentPicks { return new IngestDocument(index, type, id, document); } - private static Map randomDocument(Random random) { + public static Map randomDocument(Random random) { Map document = new HashMap<>(); addRandomFields(random, document, 0); return document; From fa9fcb3b116ee8d522b1c5a4100fb6ecc401b8ff Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 1 Dec 2015 17:12:34 +0100 Subject: [PATCH 094/347] geo processor should add a list of doubles instead of an array to the ingest document --- .../elasticsearch/ingest/processor/geoip/GeoIpProcessor.java | 2 +- .../ingest/processor/geoip/GeoIpProcessorTests.java | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index eb90c4acfa5..bbdc1154f13 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -165,7 +165,7 @@ public final class GeoIpProcessor implements Processor { break; case LOCATION: if (location.getLatitude() != null && location.getLongitude() != null) { - geoData.put("location", new double[]{location.getLongitude(), location.getLatitude()}); + geoData.put("location", Arrays.asList(location.getLongitude(), location.getLatitude())); } break; } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java index 86de0a7862a..8064240c317 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; +import java.util.Arrays; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; @@ -55,7 +56,7 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("timezone"), equalTo("Europe/Amsterdam")); assertThat(geoData.get("latitude"), equalTo(52.374)); assertThat(geoData.get("longitude"), equalTo(4.8897)); - assertThat(geoData.get("location"), equalTo(new double[]{4.8897, 52.374})); + assertThat(geoData.get("location"), equalTo(Arrays.asList(4.8897d, 52.374d))); } public void testCountry() throws Exception { From 9dd52ad7d39e2dc04ec9bd34f72cbdcf6bc65b18 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 30 Nov 2015 18:11:23 +0100 Subject: [PATCH 095/347] Removed pollution from the Processor.Factory interface. 1) It no longer extends from Closeable. 2) Removed the config directory setter. 
Implementation that relied on it, now get the location to the config dir via their constructors. --- .../ingest/processor/Processor.java | 11 +--- .../processor/geoip/GeoIpProcessor.java | 14 ++--- .../ingest/processor/grok/GrokProcessor.java | 11 ++-- .../plugin/ingest/IngestModule.java | 41 +++++++------ .../plugin/ingest/PipelineStore.java | 61 ++++++++++++++----- .../geoip/GeoIpProcessorFactoryTests.java | 19 ++---- .../grok/GrokProcessorFactoryTests.java | 3 +- 7 files changed, 88 insertions(+), 72 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index 1795d2f5c70..d9c788ba21e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -46,7 +46,7 @@ public interface Processor { /** * A factory that knows how to construct a processor based on a map of maps. */ - interface Factory

extends Closeable { + interface Factory

{ /** * Creates a processor based on the specified map of maps config. @@ -56,14 +56,5 @@ public interface Processor { */ P create(Map config) throws Exception; - /** - * Sets the configuration directory when needed to read additional config files - */ - default void setConfigDirectory(Path configDirectory) { - } - - @Override - default void close() throws IOException { - } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index bbdc1154f13..6fd70cf7828 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.Processor; +import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.net.InetAddress; @@ -211,15 +212,19 @@ public final class GeoIpProcessor implements Processor { return geoData; } - public static class Factory implements Processor.Factory { + public static class Factory implements Processor.Factory, Closeable { static final Set DEFAULT_FIELDS = EnumSet.of( Field.CONTINENT_NAME, Field.COUNTRY_ISO_CODE, Field.REGION_NAME, Field.CITY_NAME, Field.LOCATION ); - private Path geoIpConfigDirectory; + private final Path geoIpConfigDirectory; private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); + public Factory(Path configDirectory) { + this.geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); + } + public GeoIpProcessor create(Map config) throws Exception { String ipField = readStringProperty(config, "source_field"); String targetField = readStringProperty(config, "target_field", "geoip"); @@ -250,11 +255,6 @@ public final 
class GeoIpProcessor implements Processor { } } - @Override - public void setConfigDirectory(Path configDirectory) { - geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); - } - @Override public void close() throws IOException { databaseReaderService.close(); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index f78db7e2d8e..320303d2cfa 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -69,7 +69,12 @@ public final class GrokProcessor implements Processor { } public static class Factory implements Processor.Factory { - private Path grokConfigDirectory; + + private final Path grokConfigDirectory; + + public Factory(Path configDirectory) { + this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok"); + } public GrokProcessor create(Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "field"); @@ -90,10 +95,6 @@ public final class GrokProcessor implements Processor { return new GrokProcessor(grok, matchField); } - @Override - public void setConfigDirectory(Path configDirectory) { - this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok"); - } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 898af1a0f3e..5c6961b8670 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -21,7 +21,6 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import 
org.elasticsearch.common.inject.multibindings.MapBinder; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.set.SetProcessor; import org.elasticsearch.ingest.processor.convert.ConvertProcessor; import org.elasticsearch.ingest.processor.date.DateProcessor; @@ -42,9 +41,11 @@ import org.elasticsearch.plugin.ingest.transport.simulate.SimulateExecutionServi import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.plugin.ingest.PipelineStore.ProcessorFactoryProvider; + public class IngestModule extends AbstractModule { - private final Map processors = new HashMap<>(); + private final Map processorFactoryProviders = new HashMap<>(); @Override protected void configure() { @@ -53,23 +54,23 @@ public class IngestModule extends AbstractModule { binder().bind(PipelineStore.class).asEagerSingleton(); binder().bind(SimulateExecutionService.class).asEagerSingleton(); - addProcessor(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory()); - addProcessor(GrokProcessor.TYPE, new GrokProcessor.Factory()); - addProcessor(DateProcessor.TYPE, new DateProcessor.Factory()); - addProcessor(SetProcessor.TYPE, new SetProcessor.Factory()); - addProcessor(RenameProcessor.TYPE, new RenameProcessor.Factory()); - addProcessor(RemoveProcessor.TYPE, new RemoveProcessor.Factory()); - addProcessor(SplitProcessor.TYPE, new SplitProcessor.Factory()); - addProcessor(JoinProcessor.TYPE, new JoinProcessor.Factory()); - addProcessor(UppercaseProcessor.TYPE, new UppercaseProcessor.Factory()); - addProcessor(LowercaseProcessor.TYPE, new LowercaseProcessor.Factory()); - addProcessor(TrimProcessor.TYPE, new TrimProcessor.Factory()); - addProcessor(ConvertProcessor.TYPE, new ConvertProcessor.Factory()); - addProcessor(GsubProcessor.TYPE, new GsubProcessor.Factory()); - addProcessor(MetaDataProcessor.TYPE, new MetaDataProcessor.Factory()); + addProcessor(GeoIpProcessor.TYPE, environment -> new GeoIpProcessor.Factory(environment.configFile())); + 
addProcessor(GrokProcessor.TYPE, environment -> new GrokProcessor.Factory(environment.configFile())); + addProcessor(DateProcessor.TYPE, environment -> new DateProcessor.Factory()); + addProcessor(SetProcessor.TYPE, environment -> new SetProcessor.Factory()); + addProcessor(RenameProcessor.TYPE, environment -> new RenameProcessor.Factory()); + addProcessor(RemoveProcessor.TYPE, environment -> new RemoveProcessor.Factory()); + addProcessor(SplitProcessor.TYPE, environment -> new SplitProcessor.Factory()); + addProcessor(JoinProcessor.TYPE, environment -> new JoinProcessor.Factory()); + addProcessor(UppercaseProcessor.TYPE, environment -> new UppercaseProcessor.Factory()); + addProcessor(LowercaseProcessor.TYPE, environment -> new LowercaseProcessor.Factory()); + addProcessor(TrimProcessor.TYPE, environment -> new TrimProcessor.Factory()); + addProcessor(ConvertProcessor.TYPE, environment -> new ConvertProcessor.Factory()); + addProcessor(GsubProcessor.TYPE, environment -> new GsubProcessor.Factory()); + addProcessor(MetaDataProcessor.TYPE, environment -> new MetaDataProcessor.Factory()); - MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, Processor.Factory.class); - for (Map.Entry entry : processors.entrySet()) { + MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); + for (Map.Entry entry : processorFactoryProviders.entrySet()) { mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue()); } } @@ -77,8 +78,8 @@ public class IngestModule extends AbstractModule { /** * Adds a processor factory under a specific type name. 
*/ - public void addProcessor(String type, Processor.Factory factory) { - processors.put(type, factory); + public void addProcessor(String type, ProcessorFactoryProvider processorFactoryProvider) { + processorFactoryProviders.put(type, processorFactoryProvider); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 133b40b9cc7..b3e30b51ff9 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -36,6 +36,8 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.common.SearchScrollIterator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Provider; @@ -54,10 +56,11 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.threadpool.ThreadPool; +import java.io.Closeable; import java.io.IOException; import java.util.*; -public class PipelineStore extends AbstractComponent { +public class PipelineStore extends AbstractLifecycleComponent { public final static String INDEX = ".ingest"; public final static String TYPE = "pipeline"; @@ -74,29 +77,44 @@ public class PipelineStore extends AbstractComponent { private volatile Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, Provider clientProvider, ThreadPool threadPool, Environment environment, ClusterService clusterService, Map processors) { + public PipelineStore(Settings settings, Provider 
clientProvider, ThreadPool threadPool, Environment environment, ClusterService clusterService, Map processorFactoryProviders) { super(settings); this.threadPool = threadPool; this.clusterService = clusterService; this.clientProvider = clientProvider; this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); - for (Processor.Factory factory : processors.values()) { - factory.setConfigDirectory(environment.configFile()); + Map processorFactories = new HashMap<>(); + for (Map.Entry entry : processorFactoryProviders.entrySet()) { + Processor.Factory processorFactory = entry.getValue().get(environment); + processorFactories.put(entry.getKey(), processorFactory); } - this.processorFactoryRegistry = Collections.unmodifiableMap(processors); + this.processorFactoryRegistry = Collections.unmodifiableMap(processorFactories); clusterService.add(new PipelineStoreListener()); - clusterService.addLifecycleListener(new LifecycleListener() { - @Override - public void beforeClose() { - // Ideally we would implement Closeable, but when a node is stopped this doesn't get invoked: - try { - IOUtils.close(processorFactoryRegistry.values()); - } catch (IOException e) { - throw new RuntimeException(e); - } + } + + @Override + protected void doStart() { + } + + @Override + protected void doStop() { + } + + @Override + protected void doClose() { + // TODO: When org.elasticsearch.node.Node can close Closable instances we should remove this code + List closeables = new ArrayList<>(); + for (Processor.Factory factory : processorFactoryRegistry.values()) { + if (factory instanceof Closeable) { + closeables.add((Closeable) factory); } - }); + } + try { + IOUtils.close(closeables); + } catch (IOException e) { + throw new RuntimeException(e); + } } /** @@ -239,6 +257,19 @@ public class PipelineStore extends 
AbstractComponent { return client; } + /** + * The ingest framework (pipeline, processor and processor factory) can't rely on ES specific code. However some + * processors rely on reading files from the config directory. We can't add Environment as a constructor parameter, + * so we need some code that provides the physical location of the configuration directory to the processor factories + * that need this and this is what this processor factory provider does. + */ + @FunctionalInterface + interface ProcessorFactoryProvider { + + Processor.Factory get(Environment environment); + + } + class Updater implements Runnable { @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java index 010b0ede5c1..d4feeff886e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.test.StreamsUtils; import org.junit.Before; import java.io.ByteArrayInputStream; -import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.*; @@ -47,8 +46,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { } public void testBuild_defaults() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); - factory.setConfigDirectory(configDir); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); Map config = new HashMap<>(); config.put("source_field", "_field"); @@ -61,8 +59,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { } public void testBuild_targetField() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); - factory.setConfigDirectory(configDir); + GeoIpProcessor.Factory factory = 
new GeoIpProcessor.Factory(configDir); Map config = new HashMap<>(); config.put("source_field", "_field"); config.put("target_field", "_field"); @@ -72,8 +69,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { } public void testBuild_dbFile() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); - factory.setConfigDirectory(configDir); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); Map config = new HashMap<>(); config.put("source_field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); @@ -84,8 +80,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { } public void testBuild_nonExistingDbFile() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); - factory.setConfigDirectory(configDir); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); Map config = new HashMap<>(); config.put("source_field", "_field"); @@ -98,8 +93,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { } public void testBuild_fields() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); - factory.setConfigDirectory(configDir); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); Set fields = EnumSet.noneOf(GeoIpProcessor.Field.class); List fieldNames = new ArrayList<>(); @@ -118,8 +112,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { } public void testBuild_illegalFieldOption() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(); - factory.setConfigDirectory(configDir); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); Map config = new HashMap<>(); config.put("source_field", "_field"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java index 
9291c1bb04a..39430fe24bc 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java @@ -43,8 +43,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { } public void testBuild() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(); - factory.setConfigDirectory(configDir); + GrokProcessor.Factory factory = new GrokProcessor.Factory(configDir); Map config = new HashMap<>(); config.put("field", "_field"); From 2c1effdd4122fa469778666b438493c935b1eb88 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 30 Nov 2015 16:22:04 -0800 Subject: [PATCH 096/347] throw exception when grok processor does not match --- .../ingest/processor/grok/GrokProcessor.java | 15 ++-- .../processor/grok/GrokProcessorTests.java | 83 +++++++++++++++++++ .../ingest/processor/grok/GrokTests.java | 7 ++ 3 files changed, 97 insertions(+), 8 deletions(-) create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index f78db7e2d8e..420bd876a96 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -44,14 +44,13 @@ public final class GrokProcessor implements Processor { } @Override - public void execute(IngestDocument ingestDocument) { - Object field = ingestDocument.getFieldValue(matchField, Object.class); - // TODO(talevy): handle invalid field types - if (field instanceof String) { - Map matches = grok.captures((String) field); - if (matches != null) { - matches.forEach((k, v) -> ingestDocument.setFieldValue(k, v)); - } + 
public void execute(IngestDocument ingestDocument) throws Exception { + String fieldValue = ingestDocument.getFieldValue(matchField, String.class); + Map matches = grok.captures(fieldValue); + if (matches != null) { + matches.forEach((k, v) -> ingestDocument.setFieldValue(k, v)); + } else { + throw new IllegalArgumentException("Grok expression does not match field value: [" + fieldValue + "]"); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java new file mode 100644 index 00000000000..5afd15490dc --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.grok; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.test.ESTestCase; + +import java.util.*; + +import static org.hamcrest.Matchers.*; + + +public class GrokProcessorTests extends ESTestCase { + + public void testMatch() throws Exception { + String fieldName = RandomDocumentPicks.randomFieldName(random()); + IngestDocument doc = new IngestDocument("index", "type", "id", new HashMap<>()); + doc.setFieldValue(fieldName, "1"); + Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); + GrokProcessor processor = new GrokProcessor(grok, fieldName); + processor.execute(doc); + assertThat(doc.getFieldValue("one", String.class), equalTo("1")); + } + + public void testNoMatch() { + String fieldName = RandomDocumentPicks.randomFieldName(random()); + IngestDocument doc = new IngestDocument("index", "type", "id", new HashMap<>()); + doc.setFieldValue(fieldName, "23"); + Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); + GrokProcessor processor = new GrokProcessor(grok, fieldName); + try { + processor.execute(doc); + fail(); + } catch (Exception e) { + assertThat(e.getMessage(), equalTo("Grok expression does not match field value: [23]")); + } + } + + public void testNotStringField() { + String fieldName = RandomDocumentPicks.randomFieldName(random()); + IngestDocument doc = new IngestDocument("index", "type", "id", new HashMap<>()); + doc.setFieldValue(fieldName, 1); + Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); + GrokProcessor processor = new GrokProcessor(grok, fieldName); + try { + processor.execute(doc); + fail(); + } catch (Exception e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); + } + } + + public void testMissingField() { + String fieldName = "foo.bar"; + IngestDocument doc = 
new IngestDocument("index", "type", "id", new HashMap<>()); + Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); + GrokProcessor processor = new GrokProcessor(grok, fieldName); + try { + processor.execute(doc); + fail(); + } catch (Exception e) { + assertThat(e.getMessage(), equalTo("field [foo] not present as part of path [foo.bar]")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java index 744ad7c6aac..5632ba489a7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java @@ -281,4 +281,11 @@ public class GrokTests extends ESTestCase { assertEquals(expected, actual); } + + public void testNoMatch() { + Map bank = new HashMap<>(); + bank.put("MONTHDAY", "(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])"); + Grok grok = new Grok(bank, "%{MONTHDAY:greatday}"); + assertThat(grok.captures("nomatch"), nullValue()); + } } From 15b6708a5d37487028bcb60c9035fd8e8f932d38 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 1 Dec 2015 18:20:07 +0100 Subject: [PATCH 097/347] and now make use of the lifecycle infrastructure --- .../org/elasticsearch/plugin/ingest/IngestPlugin.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 77bac7df2bb..5405459b28a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -82,6 +82,15 @@ public class IngestPlugin extends Plugin { } } + @Override + public Collection> nodeServices() { + if (transportClient) { + return Collections.emptyList(); + } else 
{ + return Collections.singletonList(PipelineStore.class); + } + } + @Override public Settings additionalSettings() { return settingsBuilder() From 6c0510b01d98d1e5e633f4b760f385d618013f00 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 1 Dec 2015 19:23:44 +0100 Subject: [PATCH 098/347] Make rename processor less error prone Rename processor now checks whether the field to rename exists and throws exception if it doesn't. It also checks that the new field to rename to doesn't exist yet, and throws exception otherwise. Also we make sure that the rename operation is atomic, otherwise things may break between the remove and the set and we'd leave the document in an inconsistent state. Note that the requirement for the new field name to not exist simplifies the usecase for e.g. { "rename" : { "list.1": "list.2"} } as such a rename wouldn't be accepted if list is actually a list given that either list.2 already exists or the index is out of bounds for the existing list. If one really wants to replace an existing field, that field needs to be removed first through remove processor and then rename can be used. --- docs/plugins/ingest.asciidoc | 3 +- .../processor/rename/RenameProcessor.java | 24 ++-- .../rename/RenameProcessorTests.java | 113 +++++++++++++++++- 3 files changed, 128 insertions(+), 12 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index f5c5af22f8e..5e878224c82 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -33,7 +33,8 @@ Removes one or more existing fields. If a field doesn't exist, nothing will happ -------------------------------------------------- ==== Rename processor -Renames one or more existing fields. If a field doesn't exist, an exception will be thrown. +Renames one or more existing fields. If a field doesn't exist, an exception will be thrown. Also the new field +name must not exist. 
[source,js] -------------------------------------------------- diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java index f9d729c954d..25dde43ea93 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java @@ -46,13 +46,23 @@ public class RenameProcessor implements Processor { @Override public void execute(IngestDocument document) { for(Map.Entry entry : fields.entrySet()) { - if (document.hasField(entry.getKey())) { - if (document.hasField(entry.getKey()) == false) { - throw new IllegalArgumentException("field [" + entry.getKey() + "] doesn't exist"); - } - Object oldValue = document.getFieldValue(entry.getKey(), Object.class); - document.removeField(entry.getKey()); - document.setFieldValue(entry.getValue(), oldValue); + String oldFieldName = entry.getKey(); + if (document.hasField(oldFieldName) == false) { + throw new IllegalArgumentException("field [" + oldFieldName + "] doesn't exist"); + } + String newFieldName = entry.getValue(); + if (document.hasField(newFieldName)) { + throw new IllegalArgumentException("field [" + newFieldName + "] already exists"); + } + + Object oldValue = document.getFieldValue(entry.getKey(), Object.class); + document.setFieldValue(newFieldName, oldValue); + try { + document.removeField(oldFieldName); + } catch (Exception e) { + //remove the new field if the removal of the old one failed + document.removeField(newFieldName); + throw e; } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java index 2acd0542b16..338a5ff8f60 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -24,10 +24,10 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.*; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; public class RenameProcessorTests extends ESTestCase { @@ -56,11 +56,66 @@ public class RenameProcessorTests extends ESTestCase { } } + public void testRenameArrayElement() throws Exception { + Map document = new HashMap<>(); + List list = new ArrayList<>(); + list.add("item1"); + list.add("item2"); + list.add("item3"); + document.put("list", list); + List> one = new ArrayList<>(); + one.add(Collections.singletonMap("one", "one")); + one.add(Collections.singletonMap("two", "two")); + document.put("one", one); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + + Processor processor = new RenameProcessor(Collections.singletonMap("list.0", "item")); + processor.execute(ingestDocument); + Object actualObject = ingestDocument.getSource().get("list"); + assertThat(actualObject, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List actualList = (List) actualObject; + assertThat(actualList.size(), equalTo(2)); + assertThat(actualList.get(0), equalTo("item2")); + assertThat(actualList.get(1), equalTo("item3")); + actualObject = ingestDocument.getSource().get("item"); + assertThat(actualObject, instanceOf(String.class)); + assertThat(actualObject, equalTo("item1")); + + processor = new RenameProcessor(Collections.singletonMap("list.0", "list.3")); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } 
catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[3] is out of bounds for array with length [2] as part of path [list.3]")); + assertThat(actualList.size(), equalTo(2)); + assertThat(actualList.get(0), equalTo("item2")); + assertThat(actualList.get(1), equalTo("item3")); + } + } + public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - Processor processor = new RenameProcessor(Collections.singletonMap(RandomDocumentPicks.randomFieldName(random()), RandomDocumentPicks.randomFieldName(random()))); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(0)); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = new RenameProcessor(Collections.singletonMap(fieldName, RandomDocumentPicks.randomFieldName(random()))); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] doesn't exist")); + } + } + + public void testRenameNewFieldAlreadyExists() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + Processor processor = new RenameProcessor(Collections.singletonMap(RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName)); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] already exists")); + } } public void testRenameExistingFieldNullValue() throws Exception { @@ -74,4 +129,54 @@ public class RenameProcessorTests extends ESTestCase { assertThat(ingestDocument.hasField(newFieldName), equalTo(true)); 
assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), nullValue()); } + + public void testRenameAtomicOperationSetFails() throws Exception { + Map document = new HashMap() { + private static final long serialVersionUID = 362498820763181265L; + @Override + public Object put(String key, Object value) { + if (key.equals("new_field")) { + throw new UnsupportedOperationException(); + } + return super.put(key, value); + } + }; + document.put("list", Collections.singletonList("item")); + + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + Processor processor = new RenameProcessor(Collections.singletonMap("list", "new_field")); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch(UnsupportedOperationException e) { + //the set failed, the old field has not been removed + assertThat(ingestDocument.getSource().containsKey("list"), equalTo(true)); + assertThat(ingestDocument.getSource().containsKey("new_field"), equalTo(false)); + } + } + + public void testRenameAtomicOperationRemoveFails() throws Exception { + Map document = new HashMap() { + private static final long serialVersionUID = 362498820763181265L; + @Override + public Object remove(Object key) { + if (key.equals("list")) { + throw new UnsupportedOperationException(); + } + return super.remove(key); + } + }; + document.put("list", Collections.singletonList("item")); + + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + Processor processor = new RenameProcessor(Collections.singletonMap("list", "new_field")); + try { + processor.execute(ingestDocument); + fail("processor execute should have failed"); + } catch (UnsupportedOperationException e) { + //the set failed, the old field has not been removed + assertThat(ingestDocument.getSource().containsKey("list"), equalTo(true)); + assertThat(ingestDocument.getSource().containsKey("new_field"), equalTo(false)); + } + } 
} From 5e07644788a29a36c1c91fd7e1de7b113464fdf9 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 1 Dec 2015 20:07:17 +0100 Subject: [PATCH 099/347] [DOCS] add missing comma --- docs/plugins/ingest.asciidoc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 5e878224c82..0c1c20c2cea 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -3,14 +3,14 @@ === Processors -==== Set processor -Sets one or more fields and associates them with the specified values. If a field already exists, +==== Add processor +Adds one or more fields and associates them with the specified values. If a field already exists, its value will be replaced with the provided one. [source,js] -------------------------------------------------- { - "set": { + "add": { "fields": { "field": 582.1 } @@ -33,7 +33,7 @@ Removes one or more existing fields. If a field doesn't exist, nothing will happ -------------------------------------------------- ==== Rename processor -Renames one or more existing fields. If a field doesn't exist, an exception will be thrown. Also the new field +Renames one or more existing fields. If a field doesn't exist, an exception will be thrown. Also, the new field name must not exist. 
[source,js] From 767bd1d4d5ce431a5103b1a51a47868076be2141 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 1 Dec 2015 15:46:02 -0800 Subject: [PATCH 100/347] move PatternUtils#loadBankFromStream into GrokProcessor.Factory --- .../ingest/processor/grok/GrokProcessor.java | 22 ++++++++- .../ingest/processor/grok/PatternUtils.java | 47 ------------------- .../ingest/processor/grok/GrokTests.java | 2 +- 3 files changed, 22 insertions(+), 49 deletions(-) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 46d937598f4..6541ecff2e1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -23,7 +23,11 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; +import java.io.BufferedReader; +import java.io.IOException; import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; @@ -75,6 +79,22 @@ public final class GrokProcessor implements Processor { this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok"); } + static void loadBankFromStream(Map patternBank, InputStream inputStream) throws IOException { + String line; + BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); + while ((line = br.readLine()) != null) { + String trimmedLine = line.replaceAll("^\\s+", ""); + if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) { + continue; + } + + String[] parts = 
trimmedLine.split("\\s+", 2); + if (parts.length == 2) { + patternBank.put(parts[0], parts[1]); + } + } + } + public GrokProcessor create(Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "field"); String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern"); @@ -84,7 +104,7 @@ public final class GrokProcessor implements Processor { for (Path patternFilePath : stream) { if (Files.isRegularFile(patternFilePath)) { try(InputStream is = Files.newInputStream(patternFilePath, StandardOpenOption.READ)) { - PatternUtils.loadBankFromStream(patternBank, is); + loadBankFromStream(patternBank, is); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java deleted file mode 100644 index 05b291d4ea6..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/PatternUtils.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.ingest.processor.grok; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.Map; - -final class PatternUtils { - private PatternUtils() {} - - public static void loadBankFromStream(Map patternBank, InputStream inputStream) throws IOException { - String line; - BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - while ((line = br.readLine()) != null) { - String trimmedLine = line.replaceAll("^\\s+", ""); - if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) { - continue; - } - - String[] parts = trimmedLine.split("\\s+", 2); - if (parts.length == 2) { - patternBank.put(parts[0], parts[1]); - } - } - } - -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java index 5632ba489a7..b73a8e0030b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java @@ -40,7 +40,7 @@ public class GrokTests extends ESTestCase { Map patternBank = new HashMap<>(); for (InputStream is : inputStreams) { - PatternUtils.loadBankFromStream(patternBank, is); + GrokProcessor.Factory.loadBankFromStream(patternBank, is); } return patternBank; From 270a3977bc623404161d37c67a2a523bb44ee3e6 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 1 Dec 2015 22:35:51 +0100 Subject: [PATCH 101/347] Removed the lazy cache in DatabaseReaderService and eagerly build all available databases. 
--- .../geoip/DatabaseReaderService.java | 51 ------------------- .../processor/geoip/GeoIpProcessor.java | 47 ++++++++++++----- .../geoip/DatabaseReaderServiceTests.java | 41 --------------- .../geoip/GeoIpProcessorFactoryTests.java | 5 +- 4 files changed, 37 insertions(+), 107 deletions(-) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java deleted file mode 100644 index 8d61accf92e..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderService.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ingest.processor.geoip; - -import com.maxmind.geoip2.DatabaseReader; - -import java.io.Closeable; -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; - -final class DatabaseReaderService implements Closeable { - - private final Map databaseReaders = new HashMap<>(); - - synchronized DatabaseReader getOrCreateDatabaseReader(String key, InputStream inputStream) throws IOException { - DatabaseReader databaseReader = databaseReaders.get(key); - if (databaseReader != null) { - return databaseReader; - } - - databaseReader = new DatabaseReader.Builder(inputStream).build(); - databaseReaders.put(key, databaseReader); - return databaseReader; - } - - @Override - public void close() throws IOException { - for (DatabaseReader databaseReader : databaseReaders.values()) { - databaseReader.close(); - } - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 6fd70cf7828..83871d54c86 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -24,6 +24,7 @@ import com.maxmind.geoip2.exception.AddressNotFoundException; import com.maxmind.geoip2.model.CityResponse; import com.maxmind.geoip2.model.CountryResponse; import com.maxmind.geoip2.record.*; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.IngestDocument; @@ -40,8 +41,10 @@ import java.nio.file.StandardOpenOption; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.*; +import java.util.stream.Stream; import static org.elasticsearch.ingest.processor.ConfigurationUtils.readList; +import 
static org.elasticsearch.ingest.processor.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringProperty; public final class GeoIpProcessor implements Processor { @@ -218,21 +221,41 @@ public final class GeoIpProcessor implements Processor { Field.CONTINENT_NAME, Field.COUNTRY_ISO_CODE, Field.REGION_NAME, Field.CITY_NAME, Field.LOCATION ); - private final Path geoIpConfigDirectory; - private final DatabaseReaderService databaseReaderService = new DatabaseReaderService(); + private final Map databaseReaders; public Factory(Path configDirectory) { - this.geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); + Path geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); + if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) { + throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist"); + } + + try (Stream databaseFiles = Files.list(geoIpConfigDirectory)) { + Map databaseReaders = new HashMap<>(); + // Use iterator instead of forEach otherwise IOException needs to be caught twice... 
+ Iterator iterator = databaseFiles.iterator(); + while (iterator.hasNext()) { + Path databasePath = iterator.next(); + if (Files.isRegularFile(databasePath)) { + try (InputStream inputStream = Files.newInputStream(databasePath, StandardOpenOption.READ)) { + databaseReaders.put(databasePath.getFileName().toString(), new DatabaseReader.Builder(inputStream).build()); + } + } + } + this.databaseReaders = Collections.unmodifiableMap(databaseReaders); + } catch (IOException e) { + throw new RuntimeException(e); + } } public GeoIpProcessor create(Map config) throws Exception { String ipField = readStringProperty(config, "source_field"); String targetField = readStringProperty(config, "target_field", "geoip"); String databaseFile = readStringProperty(config, "database_file", "GeoLite2-City.mmdb"); + List fieldNames = readOptionalList(config, "fields"); + final Set fields; - if (config.containsKey("fields")) { + if (fieldNames != null) { fields = EnumSet.noneOf(Field.class); - List fieldNames = readList(config, "fields"); for (String fieldName : fieldNames) { try { fields.add(Field.parse(fieldName)); @@ -244,20 +267,16 @@ public final class GeoIpProcessor implements Processor { fields = DEFAULT_FIELDS; } - Path databasePath = geoIpConfigDirectory.resolve(databaseFile); - if (Files.exists(databasePath) && Files.isRegularFile(databasePath)) { - try (InputStream database = Files.newInputStream(databasePath, StandardOpenOption.READ)) { - DatabaseReader databaseReader = databaseReaderService.getOrCreateDatabaseReader(databaseFile, database); - return new GeoIpProcessor(ipField, databaseReader, targetField, fields); - } - } else { - throw new IllegalArgumentException("database file [" + databaseFile + "] doesn't exist in [" + geoIpConfigDirectory + "]"); + DatabaseReader databaseReader = databaseReaders.get(databaseFile); + if (databaseReader == null) { + throw new IllegalArgumentException("database file [" + databaseFile + "] doesn't exist"); } + return new 
GeoIpProcessor(ipField, databaseReader, targetField, fields); } @Override public void close() throws IOException { - databaseReaderService.close(); + IOUtils.close(databaseReaders.values()); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java deleted file mode 100644 index ebf3fefdba0..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/DatabaseReaderServiceTests.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ingest.processor.geoip; - -import com.maxmind.geoip2.DatabaseReader; -import org.elasticsearch.test.ESTestCase; -import static org.hamcrest.Matchers.*; - -import java.io.InputStream; - -public class DatabaseReaderServiceTests extends ESTestCase { - - public void testLookup() throws Exception { - InputStream database = DatabaseReaderServiceTests.class.getResourceAsStream("/GeoLite2-City.mmdb"); - - DatabaseReaderService service = new DatabaseReaderService(); - DatabaseReader instance = service.getOrCreateDatabaseReader("key1", database); - assertThat(service.getOrCreateDatabaseReader("key1", database), equalTo(instance)); - - database = DatabaseReaderServiceTests.class.getResourceAsStream("/GeoLite2-City.mmdb"); - assertThat(service.getOrCreateDatabaseReader("key2", database), not(equalTo(instance))); - } - -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java index d4feeff886e..d42f87d8048 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java @@ -87,8 +87,9 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("database_file", "does-not-exist.mmdb"); try { factory.create(config); + fail("Exception expected"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), startsWith("database file [does-not-exist.mmdb] doesn't exist in")); + assertThat(e.getMessage(), equalTo("database file [does-not-exist.mmdb] doesn't exist")); } } @@ -119,6 +120,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("fields", Collections.singletonList("invalid")); try { factory.create(config); + fail("exception expected"); } catch (IllegalArgumentException e) { 
assertThat(e.getMessage(), equalTo("illegal field option [invalid]. valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]")); } @@ -128,6 +130,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("fields", "invalid"); try { factory.create(config); + fail("exception expected"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("property [fields] isn't a list, but of type [java.lang.String]")); } From f427ad20948dddd1a87613ef66f1faa9f2ae7260 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 2 Dec 2015 12:17:43 +0100 Subject: [PATCH 102/347] docs: undo accidental rename added via: 5e07644788a29a36c1c91fd7e1de7b113464fdf9 --- docs/plugins/ingest.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 0c1c20c2cea..4d72756c5e7 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -3,14 +3,14 @@ === Processors -==== Add processor -Adds one or more fields and associates them with the specified values. If a field already exists, +==== Set processor +Sets one or more fields and associates them with the specified values. If a field already exists, its value will be replaced with the provided one. 
[source,js] -------------------------------------------------- { - "add": { + "set": { "fields": { "field": 582.1 } From 9ab765b851f39d160251283cfc4cd47a25dee235 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 2 Dec 2015 14:38:49 +0100 Subject: [PATCH 103/347] The geoip processor should only try to read *.mmdb files from the geoip config directory --- .../ingest/processor/geoip/GeoIpProcessor.java | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 83871d54c86..50ac93ac491 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -35,15 +35,12 @@ import java.io.IOException; import java.io.InputStream; import java.net.InetAddress; import java.net.UnknownHostException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; +import java.nio.file.*; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.*; import java.util.stream.Stream; -import static org.elasticsearch.ingest.processor.ConfigurationUtils.readList; import static org.elasticsearch.ingest.processor.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringProperty; @@ -231,11 +228,12 @@ public final class GeoIpProcessor implements Processor { try (Stream databaseFiles = Files.list(geoIpConfigDirectory)) { Map databaseReaders = new HashMap<>(); + PathMatcher pathMatcher = geoIpConfigDirectory.getFileSystem().getPathMatcher("glob:**.mmdb"); // Use iterator instead of forEach otherwise IOException needs to be caught twice... 
Iterator iterator = databaseFiles.iterator(); while (iterator.hasNext()) { Path databasePath = iterator.next(); - if (Files.isRegularFile(databasePath)) { + if (Files.isRegularFile(databasePath) && pathMatcher.matches(databasePath)) { try (InputStream inputStream = Files.newInputStream(databasePath, StandardOpenOption.READ)) { databaseReaders.put(databasePath.getFileName().toString(), new DatabaseReader.Builder(inputStream).build()); } From 6acf8ec2634f7db23a030301aa21c012fec81ba9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 3 Dec 2015 15:19:04 +0100 Subject: [PATCH 104/347] Removed pipeline tests with a simpler tests The PipelineTests tried to test if the configured map/list in set processor wasn't modified while documents were ingested. Creating a pipeline programmatically created more noise than the test needed. The new tests in IngestDocumentTests have the same goal, but is much smaller and clearer by directly testing against IngestDocument. --- .../ingest/IngestDocumentTests.java | 24 +++++++++ .../elasticsearch/ingest/PipelineTests.java | 52 ------------------- 2 files changed, 24 insertions(+), 52 deletions(-) delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index b3bd8aaf827..3f3bc832b2a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -551,4 +551,28 @@ public class IngestDocumentTests extends ESTestCase { assertThat("iteration: " + i, copy, not(sameInstance(map))); } } + + public void testDeepCopyDoesNotChangeProvidedMap() { + Map myPreciousMap = new HashMap<>(); + myPreciousMap.put("field2", "value2"); + + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", new 
HashMap<>()); + ingestDocument.setFieldValue("field1", myPreciousMap); + ingestDocument.removeField("field1.field2"); + + assertThat(myPreciousMap.size(), equalTo(1)); + assertThat(myPreciousMap.get("field2"), equalTo("value2")); + } + + public void testDeepCopyDoesNotChangeProvidedList() { + List myPreciousList = new ArrayList<>(); + myPreciousList.add("value"); + + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", new HashMap<>()); + ingestDocument.setFieldValue("field1", myPreciousList); + ingestDocument.removeField("field1.0"); + + assertThat(myPreciousList.size(), equalTo(1)); + assertThat(myPreciousList.get(0), equalTo("value")); + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java deleted file mode 100644 index ae23df8e71d..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineTests.java +++ /dev/null @@ -1,52 +0,0 @@ -package org.elasticsearch.ingest; - -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.set.SetProcessor; -import org.elasticsearch.ingest.processor.remove.RemoveProcessor; -import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -public class PipelineTests extends ESTestCase { - - public void testProcessorSettingsRemainUntouched() throws Exception { - Map subField = new HashMap<>(); - subField.put("_subfield", "value"); - Map fieldSettings = new HashMap<>(); - fieldSettings.put("_field", subField); - Map addSettings = new HashMap<>(); - addSettings.put("fields", fieldSettings); - Map removeSettings = new HashMap<>(); - removeSettings.put("fields", Collections.singletonList("_field._subfield")); - Pipeline pipeline = 
createPipeline(processorConfig(SetProcessor.TYPE, addSettings), processorConfig(RemoveProcessor.TYPE, removeSettings)); - - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", new HashMap<>()); - pipeline.execute(ingestDocument); - - assertThat(ingestDocument.getSource().get("_field"), Matchers.notNullValue()); - assertThat(((Map) ingestDocument.getSource().get("_field")).get("_subfield"), Matchers.nullValue()); - assertThat(((Map) fieldSettings.get("_field")).get("_subfield"), Matchers.equalTo("value")); - } - - private Pipeline createPipeline(Map... processorConfigs) throws Exception { - Map config = new HashMap<>(); - config.put("processors", Arrays.asList(processorConfigs)); - Map factoryRegistry = new HashMap<>(); - factoryRegistry.put(SetProcessor.TYPE, new SetProcessor.Factory()); - factoryRegistry.put(RemoveProcessor.TYPE, new RemoveProcessor.Factory()); - Pipeline.Factory factory = new Pipeline.Factory(); - return factory.create("_id", config, factoryRegistry); - } - - private Map processorConfig(String type, Map settings) { - Map processorConfig = new HashMap<>(); - processorConfig.put(type, settings); - return processorConfig; - } - -} From 56da7b32edaf3d54875a33791e66f1832cedd061 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 1 Dec 2015 16:22:30 -0800 Subject: [PATCH 105/347] add ability to define custom grok patterns within processor config --- docs/plugins/ingest.asciidoc | 23 +++++ .../ingest/processor/grok/GrokProcessor.java | 7 ++ .../grok/GrokProcessorFactoryTests.java | 13 +++ .../rest-api-spec/test/ingest/30_grok.yaml | 97 +++++++++++++++++++ 4 files changed, 140 insertions(+) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 4d72756c5e7..46d57fbe994 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -245,6 +245,7 @@ TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) | Name | Required | Default | Description | `match_field` | yes | - | The field to use 
for grok expression parsing | `match_pattern` | yes | - | The grok expression to match and extract named captures with +| `pattern_definitions` | no | - | A map of pattern-name and pattern tuples defining custom patterns to be used by the current processor. Patterns matching existing names will override the pre-existing definition. |====== Here is an example of using the provided patterns to extract out and name structured fields from a string field in @@ -295,6 +296,28 @@ This pipeline will insert these named captures as new fields within the document } -------------------------------------------------- +An example of a pipeline specifying custom pattern definitions: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors": [ + { + "grok": { + "match_field": "message", + "match_pattern": "my %{FAVORITE_DOG:dog} is colored %{RGB:color}" + "pattern_definitions" : { + "FAVORITE_DOG" : "beagle", + "RGB" : "RED|GREEN|BLUE" + } + } + } + ] +} +-------------------------------------------------- + + ==== Geoip processor The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases. 
diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java index 6541ecff2e1..562a86a402a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java @@ -98,7 +98,10 @@ public final class GrokProcessor implements Processor { public GrokProcessor create(Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "field"); String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern"); + Map customPatternBank = ConfigurationUtils.readOptionalMap(config, "pattern_definitions"); + Map patternBank = new HashMap<>(); + Path patternsDirectory = grokConfigDirectory.resolve("patterns"); try (DirectoryStream stream = Files.newDirectoryStream(patternsDirectory)) { for (Path patternFilePath : stream) { @@ -110,6 +113,10 @@ public final class GrokProcessor implements Processor { } } + if (customPatternBank != null) { + patternBank.putAll(customPatternBank); + } + Grok grok = new Grok(patternBank, matchPattern); return new GrokProcessor(grok, matchField); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java index 39430fe24bc..e8d44d392d7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java @@ -24,6 +24,7 @@ import org.junit.Before; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -53,4 +54,16 @@ public class GrokProcessorFactoryTests extends ESTestCase { 
assertThat(processor.getGrok(), notNullValue()); } + public void testCreateWithCustomPatterns() throws Exception { + GrokProcessor.Factory factory = new GrokProcessor.Factory(configDir); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("pattern", "%{MY_PATTERN:name}!"); + config.put("pattern_definitions", Collections.singletonMap("MY_PATTERN", "foo")); + GrokProcessor processor = factory.create(config); + assertThat(processor.getMatchField(), equalTo("_field")); + assertThat(processor.getGrok(), notNullValue()); + assertThat(processor.getGrok().match("foo!"), equalTo(true)); + } } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml index d59a3b53ff5..e0f97b625b1 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml @@ -46,3 +46,100 @@ - match: { _source.status: 400 } - match: { _source.msg: "foo" } +--- +"Test Grok Pipeline With Custom Pattern": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "grok" : { + "field" : "field1", + "pattern" : "<%{MY_PATTERN:msg}>", + "pattern_definitions" : { + "MY_PATTERN" : "foo" + } + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field1: ""} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.msg: "foo" } + +--- +"Test Grok Pipeline With Custom Pattern Sharing Same Name As Another": + - do: + cluster.health: + wait_for_status: green + + - do: + 
ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "grok" : { + "field" : "field1", + "pattern" : "<%{NUMBER:msg}>", + "pattern_definitions" : { + "NUMBER" : "foo" + } + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field1: ""} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.msg: "foo" } From 73986cc54fdc7c5644a4848716cd787a700c37a7 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 4 Dec 2015 14:17:07 +0100 Subject: [PATCH 106/347] adapt to upstream changes --- .../plugin/ingest/PipelineExecutionService.java | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index d717174c05d..4a959209152 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; @@ -58,12 +57,7 @@ public class PipelineExecutionService { String routing = indexRequest.routing(); String parent = indexRequest.parent(); String timestamp = indexRequest.timestamp(); - 
String ttl = null; - if (indexRequest.ttl() != -1) { - // At this point we don't know the original string ttl that was specified, - // so we covert the ttl which is a long to a string using 'ms' as unit: - ttl = TimeValue.timeValueMillis(indexRequest.ttl()).toString(); - } + String ttl = indexRequest.ttl() == null ? null : indexRequest.ttl().toString(); Map sourceAsMap = indexRequest.sourceAsMap(); IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); try { @@ -77,11 +71,7 @@ public class PipelineExecutionService { indexRequest.routing(ingestDocument.getMetadata(IngestDocument.MetaData.ROUTING)); indexRequest.parent(ingestDocument.getMetadata(IngestDocument.MetaData.PARENT)); indexRequest.timestamp(ingestDocument.getMetadata(IngestDocument.MetaData.TIMESTAMP)); - String ttlStr = ingestDocument.getMetadata(IngestDocument.MetaData.TTL); - if (ttlStr != null) { - TimeValue timeValue = TimeValue.parseTimeValue(ttlStr, null, "ttl"); - indexRequest.ttl(timeValue.millis()); - } + indexRequest.ttl(ingestDocument.getMetadata(IngestDocument.MetaData.TTL)); listener.onResponse(ingestDocument); } catch (Throwable e) { listener.onFailure(e); From d7c3b51b9c9fb7aed8d106891a27ad57bb0bc281 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 4 Dec 2015 16:35:53 +0100 Subject: [PATCH 107/347] [TEST] adapt to upstream changes --- .../ingest/PipelineExecutionServiceTests.java | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 62a9bbd60b7..2aa7d2207f8 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -22,7 +22,6 @@ package 
org.elasticsearch.plugin.ingest; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; @@ -30,17 +29,14 @@ import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.meta.MetaDataProcessor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.junit.After; import org.junit.Before; import org.mockito.ArgumentMatcher; import org.mockito.Matchers; import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import java.util.*; -import java.util.concurrent.Executor; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; import static org.mockito.Mockito.*; @@ -57,10 +53,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { executionService = new PipelineExecutionService(store, threadPool); } - public void testExecute_pipelineDoesNotExist() { + public void testExecutePipelineDoesNotExist() { when(store.get("_id")).thenReturn(null); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - ActionListener listener = mock(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(listener).onFailure(any(IllegalArgumentException.class)); verify(listener, times(0)).onResponse(any()); @@ -68,10 +65,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecuteSuccess() throws Exception { Processor processor = mock(Processor.class); - when(store.get("_id")).thenReturn(new Pipeline("_id", 
"_description", Arrays.asList(processor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - ActionListener listener = mock(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); verify(listener).onResponse(eqID("_index", "_type", "_id", Collections.emptyMap())); @@ -92,10 +90,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { } return null; }).when(processor).execute(any()); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Arrays.asList(processor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - ActionListener listener = mock(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(any()); verify(listener).onResponse(any()); @@ -107,21 +106,23 @@ public class PipelineExecutionServiceTests extends ESTestCase { assertThat(indexRequest.routing(), equalTo("update_routing")); assertThat(indexRequest.parent(), equalTo("update_parent")); assertThat(indexRequest.timestamp(), equalTo("update_timestamp")); - assertThat(indexRequest.ttl(), equalTo(3024000000l)); + assertThat(indexRequest.ttl(), equalTo(new TimeValue(3024000000l))); } - public void testExecute_failure() throws Exception { + public void testExecuteFailure() throws Exception { Processor processor = mock(Processor.class); - when(store.get("_id")).thenReturn(new Pipeline("_id", 
"_description", Arrays.asList(processor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - ActionListener listener = mock(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); verify(listener, times(0)).onResponse(eqID("_index", "_type", "_id", Collections.emptyMap())); verify(listener).onFailure(any(RuntimeException.class)); } + @SuppressWarnings("unchecked") public void testExecuteTTL() throws Exception { // test with valid ttl MetaDataProcessor.Factory metaProcessorFactory = new MetaDataProcessor.Factory(); @@ -131,10 +132,10 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - ActionListener listener = mock(ActionListener.class); + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); - assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl").millis())); + assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); verify(listener, times(1)).onResponse(any()); verify(listener, never()).onFailure(any()); @@ -161,7 +162,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); - assertThat(indexRequest.ttl(), 
equalTo(1000l)); + assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000l))); verify(listener, times(1)).onResponse(any()); verify(listener, never()).onFailure(any(Throwable.class)); } From 45f48ac12674f66b9eac4afa8abfc438686cc06a Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 30 Nov 2015 18:05:17 -0800 Subject: [PATCH 108/347] update all processors to only operate on one field at a time when possible --- docs/plugins/ingest.asciidoc | 55 ++-- .../processor/AbstractStringProcessor.java | 26 +- .../ingest/processor/ConfigurationUtils.java | 11 + .../processor/convert/ConvertProcessor.java | 60 ++-- .../ingest/processor/gsub/GsubExpression.java | 70 ----- .../ingest/processor/gsub/GsubProcessor.java | 63 ++-- .../ingest/processor/join/JoinProcessor.java | 39 ++- .../lowercase/LowercaseProcessor.java | 8 +- .../processor/remove/RemoveProcessor.java | 21 +- .../processor/rename/RenameProcessor.java | 54 ++-- .../ingest/processor/set/SetProcessor.java | 26 +- .../processor/split/SplitProcessor.java | 30 +- .../ingest/processor/trim/TrimProcessor.java | 8 +- .../uppercase/UppercaseProcessor.java | 8 +- .../AbstractStringProcessorTestCase.java | 23 +- .../convert/ConvertProcessorFactoryTests.java | 48 ++- .../convert/ConvertProcessorTests.java | 291 +++++++----------- .../gsub/GsubProcessorFactoryTests.java | 59 +--- .../processor/gsub/GsubProcessorTests.java | 22 +- .../join/JoinProcessorFactoryTests.java | 27 +- .../processor/join/JoinProcessorTests.java | 77 ++--- .../LowercaseProcessorFactoryTests.java | 9 +- .../lowercase/LowercaseProcessorTests.java | 4 +- .../remove/RemoveProcessorFactoryTests.java | 9 +- .../remove/RemoveProcessorTests.java | 14 +- .../rename/RenameProcessorFactoryTests.java | 24 +- .../rename/RenameProcessorTests.java | 41 +-- .../set/SetProcessorFactoryTests.java | 37 ++- .../processor/set/SetProcessorTests.java | 37 +-- .../split/SplitProcessorFactoryTests.java | 24 +- .../processor/split/SplitProcessorTests.java | 21 +- 
.../trim/TrimProcessorFactoryTests.java | 9 +- .../processor/trim/TrimProcessorTests.java | 6 +- .../UppercaseProcessorFactoryTests.java | 9 +- .../uppercase/UppercaseProcessorTests.java | 4 +- .../rest-api-spec/test/ingest/20_crud.yaml | 5 +- .../rest-api-spec/test/ingest/60_mutate.yaml | 51 +-- .../test/ingest/80_simulate.yaml | 29 +- 38 files changed, 582 insertions(+), 777 deletions(-) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubExpression.java diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 46d57fbe994..0ae37433bae 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -4,52 +4,46 @@ === Processors ==== Set processor -Sets one or more fields and associates them with the specified values. If a field already exists, +Sets one field and associates it with the specified value. If a field already exists, its value will be replaced with the provided one. [source,js] -------------------------------------------------- { "set": { - "fields": { - "field": 582.1 - } + "field1": 582.1 } } -------------------------------------------------- ==== Remove processor -Removes one or more existing fields. If a field doesn't exist, nothing will happen. +Removes an existing field. If a field doesn't exist, nothing will happen. [source,js] -------------------------------------------------- { "remove": { - "fields": [ - "field1","field2" - ] + "field": "foo" } } -------------------------------------------------- ==== Rename processor -Renames one or more existing fields. If a field doesn't exist, an exception will be thrown. Also, the new field +Renames an existing fields. If a field doesn't exist, an exception will be thrown. Also, the new field name must not exist. 
[source,js] -------------------------------------------------- { "rename": { - "fields": { - "field1": "field2" - } + "field": "foo" } } -------------------------------------------------- ==== Convert processor -Converts one or more field value to a different type, like turning a string to an integer. +Converts an existing field's value to a different type, like turning a string to an integer. If the field value is an array, all members will be converted. The supported types include: `integer`, `float`, `string`, and `boolean`. @@ -61,10 +55,7 @@ false if its string value is equal to `false` (ignore case) and it will throw ex -------------------------------------------------- { "convert": { - "fields": { - "field1": "integer", - "field2": "float" - } + "foo": "integer" } } -------------------------------------------------- @@ -73,8 +64,7 @@ false if its string value is equal to `false` (ignore case) and it will throw ex Converts a string field by applying a regular expression and a replacement. If the field is not a string, the processor will throw an exception. -This configuration takes an `expression` array consisting of objects. Each object -holds three elements: `field` for the field name, `pattern` for the +This configuration takes a `field` for the field name, `pattern` for the pattern to be replaced, and `replacement` for the string to replace the matching patterns with. @@ -82,13 +72,9 @@ pattern to be replaced, and `replacement` for the string to replace the matching -------------------------------------------------- { "gsub": { - "expressions": [ - { - "field": "field1", - "pattern": "\.", - "replacement": "-" - } - ] + "field": "field1", + "pattern": "\.", + "replacement": "-" } } -------------------------------------------------- @@ -101,9 +87,8 @@ Throws error when the field is not an array. 
-------------------------------------------------- { "join": { - "fields": { - "joined_array_field": "other_array_field" - } + "field": "joined_array_field", + "separator": "-" } } -------------------------------------------------- @@ -115,9 +100,7 @@ Split a field to an array using a separator character. Only works on string fiel -------------------------------------------------- { "split": { - "fields": { - "message": "," - } + "field": "," } } -------------------------------------------------- @@ -129,7 +112,7 @@ Converts a string to its lowercase equivalent. -------------------------------------------------- { "lowercase": { - "fields": ["foo", "bar"] + "field": "foo" } } -------------------------------------------------- @@ -141,7 +124,7 @@ Converts a string to its uppercase equivalent. -------------------------------------------------- { "uppercase": { - "fields": ["foo", "bar"] + "field": "foo" } } -------------------------------------------------- @@ -153,7 +136,7 @@ Trims whitespace from field. NOTE: this only works on leading and trailing white -------------------------------------------------- { "trim": { - "fields": ["foo", "bar"] + "field": "foo" } } -------------------------------------------------- @@ -538,4 +521,4 @@ The delete pipeline api deletes pipelines by id. 
-------------------------------------------------- DELETE _ingest/pipeline/my-pipeline-id -------------------------------------------------- -// AUTOSENSE \ No newline at end of file +// AUTOSENSE diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index 21d2c6c7d14..475ace15552 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -32,25 +32,23 @@ import java.util.Map; */ public abstract class AbstractStringProcessor implements Processor { - private final Collection fields; + private final String field; - protected AbstractStringProcessor(Collection fields) { - this.fields = fields; + protected AbstractStringProcessor(String field) { + this.field = field; } - public Collection getFields() { - return fields; + public String getField() { + return field; } @Override public final void execute(IngestDocument document) { - for(String field : fields) { - String val = document.getFieldValue(field, String.class); - if (val == null) { - throw new IllegalArgumentException("field [" + field + "] is null, cannot process it."); - } - document.setFieldValue(field, process(val)); + String val = document.getFieldValue(field, String.class); + if (val == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot process it."); } + document.setFieldValue(field, process(val)); } protected abstract String process(String value); @@ -58,10 +56,10 @@ public abstract class AbstractStringProcessor implements Processor { public static abstract class Factory implements Processor.Factory { @Override public T create(Map config) throws Exception { - List fields = ConfigurationUtils.readList(config, "fields"); - return newProcessor(Collections.unmodifiableList(fields)); + 
String field = ConfigurationUtils.readStringProperty(config, "field"); + return newProcessor(field); } - protected abstract T newProcessor(Collection fields); + protected abstract T newProcessor(String field); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java index af001decc08..7ba737eb56e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java @@ -149,4 +149,15 @@ public final class ConfigurationUtils { throw new IllegalArgumentException("property [" + propertyName + "] isn't a map, but of type [" + value.getClass().getName() + "]"); } } + + /** + * Returns and removes the specified property as an {@link Object} from the specified configuration map. + */ + public static Object readObject(Map configuration, String propertyName) { + Object value = configuration.remove(propertyName); + if (value == null) { + throw new IllegalArgumentException("required property [" + propertyName + "] is missing"); + } + return value; + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java index d86fea0485f..0944305c8b6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java @@ -87,38 +87,41 @@ public class ConvertProcessor implements Processor { public static final String TYPE = "convert"; - private final Map fields; + private final String field; + private final Type convertType; - ConvertProcessor(Map fields) { - this.fields = fields; + ConvertProcessor(String field, Type convertType) { + this.field = 
field; + this.convertType = convertType; } - Map getFields() { - return fields; + String getField() { + return field; + } + + Type getConvertType() { + return convertType; } @Override public void execute(IngestDocument document) { - for(Map.Entry entry : fields.entrySet()) { - Type type = entry.getValue(); - Object oldValue = document.getFieldValue(entry.getKey(), Object.class); - Object newValue; - if (oldValue == null) { - throw new IllegalArgumentException("Field [" + entry.getKey() + "] is null, cannot be converted to type [" + type + "]"); - } - - if (oldValue instanceof List) { - List list = (List) oldValue; - List newList = new ArrayList<>(); - for (Object value : list) { - newList.add(type.convert(value)); - } - newValue = newList; - } else { - newValue = type.convert(oldValue); - } - document.setFieldValue(entry.getKey(), newValue); + Object oldValue = document.getFieldValue(field, Object.class); + Object newValue; + if (oldValue == null) { + throw new IllegalArgumentException("Field [" + field + "] is null, cannot be converted to type [" + convertType + "]"); } + + if (oldValue instanceof List) { + List list = (List) oldValue; + List newList = new ArrayList<>(); + for (Object value : list) { + newList.add(convertType.convert(value)); + } + newValue = newList; + } else { + newValue = convertType.convert(oldValue); + } + document.setFieldValue(field, newValue); } @Override @@ -129,12 +132,9 @@ public class ConvertProcessor implements Processor { public static class Factory implements Processor.Factory { @Override public ConvertProcessor create(Map config) throws Exception { - Map fields = ConfigurationUtils.readMap(config, "fields"); - Map convertFields = new HashMap<>(); - for (Map.Entry entry : fields.entrySet()) { - convertFields.put(entry.getKey(), Type.fromString(entry.getValue())); - } - return new ConvertProcessor(Collections.unmodifiableMap(convertFields)); + String field = ConfigurationUtils.readStringProperty(config, "field"); + Type convertType = 
Type.fromString(ConfigurationUtils.readStringProperty(config, "type")); + return new ConvertProcessor(field, convertType); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubExpression.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubExpression.java deleted file mode 100644 index 54d55a0add0..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubExpression.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor.gsub; - -import java.util.Objects; -import java.util.regex.Pattern; - -/** - * Represents a gsub expression containing the field name, the pattern to look for and its string replacement. 
- */ -public class GsubExpression { - - private final String fieldName; - private final Pattern pattern; - private final String replacement; - - public GsubExpression(String fieldName, Pattern pattern, String replacement) { - this.fieldName = Objects.requireNonNull(fieldName); - this.pattern = Objects.requireNonNull(pattern); - this.replacement = Objects.requireNonNull(replacement); - } - - public String getFieldName() { - return fieldName; - } - - public Pattern getPattern() { - return pattern; - } - - public String getReplacement() { - return replacement; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - GsubExpression that = (GsubExpression) o; - return Objects.equals(fieldName, that.fieldName) && - Objects.equals(pattern.pattern(), that.pattern.pattern()) && - Objects.equals(replacement, that.replacement); - } - - @Override - public int hashCode() { - return Objects.hash(fieldName, pattern, replacement); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java index 8eb6c7dbc89..e15f99d9fed 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java @@ -23,8 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.util.ArrayList; -import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -37,27 +35,38 @@ public class GsubProcessor implements Processor { public static final String TYPE = "gsub"; - private final List gsubExpressions; + private final String field; + private final Pattern pattern; + 
private final String replacement; - GsubProcessor(List gsubExpressions) { - this.gsubExpressions = gsubExpressions; + GsubProcessor(String field, Pattern pattern, String replacement) { + this.field = field; + this.pattern = pattern; + this.replacement = replacement; } - List getGsubExpressions() { - return gsubExpressions; + String getField() { + return field; } + Pattern getPattern() { + return pattern; + } + + String getReplacement() { + return replacement; + } + + @Override public void execute(IngestDocument document) { - for (GsubExpression gsubExpression : gsubExpressions) { - String oldVal = document.getFieldValue(gsubExpression.getFieldName(), String.class); - if (oldVal == null) { - throw new IllegalArgumentException("field [" + gsubExpression.getFieldName() + "] is null, cannot match pattern."); - } - Matcher matcher = gsubExpression.getPattern().matcher(oldVal); - String newVal = matcher.replaceAll(gsubExpression.getReplacement()); - document.setFieldValue(gsubExpression.getFieldName(), newVal); + String oldVal = document.getFieldValue(field, String.class); + if (oldVal == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot match pattern."); } + Matcher matcher = pattern.matcher(oldVal); + String newVal = matcher.replaceAll(replacement); + document.setFieldValue(field, newVal); } @Override @@ -68,25 +77,11 @@ public class GsubProcessor implements Processor { public static class Factory implements Processor.Factory { @Override public GsubProcessor create(Map config) throws Exception { - List> gsubConfig = ConfigurationUtils.readList(config, "expressions"); - List gsubExpressions = new ArrayList<>(); - for (Map stringObjectMap : gsubConfig) { - String field = stringObjectMap.get("field"); - if (field == null) { - throw new IllegalArgumentException("no [field] specified for gsub expression"); - } - String pattern = stringObjectMap.get("pattern"); - if (pattern == null) { - throw new IllegalArgumentException("no [pattern] 
specified for gsub expression"); - } - String replacement = stringObjectMap.get("replacement"); - if (replacement == null) { - throw new IllegalArgumentException("no [replacement] specified for gsub expression"); - } - Pattern searchPattern = Pattern.compile(pattern); - gsubExpressions.add(new GsubExpression(field, searchPattern, replacement)); - } - return new GsubProcessor(gsubExpressions); + String field = ConfigurationUtils.readStringProperty(config, "field"); + String pattern = ConfigurationUtils.readStringProperty(config, "pattern"); + String replacement = ConfigurationUtils.readStringProperty(config, "replacement"); + Pattern searchPattern = Pattern.compile(pattern); + return new GsubProcessor(field, searchPattern, replacement); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java index 40016b489fd..85be2316777 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java @@ -23,7 +23,7 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.util.Collections; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -36,28 +36,32 @@ public class JoinProcessor implements Processor { public static final String TYPE = "join"; - private final Map fields; + private final String field; + private final String separator; - JoinProcessor(Map fields) { - this.fields = fields; + JoinProcessor(String field, String separator) { + this.field = field; + this.separator = separator; } - Map getFields() { - return fields; + String getField() { + return field; + } + + String getSeparator() { + return separator; } @Override public 
void execute(IngestDocument document) { - for(Map.Entry entry : fields.entrySet()) { - List list = document.getFieldValue(entry.getKey(), List.class); - if (list == null) { - throw new IllegalArgumentException("field [" + entry.getKey() + "] is null, cannot join."); - } - String joined = list.stream() - .map(Object::toString) - .collect(Collectors.joining(entry.getValue())); - document.setFieldValue(entry.getKey(), joined); + List list = document.getFieldValue(field, List.class); + if (list == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot join."); } + String joined = list.stream() + .map(Object::toString) + .collect(Collectors.joining(separator)); + document.setFieldValue(field, joined); } @Override @@ -68,8 +72,9 @@ public class JoinProcessor implements Processor { public static class Factory implements Processor.Factory { @Override public JoinProcessor create(Map config) throws Exception { - Map fields = ConfigurationUtils.readMap(config, "fields"); - return new JoinProcessor(Collections.unmodifiableMap(fields)); + String field = ConfigurationUtils.readStringProperty(config, "field"); + String separator = ConfigurationUtils.readStringProperty(config, "separator"); + return new JoinProcessor(field, separator); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java index 751a566d10a..6bff6223053 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java @@ -33,8 +33,8 @@ public class LowercaseProcessor extends AbstractStringProcessor { public static final String TYPE = "lowercase"; - LowercaseProcessor(Collection fields) { - super(fields); + LowercaseProcessor(String field) { + super(field); } @Override @@ -49,8 
+49,8 @@ public class LowercaseProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected LowercaseProcessor newProcessor(Collection fields) { - return new LowercaseProcessor(fields); + protected LowercaseProcessor newProcessor(String field) { + return new LowercaseProcessor(field); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java index f04c407b14a..80cd017ef78 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java @@ -23,9 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.util.Collection; -import java.util.Collections; -import java.util.List; import java.util.Map; /** @@ -35,21 +32,19 @@ public class RemoveProcessor implements Processor { public static final String TYPE = "remove"; - private final Collection fields; + private final String field; - RemoveProcessor(Collection fields) { - this.fields = fields; + RemoveProcessor(String field) { + this.field = field; } - Collection getFields() { - return fields; + String getField() { + return field; } @Override public void execute(IngestDocument document) { - for(String field : fields) { - document.removeField(field); - } + document.removeField(field); } @Override @@ -60,8 +55,8 @@ public class RemoveProcessor implements Processor { public static class Factory implements Processor.Factory { @Override public RemoveProcessor create(Map config) throws Exception { - List fields = ConfigurationUtils.readList(config, "fields"); - return new RemoveProcessor(Collections.unmodifiableList(fields)); + String field = 
ConfigurationUtils.readStringProperty(config, "field"); + return new RemoveProcessor(field); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java index 25dde43ea93..7e894e53893 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java @@ -23,6 +23,7 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; +import java.util.Arrays; import java.util.Collections; import java.util.Map; @@ -33,37 +34,39 @@ public class RenameProcessor implements Processor { public static final String TYPE = "rename"; - private final Map fields; + private final String oldFieldName; + private final String newFieldName; - RenameProcessor(Map fields) { - this.fields = fields; + RenameProcessor(String oldFieldName, String newFieldName) { + this.oldFieldName = oldFieldName; + this.newFieldName = newFieldName; } - Map getFields() { - return fields; + String getOldFieldName() { + return oldFieldName; + } + + String getNewFieldName() { + return newFieldName; } @Override public void execute(IngestDocument document) { - for(Map.Entry entry : fields.entrySet()) { - String oldFieldName = entry.getKey(); - if (document.hasField(oldFieldName) == false) { - throw new IllegalArgumentException("field [" + oldFieldName + "] doesn't exist"); - } - String newFieldName = entry.getValue(); - if (document.hasField(newFieldName)) { - throw new IllegalArgumentException("field [" + newFieldName + "] already exists"); - } + if (document.hasField(oldFieldName) == false) { + throw new IllegalArgumentException("field [" + oldFieldName + "] doesn't exist"); + } + if (document.hasField(newFieldName)) { + throw new 
IllegalArgumentException("field [" + newFieldName + "] already exists"); + } - Object oldValue = document.getFieldValue(entry.getKey(), Object.class); - document.setFieldValue(newFieldName, oldValue); - try { - document.removeField(oldFieldName); - } catch (Exception e) { - //remove the new field if the removal of the old one failed - document.removeField(newFieldName); - throw e; - } + Object oldValue = document.getFieldValue(oldFieldName, Object.class); + document.setFieldValue(newFieldName, oldValue); + try { + document.removeField(oldFieldName); + } catch (Exception e) { + //remove the new field if the removal of the old one failed + document.removeField(newFieldName); + throw e; } } @@ -75,8 +78,9 @@ public class RenameProcessor implements Processor { public static class Factory implements Processor.Factory { @Override public RenameProcessor create(Map config) throws Exception { - Map fields = ConfigurationUtils.readMap(config, "fields"); - return new RenameProcessor(Collections.unmodifiableMap(fields)); + String field = ConfigurationUtils.readStringProperty(config, "field"); + String newField = ConfigurationUtils.readStringProperty(config, "to"); + return new RenameProcessor(field, newField); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java index f46fe5052f5..f14be2a3217 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java @@ -23,6 +23,7 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; +import java.util.Arrays; import java.util.Collections; import java.util.Map; @@ -34,21 +35,25 @@ public class SetProcessor implements Processor { public static final String TYPE = 
"set"; - private final Map fields; + private final String field; + private final Object value; - SetProcessor(Map fields) { - this.fields = fields; + SetProcessor(String field, Object value) { + this.field = field; + this.value = value; } - Map getFields() { - return fields; + String getField() { + return field; + } + + Object getValue() { + return value; } @Override public void execute(IngestDocument document) { - for(Map.Entry entry : fields.entrySet()) { - document.setFieldValue(entry.getKey(), entry.getValue()); - } + document.setFieldValue(field, value); } @Override @@ -59,8 +64,9 @@ public class SetProcessor implements Processor { public static final class Factory implements Processor.Factory { @Override public SetProcessor create(Map config) throws Exception { - Map fields = ConfigurationUtils.readMap(config, "fields"); - return new SetProcessor(Collections.unmodifiableMap(fields)); + String field = ConfigurationUtils.readStringProperty(config, "field"); + Object value = ConfigurationUtils.readObject(config, "value"); + return new SetProcessor(field, value); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java index 6d9dea24947..1895fc10d37 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java @@ -36,25 +36,29 @@ public class SplitProcessor implements Processor { public static final String TYPE = "split"; - private final Map fields; + private final String field; + private final String separator; - SplitProcessor(Map fields) { - this.fields = fields; + SplitProcessor(String field, String separator) { + this.field = field; + this.separator = separator; } - Map getFields() { - return fields; + String getField() { + return field; + } + + String getSeparator() { + return 
separator; } @Override public void execute(IngestDocument document) { - for(Map.Entry entry : fields.entrySet()) { - String oldVal = document.getFieldValue(entry.getKey(), String.class); - if (oldVal == null) { - throw new IllegalArgumentException("field [" + entry.getKey() + "] is null, cannot split."); - } - document.setFieldValue(entry.getKey(), Arrays.asList(oldVal.split(entry.getValue()))); + String oldVal = document.getFieldValue(field, String.class); + if (oldVal == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot split."); } + document.setFieldValue(field, Arrays.asList(oldVal.split(separator))); } @Override @@ -65,8 +69,8 @@ public class SplitProcessor implements Processor { public static class Factory implements Processor.Factory { @Override public SplitProcessor create(Map config) throws Exception { - Map fields = ConfigurationUtils.readMap(config, "fields"); - return new SplitProcessor(Collections.unmodifiableMap(fields)); + String field = ConfigurationUtils.readStringProperty(config, "field"); + return new SplitProcessor(field, ConfigurationUtils.readStringProperty(config, "separator")); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java index d3090a37d41..94b617ba41e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java @@ -31,8 +31,8 @@ public class TrimProcessor extends AbstractStringProcessor { public static final String TYPE = "trim"; - TrimProcessor(Collection fields) { - super(fields); + TrimProcessor(String field) { + super(field); } @Override @@ -47,8 +47,8 @@ public class TrimProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected TrimProcessor 
newProcessor(Collection fields) { - return new TrimProcessor(fields); + protected TrimProcessor newProcessor(String field) { + return new TrimProcessor(field); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java index a4b281fe2e9..fe0d029cf3c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java @@ -32,8 +32,8 @@ public class UppercaseProcessor extends AbstractStringProcessor { public static final String TYPE = "uppercase"; - UppercaseProcessor(Collection fields) { - super(fields); + UppercaseProcessor(String field) { + super(field); } @Override @@ -48,8 +48,8 @@ public class UppercaseProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected UppercaseProcessor newProcessor(Collection fields) { - return new UppercaseProcessor(fields); + protected UppercaseProcessor newProcessor(String field) { + return new UppercaseProcessor(field); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java index 3b80b5a38c6..6bb2f9dd0dc 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.equalTo; public abstract class AbstractStringProcessorTestCase extends ESTestCase { - protected abstract AbstractStringProcessor newProcessor(Collection fields); + protected abstract AbstractStringProcessor 
newProcessor(String field); protected String modifyInput(String input) { return input; @@ -43,23 +43,16 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { public void testProcessor() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - int numFields = randomIntBetween(1, 5); - Map expected = new HashMap<>(); - for (int i = 0; i < numFields; i++) { - String fieldValue = RandomDocumentPicks.randomString(random()); - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, modifyInput(fieldValue)); - expected.put(fieldName, expectedResult(fieldValue)); - } - Processor processor = newProcessor(expected.keySet()); + String fieldValue = RandomDocumentPicks.randomString(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, modifyInput(fieldValue)); + Processor processor = newProcessor(fieldName); processor.execute(ingestDocument); - for (Map.Entry entry : expected.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult(fieldValue))); } public void testFieldNotFound() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = newProcessor(Collections.singletonList(fieldName)); + Processor processor = newProcessor(fieldName); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); try { processor.execute(ingestDocument); @@ -70,7 +63,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { } public void testNullValue() throws Exception { - Processor processor = newProcessor(Collections.singletonList("field")); + Processor processor = newProcessor("field"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), 
Collections.singletonMap("field", null)); try { processor.execute(ingestDocument); @@ -82,7 +75,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { public void testNonStringValue() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = newProcessor(Collections.singletonList(fieldName)); + Processor processor = newProcessor(fieldName); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue(fieldName, randomInt()); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java index 108d7104a0c..369e4d461de 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java @@ -34,30 +34,19 @@ public class ConvertProcessorFactoryTests extends ESTestCase { ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); Map config = new HashMap<>(); ConvertProcessor.Type type = randomFrom(ConvertProcessor.Type.values()); - Map fields = Collections.singletonMap("field1", type.toString()); - config.put("fields", fields); + config.put("field", "field1"); + config.put("type", type.toString()); ConvertProcessor convertProcessor = factory.create(config); - assertThat(convertProcessor.getFields().size(), equalTo(1)); - assertThat(convertProcessor.getFields().get("field1"), equalTo(type)); - } - - public void testCreateMissingFields() throws Exception { - ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); - Map config = new HashMap<>(); - try { - factory.create(config); - fail("factory create should have failed"); - } catch(IllegalArgumentException e) { - 
assertThat(e.getMessage(), equalTo("required property [fields] is missing")); - } + assertThat(convertProcessor.getField(), equalTo("field1")); + assertThat(convertProcessor.getConvertType(), equalTo(type)); } public void testCreateUnsupportedType() throws Exception { ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); Map config = new HashMap<>(); String type = "type-" + randomAsciiOfLengthBetween(1, 10); - Map fields = Collections.singletonMap("field1", type); - config.put("fields", fields); + config.put("field", "field1"); + config.put("type", type); try { factory.create(config); fail("factory create should have failed"); @@ -65,4 +54,29 @@ public class ConvertProcessorFactoryTests extends ESTestCase { assertThat(e.getMessage(), Matchers.equalTo("type [" + type + "] not supported, cannot convert field.")); } } + + public void testCreateNoFieldPresent() throws Exception { + ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); + Map config = new HashMap<>(); + String type = "type-" + randomAsciiOfLengthBetween(1, 10); + config.put("type", type); + try { + factory.create(config); + fail("factory create should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), Matchers.equalTo("required property [field] is missing")); + } + } + + public void testCreateNoTypePresent() throws Exception { + ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + try { + factory.create(config); + fail("factory create should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), Matchers.equalTo("required property [type] is missing")); + } + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java index 85dcf860e20..fe560656ce5 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java @@ -34,45 +34,27 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertInt() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - int randomInt = randomInt(); - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomInt); - fields.put(fieldName, Type.INTEGER); - expectedResult.put(fieldName, randomInt); - } - Processor processor = new ConvertProcessor(fields); + int randomInt = randomInt(); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomInt); + Processor processor = new ConvertProcessor(fieldName, Type.INTEGER); processor.execute(ingestDocument); - for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), Integer.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt)); } public void testConvertIntList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map> expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - int numItems = randomIntBetween(1, 10); - List fieldValue = new ArrayList<>(); - List expectedList = new ArrayList<>(); - for (int j = 0; j < numItems; j++) { - int randomInt = randomInt(); - fieldValue.add(Integer.toString(randomInt)); - expectedList.add(randomInt); - } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - 
fields.put(fieldName, Type.INTEGER); - expectedResult.put(fieldName, expectedList); + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { + int randomInt = randomInt(); + fieldValue.add(Integer.toString(randomInt)); + expectedList.add(randomInt); } - Processor processor = new ConvertProcessor(fields); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + Processor processor = new ConvertProcessor(fieldName, Type.INTEGER); processor.execute(ingestDocument); - for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } public void testConvertIntError() throws Exception { @@ -81,8 +63,7 @@ public class ConvertProcessorTests extends ESTestCase { String value = "string-" + randomAsciiOfLengthBetween(1, 10); ingestDocument.setFieldValue(fieldName, value); - Map convert = Collections.singletonMap(fieldName, Type.INTEGER); - Processor processor = new ConvertProcessor(convert); + Processor processor = new ConvertProcessor(fieldName, Type.INTEGER); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -93,46 +74,30 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertFloat() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); Map expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - float randomFloat = randomFloat(); - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomFloat); - fields.put(fieldName, Type.FLOAT); - expectedResult.put(fieldName, randomFloat); - } + float randomFloat = randomFloat(); + 
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomFloat); + expectedResult.put(fieldName, randomFloat); - Processor processor = new ConvertProcessor(fields); + Processor processor = new ConvertProcessor(fieldName, Type.FLOAT); processor.execute(ingestDocument); - for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), Float.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, Float.class), equalTo(randomFloat)); } public void testConvertFloatList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map> expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - int numItems = randomIntBetween(1, 10); - List fieldValue = new ArrayList<>(); - List expectedList = new ArrayList<>(); - for (int j = 0; j < numItems; j++) { - float randomFloat = randomFloat(); - fieldValue.add(Float.toString(randomFloat)); - expectedList.add(randomFloat); - } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - fields.put(fieldName, Type.FLOAT); - expectedResult.put(fieldName, expectedList); + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { + float randomFloat = randomFloat(); + fieldValue.add(Float.toString(randomFloat)); + expectedList.add(randomFloat); } - Processor processor = new ConvertProcessor(fields); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + Processor processor = new ConvertProcessor(fieldName, Type.FLOAT); processor.execute(ingestDocument); - for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); - } + 
assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } public void testConvertFloatError() throws Exception { @@ -141,8 +106,7 @@ public class ConvertProcessorTests extends ESTestCase { String value = "string-" + randomAsciiOfLengthBetween(1, 10); ingestDocument.setFieldValue(fieldName, value); - Map convert = Collections.singletonMap(fieldName, Type.FLOAT); - Processor processor = new ConvertProcessor(convert); + Processor processor = new ConvertProcessor(fieldName, Type.FLOAT); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -155,52 +119,36 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Map fields = new HashMap<>(); Map expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { + boolean randomBoolean = randomBoolean(); + String booleanString = Boolean.toString(randomBoolean); + if (randomBoolean) { + booleanString = booleanString.toUpperCase(Locale.ROOT); + } + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, booleanString); + + Processor processor = new ConvertProcessor(fieldName, Type.BOOLEAN); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue(fieldName, Boolean.class), equalTo(randomBoolean)); + } + + public void testConvertBooleanList() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { boolean randomBoolean = randomBoolean(); String booleanString = Boolean.toString(randomBoolean); if (randomBoolean) { booleanString = booleanString.toUpperCase(Locale.ROOT); } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, booleanString); - 
fields.put(fieldName, Type.BOOLEAN); - expectedResult.put(fieldName, randomBoolean); + fieldValue.add(booleanString); + expectedList.add(randomBoolean); } - - Processor processor = new ConvertProcessor(fields); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + Processor processor = new ConvertProcessor(fieldName, Type.BOOLEAN); processor.execute(ingestDocument); - for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), Boolean.class), equalTo(entry.getValue())); - } - } - - public void testConvertBooleanList() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map> expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - int numItems = randomIntBetween(1, 10); - List fieldValue = new ArrayList<>(); - List expectedList = new ArrayList<>(); - for (int j = 0; j < numItems; j++) { - boolean randomBoolean = randomBoolean(); - String booleanString = Boolean.toString(randomBoolean); - if (randomBoolean) { - booleanString = booleanString.toUpperCase(Locale.ROOT); - } - fieldValue.add(booleanString); - expectedList.add(randomBoolean); - } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - fields.put(fieldName, Type.BOOLEAN); - expectedResult.put(fieldName, expectedList); - } - Processor processor = new ConvertProcessor(fields); - processor.execute(ingestDocument); - for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } public void testConvertBooleanError() throws Exception { @@ -215,8 +163,7 @@ public class ConvertProcessorTests extends ESTestCase { } ingestDocument.setFieldValue(fieldName, 
fieldValue); - Map convert = Collections.singletonMap(fieldName, Type.BOOLEAN); - Processor processor = new ConvertProcessor(convert); + Processor processor = new ConvertProcessor(fieldName, Type.BOOLEAN); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -227,94 +174,75 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertString() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - Object fieldValue; - String expectedFieldValue; - switch(randomIntBetween(0, 2)) { - case 0: - float randomFloat = randomFloat(); - fieldValue = randomFloat; - expectedFieldValue = Float.toString(randomFloat); - break; - case 1: - int randomInt = randomInt(); - fieldValue = randomInt; - expectedFieldValue = Integer.toString(randomInt); - break; - case 2: - boolean randomBoolean = randomBoolean(); - fieldValue = randomBoolean; - expectedFieldValue = Boolean.toString(randomBoolean); - break; - default: - throw new UnsupportedOperationException(); - } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - fields.put(fieldName, Type.STRING); - expectedResult.put(fieldName, expectedFieldValue); + Object fieldValue; + String expectedFieldValue; + switch(randomIntBetween(0, 2)) { + case 0: + float randomFloat = randomFloat(); + fieldValue = randomFloat; + expectedFieldValue = Float.toString(randomFloat); + break; + case 1: + int randomInt = randomInt(); + fieldValue = randomInt; + expectedFieldValue = Integer.toString(randomInt); + break; + case 2: + boolean randomBoolean = randomBoolean(); + fieldValue = randomBoolean; + expectedFieldValue = Boolean.toString(randomBoolean); + break; + default: + throw new UnsupportedOperationException(); } + String fieldName = 
RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(fields); + Processor processor = new ConvertProcessor(fieldName, Type.STRING); processor.execute(ingestDocument); - for (Map.Entry entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedFieldValue)); } public void testConvertStringList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map> expectedResult = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - int numItems = randomIntBetween(1, 10); - List fieldValue = new ArrayList<>(); - List expectedList = new ArrayList<>(); - for (int j = 0; j < numItems; j++) { - Object randomValue; - String randomValueString; - switch(randomIntBetween(0, 2)) { - case 0: - float randomFloat = randomFloat(); - randomValue = randomFloat; - randomValueString = Float.toString(randomFloat); - break; - case 1: - int randomInt = randomInt(); - randomValue = randomInt; - randomValueString = Integer.toString(randomInt); - break; - case 2: - boolean randomBoolean = randomBoolean(); - randomValue = randomBoolean; - randomValueString = Boolean.toString(randomBoolean); - break; - default: - throw new UnsupportedOperationException(); - } - fieldValue.add(randomValue); - expectedList.add(randomValueString); + int numItems = randomIntBetween(1, 10); + List fieldValue = new ArrayList<>(); + List expectedList = new ArrayList<>(); + for (int j = 0; j < numItems; j++) { + Object randomValue; + String randomValueString; + switch(randomIntBetween(0, 2)) { + case 0: + float randomFloat = randomFloat(); + randomValue = randomFloat; + randomValueString = Float.toString(randomFloat); + break; + case 1: + int randomInt = randomInt(); + 
randomValue = randomInt; + randomValueString = Integer.toString(randomInt); + break; + case 2: + boolean randomBoolean = randomBoolean(); + randomValue = randomBoolean; + randomValueString = Boolean.toString(randomBoolean); + break; + default: + throw new UnsupportedOperationException(); } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - fields.put(fieldName, Type.STRING); - expectedResult.put(fieldName, expectedList); + fieldValue.add(randomValue); + expectedList.add(randomValueString); } - Processor processor = new ConvertProcessor(fields); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + Processor processor = new ConvertProcessor(fieldName, Type.STRING); processor.execute(ingestDocument); - for (Map.Entry> entry : expectedResult.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), List.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } public void testConvertNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Type type = randomFrom(Type.values()); - Map convert = Collections.singletonMap(fieldName, type); - Processor processor = new ConvertProcessor(convert); + Processor processor = new ConvertProcessor(fieldName, type); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -326,8 +254,7 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertNullField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); Type type = randomFrom(Type.values()); - Map convert = Collections.singletonMap("field", type); - Processor processor = new ConvertProcessor(convert); + Processor 
processor = new ConvertProcessor("field", type); try { processor.execute(ingestDocument); fail("processor execute should have failed"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java index c032a19347a..e1e085d135f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java @@ -33,80 +33,51 @@ public class GsubProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); - List> expressions = new ArrayList<>(); - Map expression = new HashMap<>(); - expression.put("field", "field1"); - expression.put("pattern", "\\."); - expression.put("replacement", "-"); - expressions.add(expression); - config.put("expressions", expressions); + config.put("field", "field1"); + config.put("pattern", "\\."); + config.put("replacement", "-"); GsubProcessor gsubProcessor = factory.create(config); - assertThat(gsubProcessor.getGsubExpressions().size(), equalTo(1)); - GsubExpression gsubExpression = gsubProcessor.getGsubExpressions().get(0); - assertThat(gsubExpression.getFieldName(), equalTo("field1")); - assertThat(gsubExpression.getPattern().toString(), equalTo("\\.")); - assertThat(gsubExpression.getReplacement(), equalTo("-")); - } - - public void testCreateMissingExpressions() throws Exception { - GsubProcessor.Factory factory = new GsubProcessor.Factory(); - Map config = new HashMap<>(); - try { - factory.create(config); - fail("factory create should have failed"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [expressions] is missing")); - } + assertThat(gsubProcessor.getField(), 
equalTo("field1")); + assertThat(gsubProcessor.getPattern().toString(), equalTo("\\.")); + assertThat(gsubProcessor.getReplacement(), equalTo("-")); } public void testCreateNoFieldPresent() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); - List> expressions = new ArrayList<>(); - Map expression = new HashMap<>(); - expression.put("pattern", "\\."); - expression.put("replacement", "-"); - expressions.add(expression); - config.put("expressions", expressions); + config.put("pattern", "\\."); + config.put("replacement", "-"); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("no [field] specified for gsub expression")); + assertThat(e.getMessage(), equalTo("required property [field] is missing")); } } public void testCreateNoPatternPresent() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); - List> expressions = new ArrayList<>(); - Map expression = new HashMap<>(); - expression.put("field", "field1"); - expression.put("replacement", "-"); - expressions.add(expression); - config.put("expressions", expressions); + config.put("field", "field1"); + config.put("replacement", "-"); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("no [pattern] specified for gsub expression")); + assertThat(e.getMessage(), equalTo("required property [pattern] is missing")); } } public void testCreateNoReplacementPresent() throws Exception { GsubProcessor.Factory factory = new GsubProcessor.Factory(); Map config = new HashMap<>(); - List> expressions = new ArrayList<>(); - Map expression = new HashMap<>(); - expression.put("field", "field1"); - expression.put("pattern", "\\."); - expressions.add(expression); - config.put("expressions", expressions); + config.put("field", 
"field1"); + config.put("pattern", "\\."); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("no [replacement] specified for gsub expression")); + assertThat(e.getMessage(), equalTo("required property [replacement] is missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java index 89c77135687..8eb5be790ea 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java @@ -37,25 +37,17 @@ public class GsubProcessorTests extends ESTestCase { public void testGsub() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - int numFields = randomIntBetween(1, 5); - List expressions = new ArrayList<>(); - for (int i = 0; i < numFields; i++) { - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); - expressions.add(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); - } - Processor processor = new GsubProcessor(expressions); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); + Processor processor = new GsubProcessor(fieldName, Pattern.compile("\\."), "-"); processor.execute(ingestDocument); - for (GsubExpression expression : expressions) { - assertThat(ingestDocument.getFieldValue(expression.getFieldName(), String.class), equalTo("127-0-0-1")); - } + assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo("127-0-0-1")); } public void testGsubNotAStringValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = 
RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, 123); - List gsubExpressions = Collections.singletonList(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); - Processor processor = new GsubProcessor(gsubExpressions); + Processor processor = new GsubProcessor(fieldName, Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); @@ -67,8 +59,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsubFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - List gsubExpressions = Collections.singletonList(new GsubExpression(fieldName, Pattern.compile("\\."), "-")); - Processor processor = new GsubProcessor(gsubExpressions); + Processor processor = new GsubProcessor(fieldName, Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); @@ -79,8 +70,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsubNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - List gsubExpressions = Collections.singletonList(new GsubExpression("field", Pattern.compile("\\."), "-")); - Processor processor = new GsubProcessor(gsubExpressions); + Processor processor = new GsubProcessor("field", Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java index 8ad05eec6f8..deebe50b9c5 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.join; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -32,20 +31,34 @@ public class JoinProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { JoinProcessor.Factory factory = new JoinProcessor.Factory(); Map config = new HashMap<>(); - Map fields = Collections.singletonMap("field1", "-"); - config.put("fields", fields); + config.put("field", "field1"); + config.put("separator", "-"); JoinProcessor joinProcessor = factory.create(config); - assertThat(joinProcessor.getFields(), equalTo(fields)); + assertThat(joinProcessor.getField(), equalTo("field1")); + assertThat(joinProcessor.getSeparator(), equalTo("-")); } - public void testCreateMissingFields() throws Exception { + public void testCreateNoFieldPresent() throws Exception { JoinProcessor.Factory factory = new JoinProcessor.Factory(); Map config = new HashMap<>(); + config.put("separator", "-"); try { factory.create(config); fail("factory create should have failed"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [field] is missing")); + } + } + + public void testCreateNoSeparatorPresent() throws Exception { + JoinProcessor.Factory factory = new JoinProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + try { + factory.create(config); + fail("factory create should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [separator] is missing")); } } } diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java index 4df240ece45..df6c835b3c9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java @@ -35,68 +35,49 @@ public class JoinProcessorTests extends ESTestCase { public void testJoinStrings() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map expectedResultMap = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - int numItems = randomIntBetween(1, 10); - String separator = randomFrom(SEPARATORS); - List fieldValue = new ArrayList<>(numItems); - String expectedResult = ""; - for (int j = 0; j < numItems; j++) { - String value = randomAsciiOfLengthBetween(1, 10); - fieldValue.add(value); - expectedResult += value; - if (j < numItems - 1) { - expectedResult += separator; - } + int numItems = randomIntBetween(1, 10); + String separator = randomFrom(SEPARATORS); + List fieldValue = new ArrayList<>(numItems); + String expectedResult = ""; + for (int j = 0; j < numItems; j++) { + String value = randomAsciiOfLengthBetween(1, 10); + fieldValue.add(value); + expectedResult += value; + if (j < numItems - 1) { + expectedResult += separator; } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - expectedResultMap.put(fieldName, expectedResult); - fields.put(fieldName, separator); } - Processor processor = new JoinProcessor(fields); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + Processor processor = new JoinProcessor(fieldName, separator); processor.execute(ingestDocument); - for (Map.Entry entry : 
expectedResultMap.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult)); } public void testJoinIntegers() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map expectedResultMap = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - int numItems = randomIntBetween(1, 10); - String separator = randomFrom(SEPARATORS); - List fieldValue = new ArrayList<>(numItems); - String expectedResult = ""; - for (int j = 0; j < numItems; j++) { - int value = randomInt(); - fieldValue.add(value); - expectedResult += value; - if (j < numItems - 1) { - expectedResult += separator; - } + int numItems = randomIntBetween(1, 10); + String separator = randomFrom(SEPARATORS); + List fieldValue = new ArrayList<>(numItems); + String expectedResult = ""; + for (int j = 0; j < numItems; j++) { + int value = randomInt(); + fieldValue.add(value); + expectedResult += value; + if (j < numItems - 1) { + expectedResult += separator; } - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - expectedResultMap.put(fieldName, expectedResult); - fields.put(fieldName, separator); } - Processor processor = new JoinProcessor(fields); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); + Processor processor = new JoinProcessor(fieldName, separator); processor.execute(ingestDocument); - for (Map.Entry entry : expectedResultMap.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), String.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult)); } public void testJoinNonListField() throws Exception { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, randomAsciiOfLengthBetween(1, 10)); - Map join = Collections.singletonMap(fieldName, "-"); - Processor processor = new JoinProcessor(join); + Processor processor = new JoinProcessor(fieldName, "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -107,7 +88,7 @@ public class JoinProcessorTests extends ESTestCase { public void testJoinNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new JoinProcessor(Collections.singletonMap(fieldName, "-")); + Processor processor = new JoinProcessor(fieldName, "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -117,7 +98,7 @@ public class JoinProcessorTests extends ESTestCase { public void testJoinNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Processor processor = new JoinProcessor(Collections.singletonMap("field", "-")); + Processor processor = new JoinProcessor("field", "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java index 2a18eddf64c..34864e38eea 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java @@ -33,20 +33,19 @@ public class 
LowercaseProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); Map config = new HashMap<>(); - List fields = Collections.singletonList("field1"); - config.put("fields", fields); + config.put("field", "field1"); LowercaseProcessor uppercaseProcessor = factory.create(config); - assertThat(uppercaseProcessor.getFields(), equalTo(fields)); + assertThat(uppercaseProcessor.getField(), equalTo("field1")); } - public void testCreateMissingFields() throws Exception { + public void testCreateMissingField() throws Exception { LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); Map config = new HashMap<>(); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + assertThat(e.getMessage(), equalTo("required property [field] is missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java index 07e14062764..6e85b338b1a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java @@ -27,8 +27,8 @@ import java.util.Locale; public class LowercaseProcessorTests extends AbstractStringProcessorTestCase { @Override - protected AbstractStringProcessor newProcessor(Collection fields) { - return new LowercaseProcessor(fields); + protected AbstractStringProcessor newProcessor(String field) { + return new LowercaseProcessor(field); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java index 27933ea66e3..f45f3bc59d0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java @@ -33,20 +33,19 @@ public class RemoveProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); Map config = new HashMap<>(); - List fields = Collections.singletonList("field1"); - config.put("fields", fields); + config.put("field", "field1"); RemoveProcessor removeProcessor = factory.create(config); - assertThat(removeProcessor.getFields(), equalTo(fields)); + assertThat(removeProcessor.getField(), equalTo("field1")); } - public void testCreateMissingFields() throws Exception { + public void testCreateMissingField() throws Exception { RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); Map config = new HashMap<>(); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + assertThat(e.getMessage(), equalTo("required property [field] is missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java index 50b1ee198f9..2ccfd5add93 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java @@ -36,22 +36,16 @@ public class RemoveProcessorTests extends ESTestCase { public void testRemoveFields() throws Exception { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random()); - int numFields = randomIntBetween(1, 5); - Set fields = new HashSet<>(); - for (int i = 0; i < numFields; i++) { - fields.add(RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument)); - } - Processor processor = new RemoveProcessor(fields); + String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + Processor processor = new RemoveProcessor(field); processor.execute(ingestDocument); - for (String field : fields) { - assertThat(ingestDocument.hasField(field), equalTo(false)); - } + assertThat(ingestDocument.hasField(field), equalTo(false)); } public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RemoveProcessor(Collections.singletonList(fieldName)); + Processor processor = new RemoveProcessor(fieldName); try { processor.execute(ingestDocument); fail("remove field should have failed"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java index d858b4aea07..eba08ad6c46 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java @@ -32,20 +32,34 @@ public class RenameProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); - Map fields = Collections.singletonMap("field1", "value1"); - config.put("fields", fields); + config.put("field", "old_field"); + config.put("to", "new_field"); RenameProcessor renameProcessor = 
factory.create(config); - assertThat(renameProcessor.getFields(), equalTo(fields)); + assertThat(renameProcessor.getOldFieldName(), equalTo("old_field")); + assertThat(renameProcessor.getNewFieldName(), equalTo("new_field")); } - public void testCreateMissingFields() throws Exception { + public void testCreateNoFieldPresent() throws Exception { RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); + config.put("to", "new_field"); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + assertThat(e.getMessage(), equalTo("required property [field] is missing")); + } + } + + public void testCreateNoToPresent() throws Exception { + RenameProcessor.Factory factory = new RenameProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "old_field"); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [to] is missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java index 338a5ff8f60..2feacc88190 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -34,26 +34,15 @@ public class RenameProcessorTests extends ESTestCase { public void testRename() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - int numFields = randomIntBetween(1, 5); - Map fields = new HashMap<>(); - Map newFields = new HashMap<>(); - for (int i = 0; i < numFields; i++) { - String fieldName = 
RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - if (fields.containsKey(fieldName)) { - continue; - } - String newFieldName; - do { - newFieldName = RandomDocumentPicks.randomFieldName(random()); - } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFields.containsKey(newFieldName)); - newFields.put(newFieldName, ingestDocument.getFieldValue(fieldName, Object.class)); - fields.put(fieldName, newFieldName); - } - Processor processor = new RenameProcessor(fields); + String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + Object fieldValue = ingestDocument.getFieldValue(fieldName, Object.class); + String newFieldName; + do { + newFieldName = RandomDocumentPicks.randomFieldName(random()); + } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFieldName.equals(fieldName)); + Processor processor = new RenameProcessor(fieldName, newFieldName); processor.execute(ingestDocument); - for (Map.Entry entry : newFields.entrySet()) { - assertThat(ingestDocument.getFieldValue(entry.getKey(), Object.class), equalTo(entry.getValue())); - } + assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), equalTo(fieldValue)); } public void testRenameArrayElement() throws Exception { @@ -69,7 +58,7 @@ public class RenameProcessorTests extends ESTestCase { document.put("one", one); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = new RenameProcessor(Collections.singletonMap("list.0", "item")); + Processor processor = new RenameProcessor("list.0", "item"); processor.execute(ingestDocument); Object actualObject = ingestDocument.getSource().get("list"); assertThat(actualObject, instanceOf(List.class)); @@ -82,7 +71,7 @@ public class RenameProcessorTests extends ESTestCase { assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); - processor = new 
RenameProcessor(Collections.singletonMap("list.0", "list.3")); + processor = new RenameProcessor("list.0", "list.3"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -97,7 +86,7 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(Collections.singletonMap(fieldName, RandomDocumentPicks.randomFieldName(random()))); + Processor processor = new RenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random())); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -109,7 +98,7 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameNewFieldAlreadyExists() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new RenameProcessor(Collections.singletonMap(RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName)); + Processor processor = new RenameProcessor(RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -123,7 +112,7 @@ public class RenameProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, null); String newFieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(Collections.singletonMap(fieldName, newFieldName)); + Processor processor = new RenameProcessor(fieldName, newFieldName); processor.execute(ingestDocument); 
assertThat(ingestDocument.hasField(fieldName), equalTo(false)); assertThat(ingestDocument.hasField(newFieldName), equalTo(true)); @@ -144,7 +133,7 @@ public class RenameProcessorTests extends ESTestCase { document.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = new RenameProcessor(Collections.singletonMap("list", "new_field")); + Processor processor = new RenameProcessor("list", "new_field"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -169,7 +158,7 @@ public class RenameProcessorTests extends ESTestCase { document.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = new RenameProcessor(Collections.singletonMap("list", "new_field")); + Processor processor = new RenameProcessor("list", "new_field"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java index 1f3c345b1db..9eb6b2a4907 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java @@ -32,20 +32,47 @@ public class SetProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); - Map fields = Collections.singletonMap("field1", "value1"); - config.put("fields", fields); + config.put("field", "field1"); + config.put("value", "value1"); SetProcessor setProcessor = factory.create(config); - assertThat(setProcessor.getFields(), 
equalTo(fields)); + assertThat(setProcessor.getField(), equalTo("field1")); + assertThat(setProcessor.getValue(), equalTo("value1")); } - public void testCreateMissingFields() throws Exception { + public void testCreateNoFieldPresent() throws Exception { SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); + config.put("value", "value1"); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + assertThat(e.getMessage(), equalTo("required property [field] is missing")); + } + } + + public void testCreateNoValuePresent() throws Exception { + SetProcessor.Factory factory = new SetProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [value] is missing")); + } + } + + public void testCreateNullValue() throws Exception { + SetProcessor.Factory factory = new SetProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + config.put("value", null); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [value] is missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java index c675c9c51eb..7d693066595 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java @@ -32,45 +32,30 @@ public class SetProcessorTests extends ESTestCase { public void 
testSetExistingFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - int numFields = randomIntBetween(1, 5); - Map fields = new HashMap<>(); - for (int i = 0; i < numFields; i++) { - String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); - fields.put(fieldName, fieldValue); - } - Processor processor = new SetProcessor(fields); + String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); + Processor processor = new SetProcessor(fieldName, fieldValue); processor.execute(ingestDocument); - - for (Map.Entry field : fields.entrySet()) { - assertThat(ingestDocument.hasField(field.getKey()), equalTo(true)); - assertThat(ingestDocument.getFieldValue(field.getKey(), Object.class), equalTo(field.getValue())); - } + assertThat(ingestDocument.hasField(fieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue)); } public void testSetNewFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); //used to verify that there are no conflicts between subsequent fields going to be added IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - int numFields = randomIntBetween(1, 5); - Map fields = new HashMap<>(); - for (int i = 0; i < numFields; i++) { - Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); - String fieldName = RandomDocumentPicks.addRandomField(random(), testIngestDocument, fieldValue); - fields.put(fieldName, fieldValue); - } - Processor processor = new SetProcessor(fields); + Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), 
testIngestDocument, fieldValue); + Processor processor = new SetProcessor(fieldName, fieldValue); processor.execute(ingestDocument); - for (Map.Entry field : fields.entrySet()) { - assertThat(ingestDocument.hasField(field.getKey()), equalTo(true)); - assertThat(ingestDocument.getFieldValue(field.getKey(), Object.class), equalTo(field.getValue())); - } + assertThat(ingestDocument.hasField(fieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue)); } public void testSetFieldsTypeMismatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue("field", "value"); - Processor processor = new SetProcessor(Collections.singletonMap("field.inner", "value")); + Processor processor = new SetProcessor("field.inner", "value"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java index e1c80859ac0..4d6634b8568 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java @@ -32,20 +32,34 @@ public class SplitProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { SplitProcessor.Factory factory = new SplitProcessor.Factory(); Map config = new HashMap<>(); - Map fields = Collections.singletonMap("field1", "\\."); - config.put("fields", fields); + config.put("field", "field1"); + config.put("separator", "\\."); SplitProcessor splitProcessor = factory.create(config); - assertThat(splitProcessor.getFields(), equalTo(fields)); + assertThat(splitProcessor.getField(), equalTo("field1")); + 
assertThat(splitProcessor.getSeparator(), equalTo("\\.")); } - public void testCreateMissingFields() throws Exception { + public void testCreateNoFieldPresent() throws Exception { SplitProcessor.Factory factory = new SplitProcessor.Factory(); Map config = new HashMap<>(); + config.put("separator", "\\."); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + assertThat(e.getMessage(), equalTo("required property [field] is missing")); + } + } + + public void testCreateNoSeparatorPresent() throws Exception { + SplitProcessor.Factory factory = new SplitProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [separator] is missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java index a190dddc791..594ba3b4590 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java @@ -33,24 +33,16 @@ public class SplitProcessorTests extends ESTestCase { public void testSplit() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - int numFields = randomIntBetween(1, 5); - for (int i = 0; i < numFields; i++) { - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); - fields.put(fieldName, "\\."); - } - Processor processor = new SplitProcessor(fields); + String fieldName = RandomDocumentPicks.addRandomField(random(), 
ingestDocument, "127.0.0.1"); + Processor processor = new SplitProcessor(fieldName, "\\."); processor.execute(ingestDocument); - for (String field : fields.keySet()) { - assertThat(ingestDocument.getFieldValue(field, List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); - } + assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); } public void testSplitFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Map split = Collections.singletonMap(fieldName, "\\."); - Processor processor = new SplitProcessor(split); + Processor processor = new SplitProcessor(fieldName, "\\."); try { processor.execute(ingestDocument); fail("split processor should have failed"); @@ -61,8 +53,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplitNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Map split = Collections.singletonMap("field", "\\."); - Processor processor = new SplitProcessor(split); + Processor processor = new SplitProcessor("field", "\\."); try { processor.execute(ingestDocument); fail("split processor should have failed"); @@ -75,7 +66,7 @@ public class SplitProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, randomInt()); - Processor processor = new SplitProcessor(Collections.singletonMap(fieldName, "\\.")); + Processor processor = new SplitProcessor(fieldName, "\\."); try { processor.execute(ingestDocument); fail("split processor should have failed"); diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java index 2475b04db77..169ebda0064 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java @@ -33,20 +33,19 @@ public class TrimProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { TrimProcessor.Factory factory = new TrimProcessor.Factory(); Map config = new HashMap<>(); - List fields = Collections.singletonList("field1"); - config.put("fields", fields); + config.put("field", "field1"); TrimProcessor uppercaseProcessor = factory.create(config); - assertThat(uppercaseProcessor.getFields(), equalTo(fields)); + assertThat(uppercaseProcessor.getField(), equalTo("field1")); } - public void testCreateMissingFields() throws Exception { + public void testCreateMissingField() throws Exception { TrimProcessor.Factory factory = new TrimProcessor.Factory(); Map config = new HashMap<>(); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + assertThat(e.getMessage(), equalTo("required property [field] is missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java index 586b9e5b4e3..eea867e57bd 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java @@ -22,13 +22,11 @@ package org.elasticsearch.ingest.processor.trim; import 
org.elasticsearch.ingest.processor.AbstractStringProcessor; import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; -import java.util.Collection; - public class TrimProcessorTests extends AbstractStringProcessorTestCase { @Override - protected AbstractStringProcessor newProcessor(Collection fields) { - return new TrimProcessor(fields); + protected AbstractStringProcessor newProcessor(String field) { + return new TrimProcessor(field); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java index ec38b65d86a..a8e048bdcf2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java @@ -33,20 +33,19 @@ public class UppercaseProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); Map config = new HashMap<>(); - List fields = Collections.singletonList("field1"); - config.put("fields", fields); + config.put("field", "field1"); UppercaseProcessor uppercaseProcessor = factory.create(config); - assertThat(uppercaseProcessor.getFields(), equalTo(fields)); + assertThat(uppercaseProcessor.getField(), equalTo("field1")); } - public void testCreateMissingFields() throws Exception { + public void testCreateMissingField() throws Exception { UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); Map config = new HashMap<>(); try { factory.create(config); fail("factory create should have failed"); } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("required property [fields] is missing")); + assertThat(e.getMessage(), equalTo("required property [field] is 
missing")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java index f23f21ece74..00e4d1826ca 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java @@ -28,8 +28,8 @@ import java.util.Locale; public class UppercaseProcessorTests extends AbstractStringProcessorTestCase { @Override - protected AbstractStringProcessor newProcessor(Collection fields) { - return new UppercaseProcessor(fields); + protected AbstractStringProcessor newProcessor(String field) { + return new UppercaseProcessor(field); } @Override diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index cc2cee0c742..607bed4d35f 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -13,9 +13,8 @@ "processors": [ { "set" : { - "fields" : { - "field2": "_value" - } + "field" : "field2", + "value": "_value" } } ] diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml index a6126ddca45..eb59cada2d0 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml @@ -13,76 +13,59 @@ "processors": [ { "set" : { - "fields" : { - "new_field": "new_value" - } + "field" : "new_field", + "value": "new_value" } }, { "rename" : { - "fields" : { - "field_to_rename": "renamed_field" - } + "field" : "field_to_rename", + "to": "renamed_field" } }, { "remove" 
: { - "fields" : [ - "field_to_remove" - ] + "field" : "field_to_remove" } }, { "lowercase" : { - "fields" : [ - "field_to_lowercase" - ] + "field" : "field_to_lowercase" } }, { "uppercase" : { - "fields" : [ - "field_to_uppercase" - ] + "field" : "field_to_uppercase" } }, { "trim" : { - "fields" : [ - "field_to_trim" - ] + "field" : "field_to_trim" } }, { "split" : { - "fields" : { - "field_to_split": "-" - } + "field" : "field_to_split", + "separator": "-" } }, { "join" : { - "fields" : { - "field_to_join": "-" - } + "field" : "field_to_join", + "separator": "-" } }, { "convert" : { - "fields" : { - "field_to_convert": "integer" - } + "field" : "field_to_convert", + "type": "integer" } }, { "gsub" : { - "expressions" : [ - { - "field": "field_to_gsub", - "pattern" : "-", - "replacement" : "." - } - ] + "field": "field_to_gsub", + "pattern" : "-", + "replacement" : "." } } ] diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index c84f525af28..06873116db2 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -13,9 +13,8 @@ "processors": [ { "set" : { - "fields" : { - "field2" : "_value" - } + "field" : "field2", + "value" : "_value" } } ] @@ -67,9 +66,8 @@ "processors": [ { "set" : { - "fields" : { - "field2" : "_value" - } + "field" : "field2", + "value" : "_value" } } ] @@ -130,16 +128,14 @@ "processors": [ { "set" : { - "fields" : { - "field2" : "_value" - } + "field" : "field2", + "value" : "_value" } }, { "set" : { - "fields" : { - "field3" : "third_val" - } + "field" : "field3", + "value" : "third_val" } } ] @@ -182,7 +178,7 @@ "processors": [ { "uppercase" : { - "fields" : ["foo"] + "field" : "foo" } } ] @@ -227,14 +223,13 @@ "processors": [ { "convert" : { - "fields" : { - "foo": "integer" - } + "field" : "foo", + "type" 
: "integer" } }, { "uppercase" : { - "fields" : ["bar"] + "field" : "bar" } } ] From a2cda4e3f294c65291c182bec6421290dd70bef7 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 8 Dec 2015 13:54:24 +0100 Subject: [PATCH 109/347] Streamline the put and delete pipelines responses with the index and delete response in core. --- .../action/delete/DeleteResponse.java | 38 ++++++++- .../action/index/IndexResponse.java | 38 ++++++++- .../rest/action/delete/RestDeleteAction.java | 38 ++------- .../rest/action/index/RestIndexAction.java | 29 +------ .../ingest/rest/RestDeletePipelineAction.java | 7 +- .../ingest/rest/RestPutPipelineAction.java | 4 +- .../delete/DeletePipelineAction.java | 7 +- .../delete/DeletePipelineRequestBuilder.java | 3 +- .../delete/DeletePipelineResponse.java | 80 ------------------- .../delete/DeletePipelineTransportAction.java | 18 +---- .../transport/put/PutPipelineAction.java | 7 +- .../put/PutPipelineRequestBuilder.java | 3 +- .../transport/put/PutPipelineResponse.java | 79 ------------------ .../put/PutPipelineTransportAction.java | 19 +---- .../elasticsearch/ingest/IngestClientIT.java | 68 +++++++--------- .../rest-api-spec/test/ingest/20_crud.yaml | 8 +- 16 files changed, 139 insertions(+), 307 deletions(-) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java diff --git a/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java b/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java index 26cfa57a13d..3767267a2ba 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java @@ -22,16 +22,23 @@ package org.elasticsearch.action.delete; import org.elasticsearch.action.ActionWriteResponse; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import static org.elasticsearch.rest.RestStatus.NOT_FOUND; + /** * The response of the delete action. * * @see org.elasticsearch.action.delete.DeleteRequest * @see org.elasticsearch.client.Client#delete(DeleteRequest) */ -public class DeleteResponse extends ActionWriteResponse { +public class DeleteResponse extends ActionWriteResponse implements StatusToXContent { private String index; private String id; @@ -105,4 +112,33 @@ public class DeleteResponse extends ActionWriteResponse { out.writeLong(version); out.writeBoolean(found); } + + @Override + public RestStatus status() { + RestStatus status = getShardInfo().status(); + if (isFound() == false) { + status = NOT_FOUND; + } + return status; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + ActionWriteResponse.ShardInfo shardInfo = getShardInfo(); + builder.field(Fields.FOUND, found) + .field(Fields._INDEX, index) + .field(Fields._TYPE, type) + .field(Fields._ID, id) + .field(Fields._VERSION, version) + .value(shardInfo); + return builder; + } + + static final class Fields { + static final XContentBuilderString FOUND = new XContentBuilderString("found"); + static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); + static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); + static final XContentBuilderString _ID = new XContentBuilderString("_id"); + static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); + } } diff --git 
a/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java b/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java index 5727b2b673b..8f43d43e552 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java @@ -22,16 +22,23 @@ package org.elasticsearch.action.index; import org.elasticsearch.action.ActionWriteResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import static org.elasticsearch.rest.RestStatus.CREATED; + /** * A response of an index operation, * * @see org.elasticsearch.action.index.IndexRequest * @see org.elasticsearch.client.Client#index(IndexRequest) */ -public class IndexResponse extends ActionWriteResponse { +public class IndexResponse extends ActionWriteResponse implements StatusToXContent { private String index; private String id; @@ -106,6 +113,27 @@ public class IndexResponse extends ActionWriteResponse { out.writeBoolean(created); } + @Override + public RestStatus status() { + RestStatus status = getShardInfo().status(); + if (created) { + status = CREATED; + } + return status; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + ActionWriteResponse.ShardInfo shardInfo = getShardInfo(); + builder.field(Fields._INDEX, index) + .field(Fields._TYPE, type) + .field(Fields._ID, id) + .field(Fields._VERSION, version); + shardInfo.toXContent(builder, params); + builder.field(Fields.CREATED, created); + return builder; + } + @Override public String toString() { StringBuilder builder = new 
StringBuilder(); @@ -118,4 +146,12 @@ public class IndexResponse extends ActionWriteResponse { builder.append(",shards=").append(getShardInfo()); return builder.append("]").toString(); } + + static final class Fields { + static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); + static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); + static final XContentBuilderString _ID = new XContentBuilderString("_id"); + static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); + static final XContentBuilderString CREATED = new XContentBuilderString("created"); + } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java index 209ab686ce5..4a5182f326f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java @@ -19,22 +19,20 @@ package org.elasticsearch.rest.action.delete; -import org.elasticsearch.action.ActionWriteResponse; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestBuilderListener; +import 
org.elasticsearch.rest.action.support.RestStatusToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; /** * @@ -62,31 +60,7 @@ public class RestDeleteAction extends BaseRestHandler { deleteRequest.consistencyLevel(WriteConsistencyLevel.fromString(consistencyLevel)); } - client.delete(deleteRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(DeleteResponse result, XContentBuilder builder) throws Exception { - ActionWriteResponse.ShardInfo shardInfo = result.getShardInfo(); - builder.startObject().field(Fields.FOUND, result.isFound()) - .field(Fields._INDEX, result.getIndex()) - .field(Fields._TYPE, result.getType()) - .field(Fields._ID, result.getId()) - .field(Fields._VERSION, result.getVersion()) - .value(shardInfo) - .endObject(); - RestStatus status = shardInfo.status(); - if (!result.isFound()) { - status = NOT_FOUND; - } - return new BytesRestResponse(status, builder); - } - }); + client.delete(deleteRequest, new RestStatusToXContentListener<>(channel)); } - static final class Fields { - static final XContentBuilderString FOUND = new XContentBuilderString("found"); - static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); - static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); - static final XContentBuilderString _ID = new XContentBuilderString("_id"); - static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); - } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index c7fc29155cc..3714f83f20e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.VersionType; import 
org.elasticsearch.rest.*; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import java.io.IOException; @@ -99,33 +100,7 @@ public class RestIndexAction extends BaseRestHandler { if (consistencyLevel != null) { indexRequest.consistencyLevel(WriteConsistencyLevel.fromString(consistencyLevel)); } - client.index(indexRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(IndexResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo(); - builder.field(Fields._INDEX, response.getIndex()) - .field(Fields._TYPE, response.getType()) - .field(Fields._ID, response.getId()) - .field(Fields._VERSION, response.getVersion()); - shardInfo.toXContent(builder, request); - builder.field(Fields.CREATED, response.isCreated()); - builder.endObject(); - RestStatus status = shardInfo.status(); - if (response.isCreated()) { - status = CREATED; - } - return new BytesRestResponse(status, builder); - } - }); - } - - static final class Fields { - static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); - static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); - static final XContentBuilderString _ID = new XContentBuilderString("_id"); - static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); - static final XContentBuilderString CREATED = new XContentBuilderString("created"); + client.index(indexRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java index f09c229a710..bf8645377f9 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java @@ -20,18 +20,15 @@ package org.elasticsearch.plugin.ingest.rest; import org.elasticsearch.client.Client; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestToXContentListener; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; public class RestDeletePipelineAction extends BaseRestHandler { @@ -45,6 +42,6 @@ public class RestDeletePipelineAction extends BaseRestHandler { protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { DeletePipelineRequest request = new DeletePipelineRequest(); request.id(restRequest.param("id")); - client.execute(DeletePipelineAction.INSTANCE, request, new RestToXContentListener<>(channel)); + client.execute(DeletePipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java index 5b5bd0a0d2e..2fc5508e15e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java @@ -28,7 +28,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestToXContentListener; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; public class RestPutPipelineAction extends BaseRestHandler { @@ -45,6 +45,6 @@ public class RestPutPipelineAction extends BaseRestHandler { if (restRequest.hasContent()) { request.source(restRequest.content()); } - client.execute(PutPipelineAction.INSTANCE, request, new RestToXContentListener<>(channel)); + client.execute(PutPipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java index b6405362ce5..c1fba7fc89f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java @@ -20,9 +20,10 @@ package org.elasticsearch.plugin.ingest.transport.delete; import org.elasticsearch.action.Action; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.client.ElasticsearchClient; -public class DeletePipelineAction extends Action { +public class DeletePipelineAction extends Action { public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/delete"; @@ -37,7 +38,7 @@ public class DeletePipelineAction extends Action { +public class DeletePipelineRequestBuilder extends ActionRequestBuilder { public 
DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action) { super(client, action, new DeletePipelineRequest()); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java deleted file mode 100644 index a35752636b6..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plugin.ingest.transport.delete; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.common.xcontent.XContentHelper; - -import java.io.IOException; -import java.util.Map; - -public class DeletePipelineResponse extends ActionResponse implements ToXContent { - - private String id; - private boolean found; - - DeletePipelineResponse() { - } - - public DeletePipelineResponse(String id, boolean found) { - this.id = id; - this.found = found; - } - - public String id() { - return id; - } - - public boolean found() { - return found; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - this.id = in.readString(); - this.found = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeBoolean(found); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.ID, id); - builder.field(Fields.FOUND, found); - return builder; - } - - static final class Fields { - static final XContentBuilderString ID = new XContentBuilderString("_id"); - static final XContentBuilderString FOUND = new XContentBuilderString("_found"); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java index dff4d4697b0..3b5e72c01d4 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java @@ -20,9 +20,7 @@ package org.elasticsearch.plugin.ingest.transport.delete; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -32,7 +30,7 @@ import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class DeletePipelineTransportAction extends HandledTransportAction { +public class DeletePipelineTransportAction extends HandledTransportAction { private final PipelineStore pipelineStore; @@ -43,17 +41,7 @@ public class DeletePipelineTransportAction extends HandledTransportAction listener) { - pipelineStore.delete(request, new ActionListener() { - @Override - public void onResponse(DeleteResponse deleteResponse) { - listener.onResponse(new DeletePipelineResponse(deleteResponse.getId(), deleteResponse.isFound())); - } - - @Override - public void onFailure(Throwable e) { - listener.onFailure(e); - } - }); + protected void doExecute(DeletePipelineRequest request, ActionListener listener) { + pipelineStore.delete(request, listener); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java index a638d0c8010..1356503b673 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java 
+++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java @@ -20,9 +20,10 @@ package org.elasticsearch.plugin.ingest.transport.put; import org.elasticsearch.action.Action; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.ElasticsearchClient; -public class PutPipelineAction extends Action { +public class PutPipelineAction extends Action { public static final PutPipelineAction INSTANCE = new PutPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/put"; @@ -37,7 +38,7 @@ public class PutPipelineAction extends Action { +public class PutPipelineRequestBuilder extends ActionRequestBuilder { public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action) { super(client, action, new PutPipelineRequest()); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java deleted file mode 100644 index eb733bcbffc..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugin.ingest.transport.put; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; - -import java.io.IOException; - -public class PutPipelineResponse extends ActionResponse implements ToXContent { - - private String id; - private long version; - - public String id() { - return id; - } - - public PutPipelineResponse id(String id) { - this.id = id; - return this; - } - - public long version() { - return version; - } - - public PutPipelineResponse version(long version) { - this.version = version; - return this; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeLong(version); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); - version = in.readLong(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.ID, id); - builder.field(Fields.VERSION, version); - return builder; - } - - static final class Fields { - static final XContentBuilderString ID = new XContentBuilderString("_id"); - static final XContentBuilderString VERSION = new XContentBuilderString("_version"); - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java index 72e6391167f..476dcf6e895 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java @@ -36,7 +36,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Map; -public class PutPipelineTransportAction extends HandledTransportAction { +public class PutPipelineTransportAction extends HandledTransportAction { private final PipelineStore pipelineStore; @@ -47,20 +47,7 @@ public class PutPipelineTransportAction extends HandledTransportAction listener) { - pipelineStore.put(request, new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - PutPipelineResponse response = new PutPipelineResponse(); - response.id(indexResponse.getId()); - response.version(indexResponse.getVersion()); - listener.onResponse(response); - } - - @Override - public void onFailure(Throwable e) { - listener.onFailure(e); - } - }); + protected void doExecute(PutPipelineRequest request, ActionListener listener) { + pipelineStore.put(request, listener); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 01d7baa4ee3..2a53daef452 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; @@ -31,7 +32,6 
@@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineResponse; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; @@ -199,16 +199,13 @@ public class IngestClientIT extends ESIntegTestCase { .endArray() .endObject().bytes()) .get(); - assertBusy(new Runnable() { - @Override - public void run() { - GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); - assertThat(response.isFound(), is(true)); - assertThat(response.pipelines().size(), equalTo(1)); - assertThat(response.pipelines().get(0).getId(), equalTo("_id")); - } + assertBusy(() -> { + GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(response.isFound(), is(true)); + assertThat(response.pipelines().size(), equalTo(1)); + assertThat(response.pipelines().get(0).getId(), equalTo("_id")); }); createIndex("test"); @@ -224,45 +221,36 @@ public class IngestClientIT extends ESIntegTestCase { .putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id") .get(); - assertBusy(new Runnable() { - @Override - public void run() { - Map doc = client().prepareGet("test", "type", "1") - .get().getSourceAsMap(); - assertThat(doc.get("val"), equalTo(123.42)); - assertThat(doc.get("status"), equalTo(400)); - assertThat(doc.get("msg"), equalTo("foo")); - } + assertBusy(() -> { + Map doc = client().prepareGet("test", "type", "1") + .get().getSourceAsMap(); + assertThat(doc.get("val"), equalTo(123.42)); + 
assertThat(doc.get("status"), equalTo(400)); + assertThat(doc.get("msg"), equalTo("foo")); }); client().prepareBulk().add( client().prepareIndex("test", "type", "2").setSource("field1", "123.42 400 ") ).putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id").get(); - assertBusy(new Runnable() { - @Override - public void run() { - Map doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); - assertThat(doc.get("val"), equalTo(123.42)); - assertThat(doc.get("status"), equalTo(400)); - assertThat(doc.get("msg"), equalTo("foo")); - } + assertBusy(() -> { + Map doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); + assertThat(doc.get("val"), equalTo(123.42)); + assertThat(doc.get("status"), equalTo(400)); + assertThat(doc.get("msg"), equalTo("foo")); }); - DeletePipelineResponse response = new DeletePipelineRequestBuilder(client(), DeletePipelineAction.INSTANCE) + DeleteResponse response = new DeletePipelineRequestBuilder(client(), DeletePipelineAction.INSTANCE) .setId("_id") .get(); - assertThat(response.found(), is(true)); - assertThat(response.id(), equalTo("_id")); + assertThat(response.isFound(), is(true)); + assertThat(response.getId(), equalTo("_id")); - assertBusy(new Runnable() { - @Override - public void run() { - GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); - assertThat(response.isFound(), is(false)); - assertThat(response.pipelines().size(), equalTo(0)); - } + assertBusy(() -> { + GetPipelineResponse response1 = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(response1.isFound(), is(false)); + assertThat(response1.pipelines().size(), equalTo(0)); }); } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index cc2cee0c742..02cc2fde7f2 100644 --- 
a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -20,6 +20,9 @@ } ] } + - match: { _index: ".ingest" } + - match: { _type: "pipeline" } + - match: { _version: 1 } - match: { _id: "my_pipeline" } # Simulate a Thread.sleep(), because pipeline are updated in the background @@ -39,8 +42,11 @@ - do: ingest.delete_pipeline: id: "my_pipeline" + - match: { _index: ".ingest" } + - match: { _type: "pipeline" } + - match: { _version: 2 } - match: { _id: "my_pipeline" } - - match: { _found: true } + - match: { found: true } # Simulate a Thread.sleep(), because pipeline are updated in the background - do: From 283d9c15233aaee51227922571fdbe9d2a36478f Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 9 Dec 2015 18:24:26 +0100 Subject: [PATCH 110/347] [TEST] adjust processor docs. remove throws exception when the field is not there. --- docs/plugins/ingest.asciidoc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 0ae37433bae..4c0cc6a77c3 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -4,7 +4,7 @@ === Processors ==== Set processor -Sets one field and associates it with the specified value. If a field already exists, +Sets one field and associates it with the specified value. If the field already exists, its value will be replaced with the provided one. [source,js] @@ -17,7 +17,7 @@ its value will be replaced with the provided one. -------------------------------------------------- ==== Remove processor -Removes an existing field. If a field doesn't exist, nothing will happen. +Removes an existing field. If the field doesn't exist, an exception will be thrown [source,js] -------------------------------------------------- @@ -29,7 +29,7 @@ Removes an existing field. If a field doesn't exist, nothing will happen. 
-------------------------------------------------- ==== Rename processor -Renames an existing fields. If a field doesn't exist, an exception will be thrown. Also, the new field +Renames an existing field. If the field doesn't exist, an exception will be thrown. Also, the new field name must not exist. [source,js] @@ -48,7 +48,7 @@ If the field value is an array, all members will be converted. The supported types include: `integer`, `float`, `string`, and `boolean`. -`boolean` will set a field to true if its string value is equal to `true` (ignore case), to +`boolean` will set the field to true if its string value is equal to `true` (ignore case), to false if its string value is equal to `false` (ignore case) and it will throw exception otherwise. [source,js] From 5bc1e461137610bf698fb2a2a6d4c0cb100b6e1f Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 9 Dec 2015 18:28:07 +0100 Subject: [PATCH 111/347] setFieldValue for list to replace when an index is specified It used to do add instead, which is not consistent with the behaviour of set, which always replaces. 
--- .../main/java/org/elasticsearch/ingest/IngestDocument.java | 4 +--- .../java/org/elasticsearch/ingest/IngestDocumentTests.java | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index fbb68ab812b..4cc43ea4b8b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -20,8 +20,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.HppcMaps; -import org.elasticsearch.search.aggregations.support.format.ValueParser; import java.util.*; @@ -330,7 +328,7 @@ public final class IngestDocument { if (index < 0 || index >= list.size()) { throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); } - list.add(index, value); + list.set(index, value); this.sourceModified = true; } else { throw new IllegalArgumentException("cannot set [" + leafKey + "] with parent object of type [" + context.getClass().getName() + "] as part of path [" + path + "]"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 3f3bc832b2a..087b537cdb5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest; -import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -303,10 +302,9 @@ public class IngestDocumentTests extends ESTestCase { assertThat(object, instanceOf(List.class)); 
@SuppressWarnings("unchecked") List list = (List) object; - assertThat(list.size(), equalTo(3)); + assertThat(list.size(), equalTo(2)); assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value"))); assertThat(list.get(1), equalTo("value")); - assertThat(list.get(2), nullValue()); } public void testSetFieldValueListAsPartOfPath() { From b0d7d604ffd2e0b0c2da481987d807e4bdeaa38d Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 7 Dec 2015 17:16:43 +0100 Subject: [PATCH 112/347] Add support for transient metadata to IngestDocument IngestDocument now holds an additional map of transient metadata. The only field that gets added automatically is `timestamp`, which contains the timestamp of ingestion in ISO8601 format. In the future it will be possible to eventually add or modify these fields, which will not get indexed, but they will be available via templates to all of the processors. Transient metadata will be visualized by the simulate api, although they will never get indexed. Moved WriteableIngestDocument to the simulate package as it's only used by simulate and it's now modelled for that specific usecase. Also taken the chance to remove one IngestDocument constructor used only for testing (accepting only a subset of es metadata fields). While doing that introduced some more randomizations to some existing processor tests. 
Closes #15036 --- .../elasticsearch/ingest/IngestDocument.java | 82 ++++++++---- .../processor/meta/MetaDataProcessor.java | 2 +- .../ingest/PipelineExecutionService.java | 14 +- .../SimulateDocumentSimpleResult.java | 7 +- .../simulate/SimulateProcessorResult.java | 8 +- .../WriteableIngestDocument.java | 71 +++++------ .../elasticsearch/ingest/IngestClientIT.java | 2 +- .../ingest/IngestDocumentTests.java | 102 ++++++++++----- .../ingest/RandomDocumentPicks.java | 18 ++- .../processor/date/DateProcessorTests.java | 21 +-- .../processor/geoip/GeoIpProcessorTests.java | 7 +- .../processor/grok/GrokProcessorTests.java | 8 +- .../meta/MetaDataProcessorTests.java | 6 +- .../ingest/PipelineExecutionServiceTests.java | 14 +- .../transport/IngestActionFilterTests.java | 3 +- .../WriteableIngestDocumentTests.java | 98 -------------- .../SimulateDocumentSimpleResultTests.java | 5 +- .../SimulateExecutionServiceTests.java | 5 +- .../SimulatePipelineRequestParsingTests.java | 12 +- .../SimulatePipelineResponseTests.java | 9 +- .../SimulateProcessorResultTests.java | 5 +- .../WriteableIngestDocumentTests.java | 120 ++++++++++++++++++ .../test/ingest/80_simulate.yaml | 18 ++- 23 files changed, 381 insertions(+), 256 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/{ => simulate}/WriteableIngestDocument.java (50%) delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocumentTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 4cc43ea4b8b..1077b16453b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -21,6 +21,8 
@@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; +import java.text.DateFormat; +import java.text.SimpleDateFormat; import java.util.*; /** @@ -28,38 +30,54 @@ import java.util.*; */ public final class IngestDocument { - private final Map metaData; + static final String TIMESTAMP = "timestamp"; + + private final Map esMetadata; private final Map source; + private final Map ingestMetadata; private boolean sourceModified = false; - public IngestDocument(String index, String type, String id, Map source) { - this(index, type, id, null, null, null, null, source); - } - public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp, String ttl, Map source) { - this.metaData = new HashMap<>(); - this.metaData.put(MetaData.INDEX.getFieldName(), index); - this.metaData.put(MetaData.TYPE.getFieldName(), type); - this.metaData.put(MetaData.ID.getFieldName(), id); + this.esMetadata = new HashMap<>(); + this.esMetadata.put(MetaData.INDEX.getFieldName(), index); + this.esMetadata.put(MetaData.TYPE.getFieldName(), type); + this.esMetadata.put(MetaData.ID.getFieldName(), id); if (routing != null) { - this.metaData.put(MetaData.ROUTING.getFieldName(), routing); + this.esMetadata.put(MetaData.ROUTING.getFieldName(), routing); } if (parent != null) { - this.metaData.put(MetaData.PARENT.getFieldName(), parent); + this.esMetadata.put(MetaData.PARENT.getFieldName(), parent); } if (timestamp != null) { - this.metaData.put(MetaData.TIMESTAMP.getFieldName(), timestamp); + this.esMetadata.put(MetaData.TIMESTAMP.getFieldName(), timestamp); } if (ttl != null) { - this.metaData.put(MetaData.TTL.getFieldName(), ttl); + this.esMetadata.put(MetaData.TTL.getFieldName(), ttl); } this.source = source; + this.ingestMetadata = new HashMap<>(); + DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); + df.setTimeZone(TimeZone.getTimeZone("UTC")); + this.ingestMetadata.put(TIMESTAMP, df.format(new 
Date())); } + /** + * Copy constructor that creates a new {@link IngestDocument} which has exactly the same properties of the one provided as argument + */ public IngestDocument(IngestDocument other) { - this.metaData = new HashMap<>(other.metaData); - this.source = new HashMap<>(other.source); + this(other.esMetadata, other.source, other.ingestMetadata); + } + + /** + * Constructor needed for testing that allows to create a new {@link IngestDocument} given the provided elasticsearch metadata, + * source and ingest metadata. This is needed because the ingest metadata will be initialized with the current timestamp at + * init time, which makes comparisons impossible in tests. + */ + public IngestDocument(Map esMetadata, Map source, Map ingestMetadata) { + this.esMetadata = new HashMap<>(esMetadata); + this.source = new HashMap<>(source); + this.ingestMetadata = new HashMap<>(ingestMetadata); } /** @@ -335,12 +353,28 @@ public final class IngestDocument { } } - public String getMetadata(MetaData metaData) { - return this.metaData.get(metaData.getFieldName()); + public String getEsMetadata(MetaData esMetadata) { + return this.esMetadata.get(esMetadata.getFieldName()); } - public void setMetaData(MetaData metaData, String value) { - this.metaData.put(metaData.getFieldName(), value); + public Map getEsMetadata() { + return Collections.unmodifiableMap(esMetadata); + } + + public void setEsMetadata(MetaData metaData, String value) { + this.esMetadata.put(metaData.getFieldName(), value); + } + + public String getIngestMetadata(String ingestMetadata) { + return this.ingestMetadata.get(ingestMetadata); + } + + public Map getIngestMetadata() { + return Collections.unmodifiableMap(this.ingestMetadata); + } + + public void setIngestMetadata(String metadata, String value) { + this.ingestMetadata.put(metadata, value); } /** @@ -391,19 +425,21 @@ public final class IngestDocument { IngestDocument other = (IngestDocument) obj; return Objects.equals(source, other.source) && - 
Objects.equals(metaData, other.metaData); + Objects.equals(esMetadata, other.esMetadata) && + Objects.equals(ingestMetadata, other.ingestMetadata); } @Override public int hashCode() { - return Objects.hash(metaData, source); + return Objects.hash(esMetadata, source); } @Override public String toString() { return "IngestDocument{" + - "metaData=" + metaData + + "esMetadata=" + esMetadata + ", source=" + source + + ", ingestMetadata=" + ingestMetadata + '}'; } @@ -447,7 +483,5 @@ public final class IngestDocument { throw new IllegalArgumentException("no valid metadata field name [" + value + "]"); } } - } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java index 1abdd995662..7cfab487678 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java @@ -30,7 +30,7 @@ public final class MetaDataProcessor implements Processor { for (Map.Entry entry : templates.entrySet()) { StringWriter writer = new StringWriter(); entry.getValue().execute(writer, model); - ingestDocument.setMetaData(entry.getKey(), writer.toString()); + ingestDocument.setEsMetadata(entry.getKey(), writer.toString()); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 4a959209152..b94dbd391cd 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -65,13 +65,13 @@ public class PipelineExecutionService { if (ingestDocument.isSourceModified()) { indexRequest.source(ingestDocument.getSource()); } - 
indexRequest.index(ingestDocument.getMetadata(IngestDocument.MetaData.INDEX)); - indexRequest.type(ingestDocument.getMetadata(IngestDocument.MetaData.TYPE)); - indexRequest.id(ingestDocument.getMetadata(IngestDocument.MetaData.ID)); - indexRequest.routing(ingestDocument.getMetadata(IngestDocument.MetaData.ROUTING)); - indexRequest.parent(ingestDocument.getMetadata(IngestDocument.MetaData.PARENT)); - indexRequest.timestamp(ingestDocument.getMetadata(IngestDocument.MetaData.TIMESTAMP)); - indexRequest.ttl(ingestDocument.getMetadata(IngestDocument.MetaData.TTL)); + indexRequest.index(ingestDocument.getEsMetadata(IngestDocument.MetaData.INDEX)); + indexRequest.type(ingestDocument.getEsMetadata(IngestDocument.MetaData.TYPE)); + indexRequest.id(ingestDocument.getEsMetadata(IngestDocument.MetaData.ID)); + indexRequest.routing(ingestDocument.getEsMetadata(IngestDocument.MetaData.ROUTING)); + indexRequest.parent(ingestDocument.getEsMetadata(IngestDocument.MetaData.PARENT)); + indexRequest.timestamp(ingestDocument.getEsMetadata(IngestDocument.MetaData.TIMESTAMP)); + indexRequest.ttl(ingestDocument.getEsMetadata(IngestDocument.MetaData.TTL)); listener.onResponse(ingestDocument); } catch (Throwable e) { listener.onFailure(e); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java index 87f2b42ccaf..589c2ec8ac4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java @@ -23,17 +23,20 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.ingest.IngestDocument; -import 
org.elasticsearch.plugin.ingest.transport.WriteableIngestDocument; import java.io.IOException; public class SimulateDocumentSimpleResult implements SimulateDocumentResult { - private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult((IngestDocument)null); + private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult(); private WriteableIngestDocument ingestDocument; private Exception failure; + private SimulateDocumentSimpleResult() { + this.ingestDocument = null; + } + public SimulateDocumentSimpleResult(IngestDocument ingestDocument) { this.ingestDocument = new WriteableIngestDocument(ingestDocument); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java index a31aee147af..74c969d1ece 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java @@ -26,18 +26,22 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.plugin.ingest.transport.WriteableIngestDocument; import java.io.IOException; public class SimulateProcessorResult implements Writeable, ToXContent { - private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult(null, (IngestDocument)null); + private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult(); private String processorId; private WriteableIngestDocument ingestDocument; private Exception failure; + private SimulateProcessorResult() { + this.processorId = null; + this.ingestDocument 
= null; + } + public SimulateProcessorResult(String processorId, IngestDocument ingestDocument) { this.processorId = processorId; this.ingestDocument = new WriteableIngestDocument(ingestDocument); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java similarity index 50% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java index 3a75218857f..4f0b2eeee60 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport; +package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,63 +31,59 @@ import java.io.IOException; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.ingest.IngestDocument.MetaData.*; +final class WriteableIngestDocument implements Writeable, ToXContent { -public class WriteableIngestDocument implements Writeable, ToXContent { - - private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(null); + private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(); private final IngestDocument ingestDocument; - public WriteableIngestDocument(IngestDocument ingestDocument) { + private WriteableIngestDocument() { + this.ingestDocument = null; + } + + WriteableIngestDocument(IngestDocument ingestDocument) { + assert ingestDocument != null; this.ingestDocument = 
ingestDocument; } - public IngestDocument getIngestDocument() { + IngestDocument getIngestDocument() { return ingestDocument; } - public static WriteableIngestDocument readWriteableIngestDocumentFrom(StreamInput in) throws IOException { + static WriteableIngestDocument readWriteableIngestDocumentFrom(StreamInput in) throws IOException { return PROTOTYPE.readFrom(in); } @Override public WriteableIngestDocument readFrom(StreamInput in) throws IOException { - String index = in.readString(); - String type = in.readString(); - String id = in.readString(); - String routing = in.readOptionalString(); - String parent = in.readOptionalString(); - String timestamp = in.readOptionalString(); - String ttl = in.readOptionalString(); - Map doc = in.readMap(); - return new WriteableIngestDocument(new IngestDocument(index, type, id, routing, parent, timestamp, ttl, doc)); + @SuppressWarnings("unchecked") + Map esMetadata = (Map) in.readGenericValue(); + Map source = in.readMap(); + @SuppressWarnings("unchecked") + Map ingestMetadata = (Map) in.readGenericValue(); + return new WriteableIngestDocument(new IngestDocument(esMetadata, source, ingestMetadata)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(ingestDocument.getMetadata(INDEX)); - out.writeString(ingestDocument.getMetadata(TYPE)); - out.writeString(ingestDocument.getMetadata(ID)); - out.writeOptionalString(ingestDocument.getMetadata(ROUTING)); - out.writeOptionalString(ingestDocument.getMetadata(PARENT)); - out.writeOptionalString(ingestDocument.getMetadata(TIMESTAMP)); - out.writeOptionalString(ingestDocument.getMetadata(TTL)); + out.writeGenericValue(ingestDocument.getEsMetadata()); out.writeMap(ingestDocument.getSource()); + out.writeGenericValue(ingestDocument.getIngestMetadata()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.DOCUMENT); builder.field(Fields.MODIFIED, 
ingestDocument.isSourceModified()); - builder.field(Fields.INDEX, ingestDocument.getMetadata(INDEX)); - builder.field(Fields.TYPE, ingestDocument.getMetadata(TYPE)); - builder.field(Fields.ID, ingestDocument.getMetadata(ID)); - builder.field(Fields.ROUTING, ingestDocument.getMetadata(ROUTING)); - builder.field(Fields.PARENT, ingestDocument.getMetadata(PARENT)); - builder.field(Fields.TIMESTAMP, ingestDocument.getMetadata(TIMESTAMP)); - builder.field(Fields.TTL, ingestDocument.getMetadata(TTL)); + for (Map.Entry esMetadata : ingestDocument.getEsMetadata().entrySet()) { + builder.field(esMetadata.getKey(), esMetadata.getValue()); + } builder.field(Fields.SOURCE, ingestDocument.getSource()); + builder.startObject(Fields.INGEST); + for (Map.Entry ingestMetadata : ingestDocument.getIngestMetadata().entrySet()) { + builder.field(ingestMetadata.getKey(), ingestMetadata.getValue()); + } + builder.endObject(); builder.endObject(); return builder; } @@ -109,16 +105,15 @@ public class WriteableIngestDocument implements Writeable esMetadata = new HashMap<>(); + int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); + for (int i = 0; i < numFields; i++) { + esMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + } + Map ingestMetadata = new HashMap<>(); + numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + } + Map document = RandomDocumentPicks.randomDocument(random()); + IngestDocument ingestDocument = new IngestDocument(esMetadata, document, ingestMetadata); boolean changed = false; - String otherIndex; + Map otherEsMetadata; if (randomBoolean()) { - otherIndex = randomAsciiOfLengthBetween(1, 10); + otherEsMetadata = new HashMap<>(); + numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); + for (int i = 0; i < numFields; i++) { + 
otherEsMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + } changed = true; } else { - otherIndex = index; - } - String otherType; - if (randomBoolean()) { - otherType = randomAsciiOfLengthBetween(1, 10); - changed = true; - } else { - otherType = type; - } - String otherId; - if (randomBoolean()) { - otherId = randomAsciiOfLengthBetween(1, 10); - changed = true; - } else { - otherId = id; - } - Map document; - if (randomBoolean()) { - document = Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); - changed = true; - } else { - document = Collections.singletonMap(fieldName, fieldValue); + otherEsMetadata = Collections.unmodifiableMap(esMetadata); } - IngestDocument otherIngestDocument = new IngestDocument(otherIndex, otherType, otherId, document); + Map otherIngestMetadata; + if (randomBoolean()) { + otherIngestMetadata = new HashMap<>(); + numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + otherIngestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + } + changed = true; + } else { + otherIngestMetadata = Collections.unmodifiableMap(ingestMetadata); + } + + Map otherDocument; + if (randomBoolean()) { + otherDocument = RandomDocumentPicks.randomDocument(random()); + changed = true; + } else { + otherDocument = Collections.unmodifiableMap(document); + } + + IngestDocument otherIngestDocument = new IngestDocument(otherEsMetadata, otherDocument, otherIngestMetadata); if (changed) { assertThat(ingestDocument, not(equalTo(otherIngestDocument))); assertThat(otherIngestDocument, not(equalTo(ingestDocument))); @@ -533,7 +544,7 @@ public class IngestDocumentTests extends ESTestCase { assertThat(ingestDocument, equalTo(otherIngestDocument)); assertThat(otherIngestDocument, equalTo(ingestDocument)); assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode())); - IngestDocument 
thirdIngestDocument = new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue)); + IngestDocument thirdIngestDocument = new IngestDocument(Collections.unmodifiableMap(esMetadata), Collections.unmodifiableMap(document), Collections.unmodifiableMap(ingestMetadata)); assertThat(thirdIngestDocument, equalTo(ingestDocument)); assertThat(ingestDocument, equalTo(thirdIngestDocument)); assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode())); @@ -554,7 +565,7 @@ public class IngestDocumentTests extends ESTestCase { Map myPreciousMap = new HashMap<>(); myPreciousMap.put("field2", "value2"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", new HashMap<>()); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, null, new HashMap<>()); ingestDocument.setFieldValue("field1", myPreciousMap); ingestDocument.removeField("field1.field2"); @@ -566,11 +577,32 @@ public class IngestDocumentTests extends ESTestCase { List myPreciousList = new ArrayList<>(); myPreciousList.add("value"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", new HashMap<>()); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, null, new HashMap<>()); ingestDocument.setFieldValue("field1", myPreciousList); ingestDocument.removeField("field1.0"); assertThat(myPreciousList.size(), equalTo(1)); assertThat(myPreciousList.get(0), equalTo("value")); } + + public void testIngestCustomMetadata() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String metadata = randomAsciiOfLengthBetween(1, 10); + String value = randomAsciiOfLengthBetween(1, 10); + ingestDocument.setIngestMetadata(metadata, value); + assertThat(ingestDocument.getIngestMetadata(metadata), equalTo(value)); + } + + public void testIngestMetadataTimestamp() throws Exception { + long before = 
System.currentTimeMillis(); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + long after = System.currentTimeMillis(); + String timestampString = ingestDocument.getIngestMetadata("timestamp"); + assertThat(timestampString, notNullValue()); + assertThat(timestampString, endsWith("+0000")); + DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); + Date timestamp = df.parse(timestampString); + assertThat(timestamp.getTime(), greaterThanOrEqualTo(before)); + assertThat(timestamp.getTime(), lessThanOrEqualTo(after)); + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java index e83a66a38d6..c54a8bfa126 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -132,7 +132,23 @@ public final class RandomDocumentPicks { String index = randomString(random); String type = randomString(random); String id = randomString(random); - return new IngestDocument(index, type, id, document); + String routing = null; + if (random.nextBoolean()) { + routing = randomString(random); + } + String parent = null; + if (random.nextBoolean()) { + parent = randomString(random); + } + String timestamp = null; + if (random.nextBoolean()) { + timestamp = randomString(random); + } + String ttl = null; + if (random.nextBoolean()) { + ttl = randomString(random); + } + return new IngestDocument(index, type, id, routing, parent, timestamp, ttl, document); } public static Map randomDocument(Random random) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java index 182ab1dbaaf..86e4017d57c 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -36,7 +37,7 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06 11:05:15"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T11:05:15.000+02:00")); } @@ -51,25 +52,25 @@ public class DateProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12/06/2010"); - ingestDocument = new IngestDocument("index", "type", "id", document); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "12-06-2010"); - 
ingestDocument = new IngestDocument("index", "type", "id", document); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); document = new HashMap<>(); document.put("date_as_string", "2010"); - ingestDocument = new IngestDocument("index", "type", "id", document); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); try { dateProcessor.execute(ingestDocument); fail("processor should have failed due to not supported date format"); @@ -83,7 +84,7 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 giugno"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00")); } @@ -93,7 +94,7 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList("dd/MM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "12/06"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); } @@ -104,7 +105,7 @@ public class DateProcessorTests extends ESTestCase { Map document = new HashMap<>(); String dateAsString = (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024"; document.put("date_as_string", dateAsString); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00")); } @@ -114,7 +115,7 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList(DateFormat.UnixMs.toString()), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } @@ -124,7 +125,7 @@ public class DateProcessorTests extends ESTestCase { "date_as_string", Collections.singletonList(DateFormat.Unix.toString()), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000.5"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z")); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java index 8064240c317..e77678b2fa5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.ingest.processor.geoip; import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; @@ -39,7 +40,7 @@ public class GeoIpProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); assertThat(ingestDocument.getSource().size(), equalTo(2)); @@ -65,7 +66,7 @@ public class GeoIpProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); assertThat(ingestDocument.getSource().size(), equalTo(2)); @@ -85,7 +86,7 @@ public class GeoIpProcessorTests extends ESTestCase { Map document = new HashMap<>(); document.put("source_field", "202.45.11.11"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSource().get("target_field"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java index 5afd15490dc..cb20c5887cd 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java @@ -32,7 +32,7 @@ public class GrokProcessorTests extends ESTestCase { public void testMatch() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); - IngestDocument doc = new IngestDocument("index", "type", "id", new HashMap<>()); + IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "1"); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); GrokProcessor processor = new GrokProcessor(grok, fieldName); @@ -42,7 +42,7 @@ public class GrokProcessorTests extends ESTestCase { public void testNoMatch() { String fieldName = RandomDocumentPicks.randomFieldName(random()); - IngestDocument doc = new IngestDocument("index", "type", "id", new HashMap<>()); + IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "23"); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); GrokProcessor processor = new GrokProcessor(grok, fieldName); @@ -56,7 +56,7 @@ public class GrokProcessorTests extends ESTestCase { public void testNotStringField() { String fieldName = RandomDocumentPicks.randomFieldName(random()); - IngestDocument doc = new IngestDocument("index", "type", "id", new HashMap<>()); + IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, 1); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); GrokProcessor processor = new GrokProcessor(grok, fieldName); @@ -70,7 +70,7 @@ public class GrokProcessorTests extends ESTestCase { public void testMissingField() { String fieldName = "foo.bar"; - IngestDocument doc = new IngestDocument("index", "type", "id", new HashMap<>()); + IngestDocument doc = 
RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); GrokProcessor processor = new GrokProcessor(grok, fieldName); try { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java index 13d56017d60..99340c5857e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java @@ -4,6 +4,7 @@ import com.github.mustachejava.DefaultMustacheFactory; import com.github.mustachejava.Mustache; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -22,12 +23,11 @@ public class MetaDataProcessorTests extends ESTestCase { } MetaDataProcessor processor = new MetaDataProcessor(templates); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.singletonMap("field", "value")); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", "value")); processor.execute(ingestDocument); for (MetaData metaData : MetaData.values()) { - assertThat(ingestDocument.getMetadata(metaData), Matchers.equalTo("some value")); + assertThat(ingestDocument.getEsMetadata(metaData), Matchers.equalTo("some value")); } } - } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 2aa7d2207f8..7723ef5402f 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -82,9 +82,9 @@ public class PipelineExecutionServiceTests extends ESTestCase { IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) { if (metaData == IngestDocument.MetaData.TTL) { - ingestDocument.setMetaData(IngestDocument.MetaData.TTL, "5w"); + ingestDocument.setEsMetadata(IngestDocument.MetaData.TTL, "5w"); } else { - ingestDocument.setMetaData(metaData, "update" + metaData.getFieldName()); + ingestDocument.setEsMetadata(metaData, "update" + metaData.getFieldName()); } } @@ -176,12 +176,18 @@ public class PipelineExecutionServiceTests extends ESTestCase { private final IngestDocument ingestDocument; public IngestDocumentMatcher(String index, String type, String id, Map source) { - this.ingestDocument = new IngestDocument(index, type, id, source); + this.ingestDocument = new IngestDocument(index, type, id, null, null, null, null, source); } @Override public boolean matches(Object o) { - return Objects.equals(ingestDocument, o); + if (o.getClass() == IngestDocument.class) { + IngestDocument otherIngestDocument = (IngestDocument) o; + //ingest metadata will not be the same (timestamp differs every time) + return Objects.equals(ingestDocument.getSource(), otherIngestDocument.getSource()) + && Objects.equals(ingestDocument.getEsMetadata(), otherIngestDocument.getEsMetadata()); + } + return false; } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 534f76ecc72..fe34295173a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -123,7 +123,8 @@ public class IngestActionFilterTests extends ESTestCase { Answer answer = invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onResponse(new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), indexRequest.sourceAsMap())); + listener.onResponse(new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), indexRequest.routing(), indexRequest.parent(), + indexRequest.timestamp(), indexRequest.ttl() == null ? null : indexRequest.ttl().toString(), indexRequest.sourceAsMap())); return null; }; doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocumentTests.java deleted file mode 100644 index 168afa40ccf..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/WriteableIngestDocumentTests.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugin.ingest.transport; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; - -public class WriteableIngestDocumentTests extends ESTestCase { - - public void testEqualsAndHashcode() throws Exception { - String index = randomAsciiOfLengthBetween(1, 10); - String type = randomAsciiOfLengthBetween(1, 10); - String id = randomAsciiOfLengthBetween(1, 10); - String fieldName = randomAsciiOfLengthBetween(1, 10); - String fieldValue = randomAsciiOfLengthBetween(1, 10); - WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); - - boolean changed = false; - String otherIndex; - if (randomBoolean()) { - otherIndex = randomAsciiOfLengthBetween(1, 10); - changed = true; - } else { - otherIndex = index; - } - String otherType; - if (randomBoolean()) { - otherType = randomAsciiOfLengthBetween(1, 10); - changed = true; - } else { - otherType = type; - } - String otherId; - if (randomBoolean()) { - otherId = randomAsciiOfLengthBetween(1, 10); - changed = true; - } else { - otherId = id; - } - Map document; - if (randomBoolean()) { - document = Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); - changed = true; - } else { - document = Collections.singletonMap(fieldName, fieldValue); - } - - WriteableIngestDocument otherWriteableIngestDocument = new WriteableIngestDocument(new IngestDocument(otherIndex, otherType, otherId, document)); - if (changed) { - 
assertThat(writeableIngestDocument, not(equalTo(otherWriteableIngestDocument))); - assertThat(otherWriteableIngestDocument, not(equalTo(writeableIngestDocument))); - } else { - assertThat(writeableIngestDocument, equalTo(otherWriteableIngestDocument)); - assertThat(otherWriteableIngestDocument, equalTo(writeableIngestDocument)); - WriteableIngestDocument thirdWriteableIngestDocument = new WriteableIngestDocument(new IngestDocument(index, type, id, Collections.singletonMap(fieldName, fieldValue))); - assertThat(thirdWriteableIngestDocument, equalTo(writeableIngestDocument)); - assertThat(writeableIngestDocument, equalTo(thirdWriteableIngestDocument)); - } - } - - public void testSerialization() throws IOException { - IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), - Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); - WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(ingestDocument); - - BytesStreamOutput out = new BytesStreamOutput(); - writeableIngestDocument.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); - WriteableIngestDocument otherWriteableIngestDocument = WriteableIngestDocument.readWriteableIngestDocumentFrom(streamInput); - assertThat(otherWriteableIngestDocument, equalTo(writeableIngestDocument)); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java index 7954b4a51a9..38c1e88bdb3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java @@ -22,10 +22,10 
@@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.Collections; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; @@ -38,8 +38,7 @@ public class SimulateDocumentSimpleResultTests extends ESTestCase { if (isFailure) { simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(new IllegalArgumentException("test")); } else { - IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), - Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(ingestDocument); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index 42cedf520ad..43ae965fb27 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -29,7 +30,6 @@ import org.junit.After; import org.junit.Before; import java.util.Arrays; -import java.util.Collections; import static org.hamcrest.Matchers.*; import static org.mockito.Mockito.*; @@ -53,7 +53,8 @@ public class SimulateExecutionServiceTests extends ESTestCase { processor = mock(Processor.class); when(processor.getType()).thenReturn("mock"); pipeline = new Pipeline("_id", "_description", Arrays.asList(processor, processor)); - ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + //ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); } @After diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java index 1bedcc456af..e10391f7667 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java @@ -84,9 +84,9 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { for (IngestDocument ingestDocument : actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); - assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); + 
assertThat(ingestDocument.getEsMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(ingestDocument.getEsMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); + assertThat(ingestDocument.getEsMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); } assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); @@ -137,9 +137,9 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { for (IngestDocument ingestDocument : actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(ingestDocument.getMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(ingestDocument.getMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); - assertThat(ingestDocument.getMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); + assertThat(ingestDocument.getEsMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(ingestDocument.getEsMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); + assertThat(ingestDocument.getEsMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); } assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java index 1e4ca765970..1b3b35e2154 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java @@ -22,17 +22,15 @@ package 
org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.Iterator; import java.util.List; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.CoreMatchers.*; public class SimulatePipelineResponseTests extends ESTestCase { @@ -42,8 +40,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { List results = new ArrayList<>(numResults); for (int i = 0; i < numResults; i++) { boolean isFailure = randomBoolean(); - IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), - Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); if (isVerbose) { int numProcessors = randomIntBetween(1, 10); List processorResults = new ArrayList<>(numProcessors); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java index 7f7925f451e..a2af6056fa2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java @@ -22,10 +22,10 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.Collections; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -39,8 +39,7 @@ public class SimulateProcessorResultTests extends ESTestCase { if (isFailure) { simulateProcessorResult = new SimulateProcessorResult(processorId, new IllegalArgumentException("test")); } else { - IngestDocument ingestDocument = new IngestDocument(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10), - Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); simulateProcessorResult = new SimulateProcessorResult(processorId, ingestDocument); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java new file mode 100644 index 00000000000..da0db6c7b18 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest.transport.simulate; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class WriteableIngestDocumentTests extends ESTestCase { + + public void testEqualsAndHashcode() throws Exception { + Map esMetadata = new HashMap<>(); + int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); + for (int i = 0; i < numFields; i++) { + esMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + } + Map ingestMetadata = new HashMap<>(); + numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + } + Map document = RandomDocumentPicks.randomDocument(random()); + WriteableIngestDocument ingestDocument = new WriteableIngestDocument(new IngestDocument(esMetadata, document, ingestMetadata)); + + boolean changed = false; + Map otherEsMetadata; + if (randomBoolean()) { + otherEsMetadata = new HashMap<>(); + numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); + for (int i = 0; i < numFields; i++) 
{ + otherEsMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + } + changed = true; + } else { + otherEsMetadata = Collections.unmodifiableMap(esMetadata); + } + + Map otherIngestMetadata; + if (randomBoolean()) { + otherIngestMetadata = new HashMap<>(); + numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + otherIngestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + } + changed = true; + } else { + otherIngestMetadata = Collections.unmodifiableMap(ingestMetadata); + } + + Map otherDocument; + if (randomBoolean()) { + otherDocument = RandomDocumentPicks.randomDocument(random()); + changed = true; + } else { + otherDocument = Collections.unmodifiableMap(document); + } + + WriteableIngestDocument otherIngestDocument = new WriteableIngestDocument(new IngestDocument(otherEsMetadata, otherDocument, otherIngestMetadata)); + if (changed) { + assertThat(ingestDocument, not(equalTo(otherIngestDocument))); + assertThat(otherIngestDocument, not(equalTo(ingestDocument))); + } else { + assertThat(ingestDocument, equalTo(otherIngestDocument)); + assertThat(otherIngestDocument, equalTo(ingestDocument)); + assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode())); + WriteableIngestDocument thirdIngestDocument = new WriteableIngestDocument(new IngestDocument(Collections.unmodifiableMap(esMetadata), Collections.unmodifiableMap(document), Collections.unmodifiableMap(ingestMetadata))); + assertThat(thirdIngestDocument, equalTo(ingestDocument)); + assertThat(ingestDocument, equalTo(thirdIngestDocument)); + assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode())); + } + } + + public void testSerialization() throws IOException { + Map esMetadata = new HashMap<>(); + int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); + for (int i = 0; i < numFields; i++) { + 
esMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + } + Map ingestMetadata = new HashMap<>(); + numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + } + Map document = RandomDocumentPicks.randomDocument(random()); + WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(new IngestDocument(esMetadata, document, ingestMetadata)); + + BytesStreamOutput out = new BytesStreamOutput(); + writeableIngestDocument.writeTo(out); + StreamInput streamInput = StreamInput.wrap(out.bytes()); + WriteableIngestDocument otherWriteableIngestDocument = WriteableIngestDocument.readWriteableIngestDocumentFrom(streamInput); + assertThat(otherWriteableIngestDocument, equalTo(writeableIngestDocument)); + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index 06873116db2..e0db65cf003 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -49,7 +49,8 @@ - is_true: docs.0.doc.modified - match: { docs.0.doc._source.foo: "bar" } - match: { docs.0.doc._source.field2: "_value" } - + - length: { docs.0.doc._ingest: 1 } + - is_true: docs.0.doc._ingest.timestamp --- "Test simulate with provided pipeline definition": @@ -158,10 +159,14 @@ - length: { docs.0.processor_results.0.doc._source: 2 } - match: { docs.0.processor_results.0.doc._source.foo: "bar" } - match: { docs.0.processor_results.0.doc._source.field2: "_value" } + - length: { docs.0.processor_results.0.doc._ingest: 1 } + - is_true: docs.0.processor_results.0.doc._ingest.timestamp - length: { docs.0.processor_results.1.doc._source: 3 } - match: { docs.0.processor_results.1.doc._source.foo: "bar" } - 
match: { docs.0.processor_results.1.doc._source.field2: "_value" } - match: { docs.0.processor_results.1.doc._source..field3: "third_val" } + - length: { docs.0.processor_results.1.doc._ingest: 1 } + - is_true: docs.0.processor_results.1.doc._ingest.timestamp --- "Test simulate with exception thrown": @@ -206,6 +211,8 @@ - match: { docs.0.error.type: "illegal_argument_exception" } - is_true: docs.1.doc.modified - match: { docs.1.doc._source.foo: "BAR" } + - length: { docs.1.doc._ingest: 1 } + - is_true: docs.1.doc._ingest.timestamp --- "Test verbose simulate with exception thrown": @@ -262,8 +269,15 @@ - match: { docs.0.processor_results.1.doc._type: "type" } - match: { docs.0.processor_results.1.doc._id: "id" } - match: { docs.0.processor_results.1.doc._source.foo: "bar" } - - match: { docs.1.processor_results.1.doc._source.bar: "HELLO" } + - match: { docs.0.processor_results.1.doc._source.bar: "HELLO" } + - length: { docs.0.processor_results.1.doc._ingest: 1 } + - is_true: docs.0.processor_results.1.doc._ingest.timestamp - match: { docs.1.processor_results.0.doc._source.foo: 5 } - match: { docs.1.processor_results.0.doc._source.bar: "hello" } + - length: { docs.1.processor_results.0.doc._ingest: 1 } + - is_true: docs.1.processor_results.0.doc._ingest.timestamp - match: { docs.1.processor_results.1.doc._source.foo: 5 } - match: { docs.1.processor_results.1.doc._source.bar: "HELLO" } + - length: { docs.1.processor_results.1.doc._ingest: 1 } + - is_true: docs.1.processor_results.1.doc._ingest.timestamp + From 6b7446beb9e77be27636208928d00ed1c6474fc8 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 9 Dec 2015 10:50:10 +0100 Subject: [PATCH 113/347] Remove sourceModified flag from IngestDocument If one is using the ingest plugin and providing a pipeline id with the request, the chance that the source is going to be modified is 99%. We shouldn't worry about keeping track of whether something changed. 
That seemed useful at first so we can save the resources for setting back the source (map to bytes) when not needed. Also, we are trying to unify metadata fields and source in the same map and that is going to complicate how we keep track of changes that happen in the source only. Best solution is to remove the flag. --- .../elasticsearch/ingest/IngestDocument.java | 12 -------- .../ingest/PipelineExecutionService.java | 4 +-- .../simulate/WriteableIngestDocument.java | 2 -- .../ingest/IngestDocumentTests.java | 29 ------------------- 4 files changed, 1 insertion(+), 46 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 1077b16453b..8eed959f1f4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -36,8 +36,6 @@ public final class IngestDocument { private final Map source; private final Map ingestMetadata; - private boolean sourceModified = false; - public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp, String ttl, Map source) { this.esMetadata = new HashMap<>(); this.esMetadata.put(MetaData.INDEX.getFieldName(), index); @@ -190,7 +188,6 @@ public final class IngestDocument { Map map = (Map) context; if (map.containsKey(leafKey)) { map.remove(leafKey); - this.sourceModified = true; return; } throw new IllegalArgumentException("field [" + leafKey + "] not present as part of path [" + path + "]"); @@ -208,7 +205,6 @@ public final class IngestDocument { throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); } list.remove(index); - this.sourceModified = true; return; } @@ -292,7 +288,6 @@ public final class IngestDocument { } else { HashMap newMap = new HashMap<>(); 
map.put(pathElement, newMap); - sourceModified = true; context = newMap; } } else if (context instanceof List) { @@ -327,13 +322,11 @@ public final class IngestDocument { @SuppressWarnings("unchecked") List list = (List) object; list.add(value); - sourceModified = true; return; } } } map.put(leafKey, value); - sourceModified = true; } else if (context instanceof List) { @SuppressWarnings("unchecked") List list = (List) context; @@ -347,7 +340,6 @@ public final class IngestDocument { throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); } list.set(index, value); - this.sourceModified = true; } else { throw new IllegalArgumentException("cannot set [" + leafKey + "] with parent object of type [" + context.getClass().getName() + "] as part of path [" + path + "]"); } @@ -386,10 +378,6 @@ public final class IngestDocument { return source; } - public boolean isSourceModified() { - return sourceModified; - } - static Object deepCopy(Object value) { if (value instanceof Map) { @SuppressWarnings("unchecked") diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index b94dbd391cd..d155ed543c3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -62,9 +62,7 @@ public class PipelineExecutionService { IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); try { pipeline.execute(ingestDocument); - if (ingestDocument.isSourceModified()) { - indexRequest.source(ingestDocument.getSource()); - } + indexRequest.source(ingestDocument.getSource()); indexRequest.index(ingestDocument.getEsMetadata(IngestDocument.MetaData.INDEX)); 
indexRequest.type(ingestDocument.getEsMetadata(IngestDocument.MetaData.TYPE)); indexRequest.id(ingestDocument.getEsMetadata(IngestDocument.MetaData.ID)); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java index 4f0b2eeee60..0f66206848d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java @@ -74,7 +74,6 @@ final class WriteableIngestDocument implements Writeable esMetadata : ingestDocument.getEsMetadata().entrySet()) { builder.field(esMetadata.getKey(), esMetadata.getValue()); } @@ -112,7 +111,6 @@ final class WriteableIngestDocument implements Writeable Date: Wed, 9 Dec 2015 10:54:30 +0100 Subject: [PATCH 114/347] avoid null values in simulate serialization prototypes, use empty maps instead --- .../transport/simulate/SimulateDocumentSimpleResult.java | 7 ++----- .../transport/simulate/SimulateProcessorResult.java | 8 ++------ .../transport/simulate/WriteableIngestDocument.java | 7 ++----- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java index 589c2ec8ac4..2a052f88efe 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java @@ -25,18 +25,15 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; 
+import java.util.Collections; public class SimulateDocumentSimpleResult implements SimulateDocumentResult { - private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult(); + private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult(new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))); private WriteableIngestDocument ingestDocument; private Exception failure; - private SimulateDocumentSimpleResult() { - this.ingestDocument = null; - } - public SimulateDocumentSimpleResult(IngestDocument ingestDocument) { this.ingestDocument = new WriteableIngestDocument(ingestDocument); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java index 74c969d1ece..7af9aef2561 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java @@ -28,20 +28,16 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; +import java.util.Collections; public class SimulateProcessorResult implements Writeable, ToXContent { - private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult(); + private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult("_na", new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))); private String processorId; private WriteableIngestDocument ingestDocument; private Exception failure; - private SimulateProcessorResult() { - this.processorId = null; - this.ingestDocument = null; - } - 
public SimulateProcessorResult(String processorId, IngestDocument ingestDocument) { this.processorId = processorId; this.ingestDocument = new WriteableIngestDocument(ingestDocument); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java index 0f66206848d..09c2ea4b3f1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java @@ -28,19 +28,16 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; +import java.util.Collections; import java.util.Map; import java.util.Objects; final class WriteableIngestDocument implements Writeable, ToXContent { - private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(); + private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap())); private final IngestDocument ingestDocument; - private WriteableIngestDocument() { - this.ingestDocument = null; - } - WriteableIngestDocument(IngestDocument ingestDocument) { assert ingestDocument != null; this.ingestDocument = ingestDocument; From 57d697125225d694376287eda7e644108ccbebda Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 9 Dec 2015 18:07:36 +0100 Subject: [PATCH 115/347] Streamline support for get/set/remove of metadata fields and ingest metadata fields Unify metadata map and source, add also support for _ingest prefix. Depending on the prefix, either _source, nothing or _ingest, we will figure out which map to use for values retrieval, but also modifications. 
--- .../elasticsearch/ingest/IngestDocument.java | 207 +++++++------- .../processor/meta/MetaDataProcessor.java | 7 +- .../ingest/PipelineExecutionService.java | 21 +- .../ingest/transport/IngestActionFilter.java | 9 +- .../SimulateDocumentSimpleResult.java | 2 +- .../simulate/SimulateProcessorResult.java | 2 +- .../simulate/WriteableIngestDocument.java | 18 +- .../elasticsearch/ingest/IngestClientIT.java | 2 +- .../ingest/IngestDocumentTests.java | 262 ++++++++++++++---- .../ingest/RandomDocumentPicks.java | 12 +- .../processor/geoip/GeoIpProcessorTests.java | 12 +- .../meta/MetaDataProcessorTests.java | 3 +- .../rename/RenameProcessorTests.java | 24 +- .../ingest/PipelineExecutionServiceTests.java | 31 +-- .../transport/IngestActionFilterTests.java | 6 +- .../SimulateExecutionServiceTests.java | 5 + .../SimulatePipelineRequestParsingTests.java | 18 +- .../WriteableIngestDocumentTests.java | 44 ++- .../test/ingest/80_simulate.yaml | 3 - 19 files changed, 420 insertions(+), 268 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 8eed959f1f4..7a695ce416b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -26,34 +26,34 @@ import java.text.SimpleDateFormat; import java.util.*; /** - * Represents a single document being captured before indexing and holds the source and meta data (like id, type and index). + * Represents a single document being captured before indexing and holds the source and metadata (like id, type and index). 
*/ public final class IngestDocument { static final String TIMESTAMP = "timestamp"; - private final Map esMetadata; - private final Map source; + private final Map sourceAndMetadata; private final Map ingestMetadata; public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp, String ttl, Map source) { - this.esMetadata = new HashMap<>(); - this.esMetadata.put(MetaData.INDEX.getFieldName(), index); - this.esMetadata.put(MetaData.TYPE.getFieldName(), type); - this.esMetadata.put(MetaData.ID.getFieldName(), id); + this.sourceAndMetadata = new HashMap<>(); + this.sourceAndMetadata.putAll(source); + this.sourceAndMetadata.put(MetaData.INDEX.getFieldName(), index); + this.sourceAndMetadata.put(MetaData.TYPE.getFieldName(), type); + this.sourceAndMetadata.put(MetaData.ID.getFieldName(), id); if (routing != null) { - this.esMetadata.put(MetaData.ROUTING.getFieldName(), routing); + this.sourceAndMetadata.put(MetaData.ROUTING.getFieldName(), routing); } if (parent != null) { - this.esMetadata.put(MetaData.PARENT.getFieldName(), parent); + this.sourceAndMetadata.put(MetaData.PARENT.getFieldName(), parent); } if (timestamp != null) { - this.esMetadata.put(MetaData.TIMESTAMP.getFieldName(), timestamp); + this.sourceAndMetadata.put(MetaData.TIMESTAMP.getFieldName(), timestamp); } if (ttl != null) { - this.esMetadata.put(MetaData.TTL.getFieldName(), ttl); + this.sourceAndMetadata.put(MetaData.TTL.getFieldName(), ttl); } - this.source = source; + this.ingestMetadata = new HashMap<>(); DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); df.setTimeZone(TimeZone.getTimeZone("UTC")); @@ -61,21 +61,20 @@ public final class IngestDocument { } /** - * Copy constructor that creates a new {@link IngestDocument} which has exactly the same properties of the one provided as argument + * Copy constructor that creates a new {@link IngestDocument} which has exactly the same properties as the one provided as argument 
*/ public IngestDocument(IngestDocument other) { - this(other.esMetadata, other.source, other.ingestMetadata); + this(new HashMap<>(other.sourceAndMetadata), new HashMap<>(other.ingestMetadata)); } /** * Constructor needed for testing that allows to create a new {@link IngestDocument} given the provided elasticsearch metadata, * source and ingest metadata. This is needed because the ingest metadata will be initialized with the current timestamp at - * init time, which makes comparisons impossible in tests. + * init time, which makes equality comparisons impossible in tests. */ - public IngestDocument(Map esMetadata, Map source, Map ingestMetadata) { - this.esMetadata = new HashMap<>(esMetadata); - this.source = new HashMap<>(source); - this.ingestMetadata = new HashMap<>(ingestMetadata); + public IngestDocument(Map sourceAndMetadata, Map ingestMetadata) { + this.sourceAndMetadata = sourceAndMetadata; + this.ingestMetadata = ingestMetadata; } /** @@ -83,45 +82,29 @@ public final class IngestDocument { * @param path The path within the document in dot-notation * @param clazz The expected class of the field value * @return the value for the provided path if existing, null otherwise - * @throws IllegalArgumentException if the field is null, empty, or if the source contains a field within the path - * which is not of the expected type + * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist + * or if the field that is found at the provided path is not of the expected type. 
*/ public T getFieldValue(String path, Class clazz) { - if (Strings.isEmpty(path)) { - throw new IllegalArgumentException("path cannot be null nor empty"); - } - String[] pathElements = Strings.splitStringToArray(path, '.'); - assert pathElements.length > 0; - - Object context = source; - for (String pathElement : pathElements) { + FieldPath fieldPath = new FieldPath(path); + Object context = fieldPath.initialContext; + for (String pathElement : fieldPath.pathElements) { context = resolve(pathElement, path, context); } - - if (context == null) { - return null; - } - if (clazz.isInstance(context)) { - return clazz.cast(context); - } - throw new IllegalArgumentException("field [" + path + "] of type [" + context.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); + return cast(path, context, clazz); } /** * Checks whether the document contains a value for the provided path * @param path The path within the document in dot-notation * @return true if the document contains a value for the field, false otherwise + * @throws IllegalArgumentException if the path is null, empty or invalid. 
*/ public boolean hasField(String path) { - if (Strings.isEmpty(path)) { - return false; - } - String[] pathElements = Strings.splitStringToArray(path, '.'); - assert pathElements.length > 0; - - Object context = source; - for (int i = 0; i < pathElements.length - 1; i++) { - String pathElement = pathElements[i]; + FieldPath fieldPath = new FieldPath(path); + Object context = fieldPath.initialContext; + for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { + String pathElement = fieldPath.pathElements[i]; if (context == null) { return false; } @@ -147,7 +130,7 @@ public final class IngestDocument { } } - String leafKey = pathElements[pathElements.length - 1]; + String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1]; if (context instanceof Map) { @SuppressWarnings("unchecked") Map map = (Map) context; @@ -167,22 +150,18 @@ public final class IngestDocument { } /** - * Removes the field identified by the provided path + * Removes the field identified by the provided path. * @param path the path of the field to be removed + * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. 
*/ public void removeField(String path) { - if (Strings.isEmpty(path)) { - throw new IllegalArgumentException("path cannot be null nor empty"); - } - String[] pathElements = Strings.splitStringToArray(path, '.'); - assert pathElements.length > 0; - - Object context = source; - for (int i = 0; i < pathElements.length - 1; i++) { - context = resolve(pathElements[i], path, context); + FieldPath fieldPath = new FieldPath(path); + Object context = fieldPath.initialContext; + for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { + context = resolve(fieldPath.pathElements[i], path, context); } - String leafKey = pathElements[pathElements.length - 1]; + String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1]; if (context instanceof Map) { @SuppressWarnings("unchecked") Map map = (Map) context; @@ -249,6 +228,7 @@ public final class IngestDocument { * but if the last element is a list, the value will be appended to the existing list. * @param path The path within the document in dot-notation * @param value The value to put in for the path key + * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. */ public void appendFieldValue(String path, Object value) { setFieldValue(path, value, true); @@ -260,23 +240,20 @@ public final class IngestDocument { * the value will replace the existing list. * @param path The path within the document in dot-notation * @param value The value to put in for the path key + * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. 
*/ public void setFieldValue(String path, Object value) { setFieldValue(path, value, false); } private void setFieldValue(String path, Object value, boolean append) { - if (Strings.isEmpty(path)) { - throw new IllegalArgumentException("path cannot be null nor empty"); - } - String[] pathElements = Strings.splitStringToArray(path, '.'); - assert pathElements.length > 0; + FieldPath fieldPath = new FieldPath(path); + Object context = fieldPath.initialContext; value = deepCopy(value); - Object context = source; - for (int i = 0; i < pathElements.length - 1; i++) { - String pathElement = pathElements[i]; + for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { + String pathElement = fieldPath.pathElements[i]; if (context == null) { throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + path + "]"); } @@ -308,7 +285,7 @@ public final class IngestDocument { } } - String leafKey = pathElements[pathElements.length - 1]; + String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1]; if (context == null) { throw new IllegalArgumentException("cannot set [" + leafKey + "] with null parent as part of path [" + path + "]"); } @@ -345,37 +322,43 @@ public final class IngestDocument { } } - public String getEsMetadata(MetaData esMetadata) { - return this.esMetadata.get(esMetadata.getFieldName()); - } - - public Map getEsMetadata() { - return Collections.unmodifiableMap(esMetadata); - } - - public void setEsMetadata(MetaData metaData, String value) { - this.esMetadata.put(metaData.getFieldName(), value); - } - - public String getIngestMetadata(String ingestMetadata) { - return this.ingestMetadata.get(ingestMetadata); - } - - public Map getIngestMetadata() { - return Collections.unmodifiableMap(this.ingestMetadata); - } - - public void setIngestMetadata(String metadata, String value) { - this.ingestMetadata.put(metadata, value); + private static T cast(String path, Object object, Class clazz) { + if (object == 
null) { + return null; + } + if (clazz.isInstance(object)) { + return clazz.cast(object); + } + throw new IllegalArgumentException("field [" + path + "] of type [" + object.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); } /** - * Returns the document. Should be used only for reading. Any change made to the returned map will - * not be reflected to the sourceModified flag. Modify the document instead using {@link #setFieldValue(String, Object)} - * and {@link #removeField(String)} + * one time operation that extracts the metadata fields from the ingest document and returns them. + * Metadata fields that used to be accessible as ordinary top level fields will be removed as part of this call. */ - public Map getSource() { - return source; + public Map extractMetadata() { + Map metadataMap = new HashMap<>(); + for (MetaData metaData : MetaData.values()) { + metadataMap.put(metaData, cast(metaData.getFieldName(), sourceAndMetadata.remove(metaData.getFieldName()), String.class)); + } + return metadataMap; + } + + /** + * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. + * Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} + */ + public Map getIngestMetadata() { + return this.ingestMetadata; + } + + /** + * Returns the document including its metadata fields, unless {@link #extractMetadata()} has been called, in which case the + * metadata fields will not be present anymore. Should be used only for reading. 
+ * Modify the document instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} + */ + public Map getSourceAndMetadata() { + return this.sourceAndMetadata; } static Object deepCopy(Object value) { @@ -412,27 +395,24 @@ public final class IngestDocument { } IngestDocument other = (IngestDocument) obj; - return Objects.equals(source, other.source) && - Objects.equals(esMetadata, other.esMetadata) && + return Objects.equals(sourceAndMetadata, other.sourceAndMetadata) && Objects.equals(ingestMetadata, other.ingestMetadata); } @Override public int hashCode() { - return Objects.hash(esMetadata, source); + return Objects.hash(sourceAndMetadata, ingestMetadata); } @Override public String toString() { return "IngestDocument{" + - "esMetadata=" + esMetadata + - ", source=" + source + + " sourceAndMetadata=" + sourceAndMetadata + ", ingestMetadata=" + ingestMetadata + '}'; } public enum MetaData { - INDEX("_index"), TYPE("_type"), ID("_id"), @@ -472,4 +452,31 @@ public final class IngestDocument { } } } + + private class FieldPath { + private final String[] pathElements; + private final Object initialContext; + + private FieldPath(String path) { + if (Strings.isEmpty(path)) { + throw new IllegalArgumentException("path cannot be null nor empty"); + } + String newPath; + if (path.startsWith("_ingest.")) { + initialContext = ingestMetadata; + newPath = path.substring(8, path.length()); + } else { + initialContext = sourceAndMetadata; + newPath = path; + } + if (newPath.startsWith("_source.")) { + newPath = newPath.substring(8, path.length()); + } + + this.pathElements = Strings.splitStringToArray(newPath, '.'); + if (pathElements.length == 0) { + throw new IllegalArgumentException("path [" + path + "] is not valid"); + } + } + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java index 
7cfab487678..4de13f5b107 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java @@ -14,6 +14,9 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; + +//TODO this processor needs to be removed, as the set processor allows now to set any field, including metadata ones. +//The only reason for it to be still here is that it supports templating, we will remove once any processor supports templating. public final class MetaDataProcessor implements Processor { public final static String TYPE = "meta"; @@ -26,11 +29,11 @@ public final class MetaDataProcessor implements Processor { @Override public void execute(IngestDocument ingestDocument) { - Map model = ingestDocument.getSource(); + Map model = ingestDocument.getSourceAndMetadata(); for (Map.Entry entry : templates.entrySet()) { StringWriter writer = new StringWriter(); entry.getValue().execute(writer, model); - ingestDocument.setEsMetadata(entry.getKey(), writer.toString()); + ingestDocument.setFieldValue(entry.getKey().getFieldName(), writer.toString()); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index d155ed543c3..bdf37dc87f7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -43,7 +43,7 @@ public class PipelineExecutionService { this.threadPool = threadPool; } - public void execute(IndexRequest indexRequest, String pipelineId, ActionListener listener) { + public void execute(IndexRequest indexRequest, String pipelineId, ActionListener listener) { Pipeline pipeline = store.get(pipelineId); if (pipeline == null) { listener.onFailure(new 
IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist")); @@ -62,15 +62,16 @@ public class PipelineExecutionService { IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); try { pipeline.execute(ingestDocument); - indexRequest.source(ingestDocument.getSource()); - indexRequest.index(ingestDocument.getEsMetadata(IngestDocument.MetaData.INDEX)); - indexRequest.type(ingestDocument.getEsMetadata(IngestDocument.MetaData.TYPE)); - indexRequest.id(ingestDocument.getEsMetadata(IngestDocument.MetaData.ID)); - indexRequest.routing(ingestDocument.getEsMetadata(IngestDocument.MetaData.ROUTING)); - indexRequest.parent(ingestDocument.getEsMetadata(IngestDocument.MetaData.PARENT)); - indexRequest.timestamp(ingestDocument.getEsMetadata(IngestDocument.MetaData.TIMESTAMP)); - indexRequest.ttl(ingestDocument.getEsMetadata(IngestDocument.MetaData.TTL)); - listener.onResponse(ingestDocument); + Map metadataMap = ingestDocument.extractMetadata(); + indexRequest.index(metadataMap.get(IngestDocument.MetaData.INDEX)); + indexRequest.type(metadataMap.get(IngestDocument.MetaData.TYPE)); + indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); + indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING)); + indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT)); + indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP)); + indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL)); + indexRequest.source(ingestDocument.getSourceAndMetadata()); + listener.onResponse(null); } catch (Throwable e) { listener.onFailure(e); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 91453270803..e552c76d4bf 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -31,7 +31,6 @@ import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; @@ -84,9 +83,9 @@ public final class IngestActionFilter extends AbstractComponent implements Actio chain.proceed(action, indexRequest, listener); return; } - executionService.execute(indexRequest, pipelineId, new ActionListener() { + executionService.execute(indexRequest, pipelineId, new ActionListener() { @Override - public void onResponse(IngestDocument ingestDocument) { + public void onResponse(Void aVoid) { indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); chain.proceed(action, indexRequest, listener); } @@ -121,9 +120,9 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } IndexRequest indexRequest = (IndexRequest) actionRequest; - executionService.execute(indexRequest, pipelineId, new ActionListener() { + executionService.execute(indexRequest, pipelineId, new ActionListener() { @Override - public void onResponse(IngestDocument ingestDocument) { + public void onResponse(Void aVoid) { processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java index 2a052f88efe..eb6170e1fd1 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java @@ -29,7 +29,7 @@ import java.util.Collections; public class SimulateDocumentSimpleResult implements SimulateDocumentResult { - private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult(new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))); + private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult(new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap()))); private WriteableIngestDocument ingestDocument; private Exception failure; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java index 7af9aef2561..78eafd50655 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java @@ -32,7 +32,7 @@ import java.util.Collections; public class SimulateProcessorResult implements Writeable, ToXContent { - private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult("_na", new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))); + private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult("_na", new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap()))); private String processorId; private WriteableIngestDocument ingestDocument; diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java index 09c2ea4b3f1..2b9ac56b341 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java @@ -34,7 +34,7 @@ import java.util.Objects; final class WriteableIngestDocument implements Writeable, ToXContent { - private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap())); + private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap())); private final IngestDocument ingestDocument; @@ -53,28 +53,26 @@ final class WriteableIngestDocument implements Writeable esMetadata = (Map) in.readGenericValue(); - Map source = in.readMap(); + Map sourceAndMetadata = in.readMap(); @SuppressWarnings("unchecked") Map ingestMetadata = (Map) in.readGenericValue(); - return new WriteableIngestDocument(new IngestDocument(esMetadata, source, ingestMetadata)); + return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeGenericValue(ingestDocument.getEsMetadata()); - out.writeMap(ingestDocument.getSource()); + out.writeMap(ingestDocument.getSourceAndMetadata()); out.writeGenericValue(ingestDocument.getIngestMetadata()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.DOCUMENT); - for (Map.Entry esMetadata : ingestDocument.getEsMetadata().entrySet()) { - builder.field(esMetadata.getKey(), 
esMetadata.getValue()); + Map metadataMap = ingestDocument.extractMetadata(); + for (Map.Entry metadata : metadataMap.entrySet()) { + builder.field(metadata.getKey().getFieldName(), metadata.getValue()); } - builder.field(Fields.SOURCE, ingestDocument.getSource()); + builder.field(Fields.SOURCE, ingestDocument.getSourceAndMetadata()); builder.startObject(Fields.INGEST); for (Map.Entry ingestMetadata : ingestDocument.getIngestMetadata().entrySet()) { builder.field(ingestMetadata.getKey(), ingestMetadata.getValue()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 4c14d254718..9db406a7a9e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -143,7 +143,7 @@ public class IngestClientIT extends ESIntegTestCase { source.put("status", 400); source.put("msg", "foo"); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, source); - assertThat(simulateDocumentSimpleResult.getIngestDocument().getSource(), equalTo(ingestDocument.getSource())); + assertThat(simulateDocumentSimpleResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 1a95722a319..7d9bd58dc79 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -35,6 +35,9 @@ public class IngestDocumentTests extends ESTestCase { @Before public void setIngestDocument() { Map document = new HashMap<>(); + Map ingestMap = new HashMap<>(); + 
ingestMap.put("timestamp", "bogus_timestamp"); + document.put("_ingest", ingestMap); document.put("foo", "bar"); document.put("int", 123); Map innerObject = new HashMap<>(); @@ -54,6 +57,42 @@ public class IngestDocumentTests extends ESTestCase { public void testSimpleGetFieldValue() { assertThat(ingestDocument.getFieldValue("foo", String.class), equalTo("bar")); assertThat(ingestDocument.getFieldValue("int", Integer.class), equalTo(123)); + assertThat(ingestDocument.getFieldValue("_source.foo", String.class), equalTo("bar")); + assertThat(ingestDocument.getFieldValue("_source.int", Integer.class), equalTo(123)); + assertThat(ingestDocument.getFieldValue("_index", String.class), equalTo("index")); + assertThat(ingestDocument.getFieldValue("_type", String.class), equalTo("type")); + assertThat(ingestDocument.getFieldValue("_id", String.class), equalTo("id")); + assertThat(ingestDocument.getFieldValue("_ingest.timestamp", String.class), both(notNullValue()).and(not(equalTo("bogus_timestamp")))); + assertThat(ingestDocument.getFieldValue("_source._ingest.timestamp", String.class), equalTo("bogus_timestamp")); + } + + public void testGetSourceObject() { + try { + ingestDocument.getFieldValue("_source", Object.class); + fail("get field value should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [_source] not present as part of path [_source]")); + } + } + + public void testGetIngestObject() { + assertThat(ingestDocument.getFieldValue("_ingest", Map.class), notNullValue()); + } + + public void testGetEmptyPathAfterStrippingOutPrefix() { + try { + ingestDocument.getFieldValue("_source.", Object.class); + fail("get field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_source.] 
is not valid")); + } + + try { + ingestDocument.getFieldValue("_ingest.", Object.class); + fail("get field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid")); + } } public void testGetFieldValueNullValue() { @@ -151,10 +190,16 @@ public class IngestDocumentTests extends ESTestCase { public void testHasField() { assertTrue(ingestDocument.hasField("fizz")); + assertTrue(ingestDocument.hasField("_index")); + assertTrue(ingestDocument.hasField("_type")); + assertTrue(ingestDocument.hasField("_id")); + assertTrue(ingestDocument.hasField("_source.fizz")); + assertTrue(ingestDocument.hasField("_ingest.timestamp")); } public void testHasFieldNested() { assertTrue(ingestDocument.hasField("fizz.buzz")); + assertTrue(ingestDocument.hasField("_source._ingest.timestamp")); } public void testListHasField() { @@ -190,7 +235,12 @@ public class IngestDocumentTests extends ESTestCase { } public void testHasFieldNull() { - assertFalse(ingestDocument.hasField(null)); + try { + ingestDocument.hasField(null); + fail("has field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); + } } public void testHasFieldNullValue() { @@ -198,25 +248,63 @@ public class IngestDocumentTests extends ESTestCase { } public void testHasFieldEmpty() { - assertFalse(ingestDocument.hasField("")); + try { + ingestDocument.hasField(""); + fail("has field should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); + } + } + + public void testHasFieldSourceObject() { + assertThat(ingestDocument.hasField("_source"), equalTo(false)); + } + + public void testHasFieldIngestObject() { + assertThat(ingestDocument.hasField("_ingest"), equalTo(true)); + } + + public void testHasFieldEmptyPathAfterStrippingOutPrefix() { + try { + ingestDocument.hasField("_source."); + fail("has field 
value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_source.] is not valid")); + } + + try { + ingestDocument.hasField("_ingest."); + fail("has field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid")); + } } public void testSimpleSetFieldValue() { ingestDocument.setFieldValue("new_field", "foo"); - assertThat(ingestDocument.getSource().get("new_field"), equalTo("foo")); + assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), equalTo("foo")); + ingestDocument.setFieldValue("_ttl", "ttl"); + assertThat(ingestDocument.getSourceAndMetadata().get("_ttl"), equalTo("ttl")); + ingestDocument.setFieldValue("_source.another_field", "bar"); + assertThat(ingestDocument.getSourceAndMetadata().get("another_field"), equalTo("bar")); + ingestDocument.setFieldValue("_ingest.new_field", "new_value"); + assertThat(ingestDocument.getIngestMetadata().size(), equalTo(2)); + assertThat(ingestDocument.getIngestMetadata().get("new_field"), equalTo("new_value")); + ingestDocument.setFieldValue("_ingest.timestamp", "timestamp"); + assertThat(ingestDocument.getIngestMetadata().get("timestamp"), equalTo("timestamp")); } public void testSetFieldValueNullValue() { ingestDocument.setFieldValue("new_field", null); - assertThat(ingestDocument.getSource().containsKey("new_field"), equalTo(true)); - assertThat(ingestDocument.getSource().get("new_field"), nullValue()); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), nullValue()); } @SuppressWarnings("unchecked") public void testNestedSetFieldValue() { ingestDocument.setFieldValue("a.b.c.d", "foo"); - assertThat(ingestDocument.getSource().get("a"), instanceOf(Map.class)); - Map a = (Map) ingestDocument.getSource().get("a"); + assertThat(ingestDocument.getSourceAndMetadata().get("a"), 
instanceOf(Map.class)); + Map a = (Map) ingestDocument.getSourceAndMetadata().get("a"); assertThat(a.get("b"), instanceOf(Map.class)); Map b = (Map) a.get("b"); assertThat(b.get("c"), instanceOf(Map.class)); @@ -228,14 +316,14 @@ public class IngestDocumentTests extends ESTestCase { public void testSetFieldValueOnExistingField() { ingestDocument.setFieldValue("foo", "newbar"); - assertThat(ingestDocument.getSource().get("foo"), equalTo("newbar")); + assertThat(ingestDocument.getSourceAndMetadata().get("foo"), equalTo("newbar")); } @SuppressWarnings("unchecked") public void testSetFieldValueOnExistingParent() { ingestDocument.setFieldValue("fizz.new", "bar"); - assertThat(ingestDocument.getSource().get("fizz"), instanceOf(Map.class)); - Map innerMap = (Map) ingestDocument.getSource().get("fizz"); + assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); + Map innerMap = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(innerMap.get("new"), instanceOf(String.class)); String value = (String) innerMap.get("new"); assertThat(value, equalTo("bar")); @@ -268,16 +356,42 @@ public class IngestDocumentTests extends ESTestCase { } } + public void testSetSourceObject() { + ingestDocument.setFieldValue("_source", "value"); + assertThat(ingestDocument.getSourceAndMetadata().get("_source"), equalTo("value")); + } + + public void testSetIngestObject() { + ingestDocument.setFieldValue("_ingest", "value"); + assertThat(ingestDocument.getSourceAndMetadata().get("_ingest"), equalTo("value")); + } + + public void testSetEmptyPathAfterStrippingOutPrefix() { + try { + ingestDocument.setFieldValue("_source.", "value"); + fail("set field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_source.] 
is not valid")); + } + + try { + ingestDocument.setFieldValue("_ingest.", Object.class); + fail("set field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid")); + } + } + public void testListSetFieldValueNoIndexProvided() { ingestDocument.setFieldValue("list", "value"); - Object object = ingestDocument.getSource().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value")); } public void testListAppendFieldValue() { ingestDocument.appendFieldValue("list", "new_value"); - Object object = ingestDocument.getSource().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -289,7 +403,7 @@ public class IngestDocumentTests extends ESTestCase { public void testListSetFieldValueIndexProvided() { ingestDocument.setFieldValue("list.1", "value"); - Object object = ingestDocument.getSource().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -300,7 +414,7 @@ public class IngestDocumentTests extends ESTestCase { public void testSetFieldValueListAsPartOfPath() { ingestDocument.setFieldValue("list.0.field", "new_value"); - Object object = ingestDocument.getSource().get("list"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -348,28 +462,38 @@ public class IngestDocumentTests extends ESTestCase { public void testRemoveField() { ingestDocument.removeField("foo"); - assertThat(ingestDocument.getSource().size(), equalTo(3)); - assertThat(ingestDocument.getSource().containsKey("foo"), equalTo(false)); + 
assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(7)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("foo"), equalTo(false)); + ingestDocument.removeField("_index"); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(6)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("_index"), equalTo(false)); + ingestDocument.removeField("_source.fizz"); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(false)); + assertThat(ingestDocument.getIngestMetadata().size(), equalTo(1)); + ingestDocument.removeField("_ingest.timestamp"); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); + assertThat(ingestDocument.getIngestMetadata().size(), equalTo(0)); } public void testRemoveInnerField() { ingestDocument.removeField("fizz.buzz"); - assertThat(ingestDocument.getSource().size(), equalTo(4)); - assertThat(ingestDocument.getSource().get("fizz"), instanceOf(Map.class)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); @SuppressWarnings("unchecked") - Map map = (Map) ingestDocument.getSource().get("fizz"); + Map map = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); assertThat(map.size(), equalTo(2)); assertThat(map.containsKey("buzz"), equalTo(false)); ingestDocument.removeField("fizz.foo_null"); assertThat(map.size(), equalTo(1)); - assertThat(ingestDocument.getSource().size(), equalTo(4)); - assertThat(ingestDocument.getSource().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.1"); assertThat(map.size(), equalTo(0)); - assertThat(ingestDocument.getSource().size(), equalTo(4)); - 
assertThat(ingestDocument.getSource().containsKey("fizz"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); } public void testRemoveNonExistingField() { @@ -390,11 +514,42 @@ public class IngestDocumentTests extends ESTestCase { } } + public void testRemoveSourceObject() { + try { + ingestDocument.removeField("_source"); + fail("remove field should have failed"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field [_source] not present as part of path [_source]")); + } + } + + public void testRemoveIngestObject() { + ingestDocument.removeField("_ingest"); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(7)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("_ingest"), equalTo(false)); + } + + public void testRemoveEmptyPathAfterStrippingOutPrefix() { + try { + ingestDocument.removeField("_source."); + fail("set field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_source.] is not valid")); + } + + try { + ingestDocument.removeField("_ingest."); + fail("set field value should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("path [_ingest.] 
is not valid")); + } + } + public void testListRemoveField() { ingestDocument.removeField("list.0.field"); - assertThat(ingestDocument.getSource().size(), equalTo(4)); - assertThat(ingestDocument.getSource().containsKey("list"), equalTo(true)); - Object object = ingestDocument.getSource().get("list"); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); + Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); @SuppressWarnings("unchecked") List list = (List) object; @@ -461,30 +616,32 @@ public class IngestDocumentTests extends ESTestCase { } public void testEqualsAndHashcode() throws Exception { - Map esMetadata = new HashMap<>(); + Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - esMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); } Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); } - Map document = RandomDocumentPicks.randomDocument(random()); - IngestDocument ingestDocument = new IngestDocument(esMetadata, document, ingestMetadata); + IngestDocument ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata); boolean changed = false; - Map otherEsMetadata; + Map otherSourceAndMetadata; if (randomBoolean()) { - otherEsMetadata = new HashMap<>(); - numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); - for (int i = 0; i < numFields; i++) { - 
otherEsMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); - } + otherSourceAndMetadata = RandomDocumentPicks.randomSource(random()); changed = true; } else { - otherEsMetadata = Collections.unmodifiableMap(esMetadata); + otherSourceAndMetadata = new HashMap<>(sourceAndMetadata); + } + if (randomBoolean()) { + numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); + for (int i = 0; i < numFields; i++) { + otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + } + changed = true; } Map otherIngestMetadata; @@ -499,15 +656,7 @@ public class IngestDocumentTests extends ESTestCase { otherIngestMetadata = Collections.unmodifiableMap(ingestMetadata); } - Map otherDocument; - if (randomBoolean()) { - otherDocument = RandomDocumentPicks.randomDocument(random()); - changed = true; - } else { - otherDocument = Collections.unmodifiableMap(document); - } - - IngestDocument otherIngestDocument = new IngestDocument(otherEsMetadata, otherDocument, otherIngestMetadata); + IngestDocument otherIngestDocument = new IngestDocument(otherSourceAndMetadata, otherIngestMetadata); if (changed) { assertThat(ingestDocument, not(equalTo(otherIngestDocument))); assertThat(otherIngestDocument, not(equalTo(ingestDocument))); @@ -515,7 +664,7 @@ public class IngestDocumentTests extends ESTestCase { assertThat(ingestDocument, equalTo(otherIngestDocument)); assertThat(otherIngestDocument, equalTo(ingestDocument)); assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode())); - IngestDocument thirdIngestDocument = new IngestDocument(Collections.unmodifiableMap(esMetadata), Collections.unmodifiableMap(document), Collections.unmodifiableMap(ingestMetadata)); + IngestDocument thirdIngestDocument = new IngestDocument(Collections.unmodifiableMap(sourceAndMetadata), Collections.unmodifiableMap(ingestMetadata)); assertThat(thirdIngestDocument, 
equalTo(ingestDocument)); assertThat(ingestDocument, equalTo(thirdIngestDocument)); assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode())); @@ -525,7 +674,7 @@ public class IngestDocumentTests extends ESTestCase { public void testDeepCopy() { int iterations = scaledRandomIntBetween(8, 64); for (int i = 0; i < iterations; i++) { - Map map = RandomDocumentPicks.randomDocument(random()); + Map map = RandomDocumentPicks.randomSource(random()); Object copy = IngestDocument.deepCopy(map); assertThat("iteration: " + i, copy, equalTo(map)); assertThat("iteration: " + i, copy, not(sameInstance(map))); @@ -556,19 +705,11 @@ public class IngestDocumentTests extends ESTestCase { assertThat(myPreciousList.get(0), equalTo("value")); } - public void testIngestCustomMetadata() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - String metadata = randomAsciiOfLengthBetween(1, 10); - String value = randomAsciiOfLengthBetween(1, 10); - ingestDocument.setIngestMetadata(metadata, value); - assertThat(ingestDocument.getIngestMetadata(metadata), equalTo(value)); - } - public void testIngestMetadataTimestamp() throws Exception { long before = System.currentTimeMillis(); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); long after = System.currentTimeMillis(); - String timestampString = ingestDocument.getIngestMetadata("timestamp"); + String timestampString = ingestDocument.getIngestMetadata().get("timestamp"); assertThat(timestampString, notNullValue()); assertThat(timestampString, endsWith("+0000")); DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); @@ -576,4 +717,11 @@ public class IngestDocumentTests extends ESTestCase { assertThat(timestamp.getTime(), greaterThanOrEqualTo(before)); assertThat(timestamp.getTime(), lessThanOrEqualTo(after)); } + + public void testCopyConstructor() { + IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random()); + IngestDocument copy = new IngestDocument(ingestDocument); + assertThat(ingestDocument.getSourceAndMetadata(), not(sameInstance(copy.getSourceAndMetadata()))); + assertThat(ingestDocument.getSourceAndMetadata(), equalTo(copy.getSourceAndMetadata())); + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java index c54a8bfa126..73a5395fd63 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -64,7 +64,7 @@ public final class RandomDocumentPicks { * in the document provided as an argument. */ public static String randomExistingFieldName(Random random, IngestDocument ingestDocument) { - Map source = new TreeMap<>(ingestDocument.getSource()); + Map source = new TreeMap<>(ingestDocument.getSourceAndMetadata()); Map.Entry randomEntry = RandomPicks.randomFrom(random, source.entrySet()); String key = randomEntry.getKey(); while (randomEntry.getValue() instanceof Map) { @@ -99,7 +99,7 @@ public final class RandomDocumentPicks { */ public static boolean canAddField(String path, IngestDocument ingestDocument) { String[] pathElements = Strings.splitStringToArray(path, '.'); - Map innerMap = ingestDocument.getSource(); + Map innerMap = ingestDocument.getSourceAndMetadata(); if (pathElements.length > 1) { for (int i = 0; i < pathElements.length - 1; i++) { Object currentLevel = innerMap.get(pathElements[i]); @@ -122,13 +122,13 @@ public final class RandomDocumentPicks { * Generates a random document and random metadata */ public static IngestDocument randomIngestDocument(Random random) { - return randomIngestDocument(random, randomDocument(random)); + return randomIngestDocument(random, randomSource(random)); } /** * Generates a document that holds random metadata and the document 
provided as a map argument */ - public static IngestDocument randomIngestDocument(Random random, Map document) { + public static IngestDocument randomIngestDocument(Random random, Map source) { String index = randomString(random); String type = randomString(random); String id = randomString(random); @@ -148,10 +148,10 @@ public final class RandomDocumentPicks { if (random.nextBoolean()) { ttl = randomString(random); } - return new IngestDocument(index, type, id, routing, parent, timestamp, ttl, document); + return new IngestDocument(index, type, id, routing, parent, timestamp, ttl, source); } - public static Map randomDocument(Random random) { + public static Map randomSource(Random random) { Map document = new HashMap<>(); addRandomFields(random, document, 0); return document; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java index e77678b2fa5..479541b0f32 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java @@ -43,10 +43,9 @@ public class GeoIpProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(2)); - assertThat(ingestDocument.getSource().get("source_field"), equalTo("82.170.213.79")); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("82.170.213.79")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSource().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(10)); assertThat(geoData.get("ip"), equalTo("82.170.213.79")); 
assertThat(geoData.get("country_iso_code"), equalTo("NL")); @@ -69,10 +68,9 @@ public class GeoIpProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - assertThat(ingestDocument.getSource().size(), equalTo(2)); - assertThat(ingestDocument.getSource().get("source_field"), equalTo("82.170.213.79")); + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("82.170.213.79")); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSource().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(4)); assertThat(geoData.get("ip"), equalTo("82.170.213.79")); assertThat(geoData.get("country_iso_code"), equalTo("NL")); @@ -89,7 +87,7 @@ public class GeoIpProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @SuppressWarnings("unchecked") - Map geoData = (Map) ingestDocument.getSource().get("target_field"); + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(0)); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java index 99340c5857e..c102849fdc4 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java @@ -26,8 +26,9 @@ public class MetaDataProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", "value")); processor.execute(ingestDocument); + Map metadataMap = 
ingestDocument.extractMetadata(); for (MetaData metaData : MetaData.values()) { - assertThat(ingestDocument.getEsMetadata(metaData), Matchers.equalTo("some value")); + assertThat(metadataMap.get(metaData), Matchers.equalTo("some value")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java index 2feacc88190..3968a2ec73f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -60,14 +60,14 @@ public class RenameProcessorTests extends ESTestCase { Processor processor = new RenameProcessor("list.0", "item"); processor.execute(ingestDocument); - Object actualObject = ingestDocument.getSource().get("list"); + Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); assertThat(actualObject, instanceOf(List.class)); @SuppressWarnings("unchecked") List actualList = (List) actualObject; assertThat(actualList.size(), equalTo(2)); assertThat(actualList.get(0), equalTo("item2")); assertThat(actualList.get(1), equalTo("item3")); - actualObject = ingestDocument.getSource().get("item"); + actualObject = ingestDocument.getSourceAndMetadata().get("item"); assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); @@ -120,7 +120,7 @@ public class RenameProcessorTests extends ESTestCase { } public void testRenameAtomicOperationSetFails() throws Exception { - Map document = new HashMap() { + Map source = new HashMap() { private static final long serialVersionUID = 362498820763181265L; @Override public Object put(String key, Object value) { @@ -130,22 +130,22 @@ public class RenameProcessorTests extends ESTestCase { return super.put(key, value); } }; - document.put("list", Collections.singletonList("item")); + 
source.put("list", Collections.singletonList("item")); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); Processor processor = new RenameProcessor("list", "new_field"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); } catch(UnsupportedOperationException e) { //the set failed, the old field has not been removed - assertThat(ingestDocument.getSource().containsKey("list"), equalTo(true)); - assertThat(ingestDocument.getSource().containsKey("new_field"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); } } public void testRenameAtomicOperationRemoveFails() throws Exception { - Map document = new HashMap() { + Map source = new HashMap() { private static final long serialVersionUID = 362498820763181265L; @Override public Object remove(Object key) { @@ -155,17 +155,17 @@ public class RenameProcessorTests extends ESTestCase { return super.remove(key); } }; - document.put("list", Collections.singletonList("item")); + source.put("list", Collections.singletonList("item")); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); Processor processor = new RenameProcessor("list", "new_field"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); } catch (UnsupportedOperationException e) { //the set failed, the old field has not been removed - assertThat(ingestDocument.getSource().containsKey("list"), equalTo(true)); - assertThat(ingestDocument.getSource().containsKey("new_field"), equalTo(false)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); + 
assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 7723ef5402f..8f8a3a3f2ea 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -57,7 +57,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(null); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(listener).onFailure(any(IllegalArgumentException.class)); verify(listener, times(0)).onResponse(any()); @@ -69,10 +69,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); - verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener).onResponse(eqID("_index", "_type", "_id", Collections.emptyMap())); + //TODO we remove metadata, this check is not valid anymore, what do we replace it with? 
+ //verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener).onResponse(null); verify(listener, times(0)).onFailure(any(Exception.class)); } @@ -82,9 +83,9 @@ public class PipelineExecutionServiceTests extends ESTestCase { IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) { if (metaData == IngestDocument.MetaData.TTL) { - ingestDocument.setEsMetadata(IngestDocument.MetaData.TTL, "5w"); + ingestDocument.setFieldValue(IngestDocument.MetaData.TTL.getFieldName(), "5w"); } else { - ingestDocument.setEsMetadata(metaData, "update" + metaData.getFieldName()); + ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName()); } } @@ -94,7 +95,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(any()); verify(listener).onResponse(any()); @@ -106,7 +107,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { assertThat(indexRequest.routing(), equalTo("update_routing")); assertThat(indexRequest.parent(), equalTo("update_parent")); assertThat(indexRequest.timestamp(), equalTo("update_timestamp")); - assertThat(indexRequest.ttl(), equalTo(new TimeValue(3024000000l))); + assertThat(indexRequest.ttl(), equalTo(new TimeValue(3024000000L))); } public void testExecuteFailure() throws Exception { @@ -115,10 +116,10 @@ public class PipelineExecutionServiceTests extends ESTestCase { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new 
RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener, times(0)).onResponse(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(listener, times(0)).onResponse(null); verify(listener).onFailure(any(RuntimeException.class)); } @@ -132,7 +133,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - ActionListener listener = (ActionListener)mock(ActionListener.class); + ActionListener listener = (ActionListener)mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); @@ -158,11 +159,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { indexRequest = new IndexRequest("_index", "_type", "_id") .source(Collections.emptyMap()) - .ttl(1000l); + .ttl(1000L); listener = mock(ActionListener.class); executionService.execute(indexRequest, "_id", listener); - assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000l))); + assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L))); verify(listener, times(1)).onResponse(any()); verify(listener, never()).onFailure(any(Throwable.class)); } @@ -184,11 +185,9 @@ public class PipelineExecutionServiceTests extends ESTestCase { if (o.getClass() == IngestDocument.class) { IngestDocument otherIngestDocument = (IngestDocument) o; //ingest metadata will not be the same (timestamp differs every time) - 
return Objects.equals(ingestDocument.getSource(), otherIngestDocument.getSource()) - && Objects.equals(ingestDocument.getEsMetadata(), otherIngestDocument.getEsMetadata()); + return Objects.equals(ingestDocument.getSourceAndMetadata(), otherIngestDocument.getSourceAndMetadata()); } return false; } } - } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index fe34295173a..569cf32ac72 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -122,9 +122,9 @@ public class IngestActionFilterTests extends ESTestCase { ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); Answer answer = invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onResponse(new IngestDocument(indexRequest.index(), indexRequest.type(), indexRequest.id(), indexRequest.routing(), indexRequest.parent(), - indexRequest.timestamp(), indexRequest.ttl() == null ? 
null : indexRequest.ttl().toString(), indexRequest.sourceAsMap())); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(null); return null; }; doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index 43ae965fb27..973e8c1d8e1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -71,10 +71,15 @@ public class SimulateExecutionServiceTests extends ESTestCase { assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), not(sameInstance(ingestDocument))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), equalTo(ingestDocument)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata()))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), nullValue()); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument)); + 
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata()))); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getSourceAndMetadata(), + not(sameInstance(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata()))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java index e10391f7667..9484a621d58 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java @@ -83,10 +83,11 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { Iterator> expectedDocsIterator = expectedDocs.iterator(); for (IngestDocument ingestDocument : actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); - assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(ingestDocument.getEsMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(ingestDocument.getEsMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); - assertThat(ingestDocument.getEsMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); + Map metadataMap = ingestDocument.extractMetadata(); + assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(metadataMap.get(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); + assertThat(metadataMap.get(ID), 
equalTo(expectedDocument.get(ID.getFieldName()))); + assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE))); } assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); @@ -136,10 +137,11 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { Iterator> expectedDocsIterator = expectedDocs.iterator(); for (IngestDocument ingestDocument : actualRequest.getDocuments()) { Map expectedDocument = expectedDocsIterator.next(); - assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); - assertThat(ingestDocument.getEsMetadata(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(ingestDocument.getEsMetadata(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); - assertThat(ingestDocument.getEsMetadata(ID), equalTo(expectedDocument.get(ID.getFieldName()))); + Map metadataMap = ingestDocument.extractMetadata(); + assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(metadataMap.get(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); + assertThat(metadataMap.get(ID), equalTo(expectedDocument.get(ID.getFieldName()))); + assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE))); } assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java index da0db6c7b18..b153cced84c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java @@ -36,30 +36,32 @@ import static 
org.hamcrest.Matchers.not; public class WriteableIngestDocumentTests extends ESTestCase { public void testEqualsAndHashcode() throws Exception { - Map esMetadata = new HashMap<>(); + Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - esMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); } Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); } - Map document = RandomDocumentPicks.randomDocument(random()); - WriteableIngestDocument ingestDocument = new WriteableIngestDocument(new IngestDocument(esMetadata, document, ingestMetadata)); + WriteableIngestDocument ingestDocument = new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); boolean changed = false; - Map otherEsMetadata; + Map otherSourceAndMetadata; if (randomBoolean()) { - otherEsMetadata = new HashMap<>(); - numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); - for (int i = 0; i < numFields; i++) { - otherEsMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); - } + otherSourceAndMetadata = RandomDocumentPicks.randomSource(random()); changed = true; } else { - otherEsMetadata = Collections.unmodifiableMap(esMetadata); + otherSourceAndMetadata = new HashMap<>(sourceAndMetadata); + } + if (randomBoolean()) { + numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); + for (int i = 0; i < numFields; i++) { + otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + } + 
changed = true; } Map otherIngestMetadata; @@ -74,15 +76,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { otherIngestMetadata = Collections.unmodifiableMap(ingestMetadata); } - Map otherDocument; - if (randomBoolean()) { - otherDocument = RandomDocumentPicks.randomDocument(random()); - changed = true; - } else { - otherDocument = Collections.unmodifiableMap(document); - } - - WriteableIngestDocument otherIngestDocument = new WriteableIngestDocument(new IngestDocument(otherEsMetadata, otherDocument, otherIngestMetadata)); + WriteableIngestDocument otherIngestDocument = new WriteableIngestDocument(new IngestDocument(otherSourceAndMetadata, otherIngestMetadata)); if (changed) { assertThat(ingestDocument, not(equalTo(otherIngestDocument))); assertThat(otherIngestDocument, not(equalTo(ingestDocument))); @@ -90,7 +84,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { assertThat(ingestDocument, equalTo(otherIngestDocument)); assertThat(otherIngestDocument, equalTo(ingestDocument)); assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode())); - WriteableIngestDocument thirdIngestDocument = new WriteableIngestDocument(new IngestDocument(Collections.unmodifiableMap(esMetadata), Collections.unmodifiableMap(document), Collections.unmodifiableMap(ingestMetadata))); + WriteableIngestDocument thirdIngestDocument = new WriteableIngestDocument(new IngestDocument(Collections.unmodifiableMap(sourceAndMetadata), Collections.unmodifiableMap(ingestMetadata))); assertThat(thirdIngestDocument, equalTo(ingestDocument)); assertThat(ingestDocument, equalTo(thirdIngestDocument)); assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode())); @@ -98,18 +92,18 @@ public class WriteableIngestDocumentTests extends ESTestCase { } public void testSerialization() throws IOException { - Map esMetadata = new HashMap<>(); + Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, 
IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - esMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); } Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); } - Map document = RandomDocumentPicks.randomDocument(random()); - WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(new IngestDocument(esMetadata, document, ingestMetadata)); + Map document = RandomDocumentPicks.randomSource(random()); + WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); BytesStreamOutput out = new BytesStreamOutput(); writeableIngestDocument.writeTo(out); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml index e0db65cf003..edbd9494088 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml @@ -46,7 +46,6 @@ ] } - length: { docs: 1 } - - is_true: docs.0.doc.modified - match: { docs.0.doc._source.foo: "bar" } - match: { docs.0.doc._source.field2: "_value" } - length: { docs.0.doc._ingest: 1 } @@ -155,7 +154,6 @@ - length: { docs: 1 } - length: { docs.0.processor_results: 2 } - match: { docs.0.processor_results.0.processor_id: "processor[set]-0" } - - is_true: docs.0.processor_results.0.doc.modified - length: { docs.0.processor_results.0.doc._source: 2 } - match: { docs.0.processor_results.0.doc._source.foo: "bar" } - match: { docs.0.processor_results.0.doc._source.field2: "_value" } @@ -209,7 +207,6 
@@ } - length: { docs: 2 } - match: { docs.0.error.type: "illegal_argument_exception" } - - is_true: docs.1.doc.modified - match: { docs.1.doc._source.foo: "BAR" } - length: { docs.1.doc._ingest: 1 } - is_true: docs.1.doc._ingest.timestamp From a95f81c015aeabee64e5bda407e626e39de41e00 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 9 Dec 2015 18:42:20 +0100 Subject: [PATCH 116/347] avoid stripping out _source prefix when in _ingest context --- .../java/org/elasticsearch/ingest/IngestDocument.java | 10 +++++----- .../org/elasticsearch/ingest/IngestDocumentTests.java | 6 ++++++ 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 7a695ce416b..ca4d55ee2db 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -467,12 +467,12 @@ public final class IngestDocument { newPath = path.substring(8, path.length()); } else { initialContext = sourceAndMetadata; - newPath = path; + if (path.startsWith("_source.")) { + newPath = path.substring(8, path.length()); + } else { + newPath = path; + } } - if (newPath.startsWith("_source.")) { - newPath = newPath.substring(8, path.length()); - } - this.pathElements = Strings.splitStringToArray(newPath, '.'); if (pathElements.length == 0) { throw new IllegalArgumentException("path [" + path + "] is not valid"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 7d9bd58dc79..11ac560fb3e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -366,6 +366,12 @@ public class IngestDocumentTests extends ESTestCase { 
assertThat(ingestDocument.getSourceAndMetadata().get("_ingest"), equalTo("value")); } + public void testSetIngestSourceObject() { + //test that we don't strip out the _source prefix when _ingest is used + ingestDocument.setFieldValue("_ingest._source", "value"); + assertThat(ingestDocument.getIngestMetadata().get("_source"), equalTo("value")); + } + public void testSetEmptyPathAfterStrippingOutPrefix() { try { ingestDocument.setFieldValue("_source.", "value"); From a8382de09d88ead31fd52a97dda15572f92b1360 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 9 Dec 2015 18:52:43 +0100 Subject: [PATCH 117/347] add comment to clarifiy why metadata fields can be set all the time to IndexRequest in PipelineExecutionService --- .../elasticsearch/plugin/ingest/PipelineExecutionService.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index bdf37dc87f7..18a79104f5a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -63,6 +63,8 @@ public class PipelineExecutionService { try { pipeline.execute(ingestDocument); Map metadataMap = ingestDocument.extractMetadata(); + //it's fine to set all metadata fields all the time, as ingest document holds their starting values + //before ingestion, which might also get modified during ingestion. 
indexRequest.index(metadataMap.get(IngestDocument.MetaData.INDEX)); indexRequest.type(metadataMap.get(IngestDocument.MetaData.TYPE)); indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); From d38cccb8a112d388dac0426bb5ab8406ed90d956 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 11 Dec 2015 14:51:45 +0100 Subject: [PATCH 118/347] Fix issues after merging in master --- plugins/ingest/build.gradle | 3 ++- plugins/ingest/licenses/compiler-0.9.1.jar.sha1 | 1 + plugins/ingest/licenses/compiler-LICENSE.txt | 14 ++++++++++++++ plugins/ingest/licenses/compiler-NOTICE.txt | 1 + .../main/plugin-metadata/plugin-security.policy | 1 + 5 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 plugins/ingest/licenses/compiler-0.9.1.jar.sha1 create mode 100644 plugins/ingest/licenses/compiler-LICENSE.txt create mode 100644 plugins/ingest/licenses/compiler-NOTICE.txt diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 2d43139a7bb..a66e6bd96e5 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -33,9 +33,10 @@ dependencies { compile('com.fasterxml.jackson.core:jackson-databind:2.5.3') compile('com.maxmind.db:maxmind-db:1.0.0') + compile "com.github.spullara.mustache.java:compiler:0.9.1" compile 'joda-time:joda-time:2.8.2' testCompile 'org.elasticsearch:geolite2-databases:20151029' - testCompile 'org.elasticsearch:securemock:1.1' + testCompile 'org.elasticsearch:securemock:1.2' } sourceSets { diff --git a/plugins/ingest/licenses/compiler-0.9.1.jar.sha1 b/plugins/ingest/licenses/compiler-0.9.1.jar.sha1 new file mode 100644 index 00000000000..d1ef908bb33 --- /dev/null +++ b/plugins/ingest/licenses/compiler-0.9.1.jar.sha1 @@ -0,0 +1 @@ +14aec5344639782ee76441401b773946c65eb2b3 \ No newline at end of file diff --git a/plugins/ingest/licenses/compiler-LICENSE.txt b/plugins/ingest/licenses/compiler-LICENSE.txt new file mode 100644 index 00000000000..ac68303cc11 --- /dev/null +++ 
b/plugins/ingest/licenses/compiler-LICENSE.txt @@ -0,0 +1,14 @@ +Copyright 2010 RightTime, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/plugins/ingest/licenses/compiler-NOTICE.txt b/plugins/ingest/licenses/compiler-NOTICE.txt new file mode 100644 index 00000000000..8d1c8b69c3f --- /dev/null +++ b/plugins/ingest/licenses/compiler-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy index 3faba716fd1..32e73816e2f 100644 --- a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy @@ -20,4 +20,5 @@ grant { // needed because geoip2 is using reflection to deserialize data into its own domain classes permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + permission java.lang.RuntimePermission "accessDeclaredMembers"; }; From e87709f593056149928850f7bf3e8340a9ae540c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 15 Dec 2015 10:18:19 +0100 Subject: [PATCH 119/347] fix ingest runner --- plugins/ingest/src/test/java/IngestRunner.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/ingest/src/test/java/IngestRunner.java b/plugins/ingest/src/test/java/IngestRunner.java index a860cb9938f..10f7662b4b0 100644 --- a/plugins/ingest/src/test/java/IngestRunner.java +++ b/plugins/ingest/src/test/java/IngestRunner.java @@ -19,6 +19,7 @@ import 
org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; import org.elasticsearch.plugin.ingest.IngestPlugin; @@ -34,6 +35,7 @@ public class IngestRunner { settings.put("http.cors.allow-origin", "*"); settings.put("script.inline", "on"); settings.put("cluster.name", IngestRunner.class.getSimpleName()); + settings.put(DiscoveryService.SETTING_DISCOVERY_SEED, 0L); final CountDownLatch latch = new CountDownLatch(1); final Node node = new MockNode(settings.build(), Version.CURRENT, Collections.singleton(IngestPlugin.class)); From 07951fc731df06d7b1129a5cb3800fe65c9c4dc9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 16 Dec 2015 10:08:48 +0100 Subject: [PATCH 120/347] added comment why 'accessDeclaredMembers' permission is needed --- .../ingest/src/main/plugin-metadata/plugin-security.policy | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy index 32e73816e2f..dadb2acd640 100644 --- a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy @@ -20,5 +20,10 @@ grant { // needed because geoip2 is using reflection to deserialize data into its own domain classes permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + // needed because jackson-databind is using Class#getDeclaredConstructors(), Class#getDeclaredMethods() and + // Class#getDeclaredAnnotations() to find all public, private, protected, package protected and + // private constructors, methods or annotations. Just locating all public constructors, methods and annotations + // should be enough, so this permission wouldn't then be needed. Unfortunately this is not what jackson-databind does + // or can be configured to do. 
permission java.lang.RuntimePermission "accessDeclaredMembers"; }; From 885b01fb49e7c61099785b8be0379abfb9a4f20f Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 18 Dec 2015 10:36:29 +0100 Subject: [PATCH 121/347] adapt to upstream changes: RestModule -> NetworkModule --- .../elasticsearch/plugin/ingest/IngestPlugin.java | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 5405459b28a..a7c177d3386 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -25,6 +25,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.rest.RestDeletePipelineAction; import org.elasticsearch.plugin.ingest.rest.RestGetPipelineAction; @@ -40,9 +41,7 @@ import org.elasticsearch.plugin.ingest.transport.put.PutPipelineTransportAction; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineTransportAction; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestModule; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -108,10 +107,10 @@ public class IngestPlugin extends Plugin { module.registerAction(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class); } - public void onModule(RestModule restModule) { - restModule.addRestAction(RestPutPipelineAction.class); - 
restModule.addRestAction(RestGetPipelineAction.class); - restModule.addRestAction(RestDeletePipelineAction.class); - restModule.addRestAction(RestSimulatePipelineAction.class); + public void onModule(NetworkModule networkModule) { + networkModule.registerRestHandler(RestPutPipelineAction.class); + networkModule.registerRestHandler(RestGetPipelineAction.class); + networkModule.registerRestHandler(RestDeletePipelineAction.class); + networkModule.registerRestHandler(RestSimulatePipelineAction.class); } } From 8bae93eee104f8597960e1e5fb3a8438fe5b0559 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 18 Dec 2015 11:13:18 +0100 Subject: [PATCH 122/347] adapt to upstream changes: StringText => Text --- .../plugin/ingest/PipelineStoreTests.java | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index e67262b3df0..2f1409b4528 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -27,10 +27,8 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.Provider; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.env.Environment; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.search.SearchHit; @@ -51,10 +49,12 @@ import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import static org.hamcrest.Matchers.*; +import 
static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import static org.mockito.Matchers.any; public class PipelineStoreTests extends ESTestCase { @@ -80,7 +80,7 @@ public class PipelineStoreTests extends ESTestCase { public void testUpdatePipeline() throws Exception { List hits = new ArrayList<>(); - hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) + hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); @@ -93,7 +93,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(store.get("1").getDescription(), equalTo("_description1")); when(client.get(any())).thenReturn(expectedGetResponse(true)); - hits.add(new InternalSearchHit(0, "2", new StringText("type"), Collections.emptyMap()) + hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); store.updatePipelines(); @@ -112,7 +112,7 @@ public class PipelineStoreTests extends ESTestCase { public void testPipelineUpdater() throws Exception { List hits = new ArrayList<>(); - hits.add(new InternalSearchHit(0, "1", new StringText("type"), Collections.emptyMap()) + hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); when(client.search(any())).thenReturn(expectedSearchReponse(hits)); @@ -126,7 +126,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(store.get("1").getDescription(), equalTo("_description1")); }); - hits.add(new InternalSearchHit(0, "2", new StringText("type"), Collections.emptyMap()) + hits.add(new InternalSearchHit(0, "2", new Text("type"), 
Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); assertBusy(() -> { @@ -142,9 +142,9 @@ public class PipelineStoreTests extends ESTestCase { public void testGetReference() throws Exception { // fill the store up for the test: List hits = new ArrayList<>(); - hits.add(new InternalSearchHit(0, "foo", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - hits.add(new InternalSearchHit(0, "bar", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - hits.add(new InternalSearchHit(0, "foobar", new StringText("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); + hits.add(new InternalSearchHit(0, "foo", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); + hits.add(new InternalSearchHit(0, "bar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); + hits.add(new InternalSearchHit(0, "foobar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); when(client.search(any())).thenReturn(expectedSearchReponse(hits)); store.updatePipelines(); @@ -187,7 +187,7 @@ public class PipelineStoreTests extends ESTestCase { @Override public SearchResponse get(long timeout, TimeUnit unit) { InternalSearchHits hits1 = new InternalSearchHits(hits.toArray(new InternalSearchHit[0]), hits.size(), 1f); - return new SearchResponse(new InternalSearchResponse(hits1, null, null, false, null), "_scrollId", 1, 1, 1, null); + return new SearchResponse(new InternalSearchResponse(hits1, null, null, null, false, null), "_scrollId", 1, 1, 1, null); } }; } From f349669071079657156541cd846370e39aa38944 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 18 Dec 2015 11:13:34 +0100 Subject: [PATCH 123/347] adapt to upstream 
changes: enableMockModules => getMockPlugins --- .../test/java/org/elasticsearch/ingest/IngestClientIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 9db406a7a9e..4c27d4d6dca 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -45,6 +45,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -255,7 +256,7 @@ public class IngestClientIT extends ESIntegTestCase { } @Override - protected boolean enableMockModules() { - return false; + protected Collection> getMockPlugins() { + return Collections.emptyList(); } } From 3e155f7b54190aba1cebcf3a177286e546b94c2a Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 18 Dec 2015 11:14:39 +0100 Subject: [PATCH 124/347] adapt to upstream changes: thirdPartyAudit.missingClasses set to true geoip depends on asm and google http client which we don't need --- plugins/ingest/build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index a66e6bd96e5..d9e98835372 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -63,3 +63,6 @@ bundlePlugin { into 'config/geoip' } } + +//geoip depends on asm and google http client which we don't need +thirdPartyAudit.missingClasses = true From 2c5bb84851e8584d5a0d6d8cc3ac704ce99ec058 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 18 Dec 2015 15:45:56 +0100 Subject: [PATCH 125/347] fix copyDefaultGeoIp2DatabaseFiles task to work again --- plugins/ingest/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index d9e98835372..861115f336d 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -48,7 +48,7 @@ sourceSets { } task copyDefaultGeoIp2DatabaseFiles(type: Copy) { - from zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases')}) + from { zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases')}) } into "${project.buildDir}/geoip" include "*.mmdb" } From a56902567e52dddcc99b2de9df665d2d495fd197 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 18 Dec 2015 15:49:00 +0100 Subject: [PATCH 126/347] don't register rest actions on transport clients --- .../org/elasticsearch/plugin/ingest/IngestPlugin.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index a7c177d3386..2474d64c067 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -108,9 +108,11 @@ public class IngestPlugin extends Plugin { } public void onModule(NetworkModule networkModule) { - networkModule.registerRestHandler(RestPutPipelineAction.class); - networkModule.registerRestHandler(RestGetPipelineAction.class); - networkModule.registerRestHandler(RestDeletePipelineAction.class); - networkModule.registerRestHandler(RestSimulatePipelineAction.class); + if (transportClient == false) { + networkModule.registerRestHandler(RestPutPipelineAction.class); + networkModule.registerRestHandler(RestGetPipelineAction.class); + networkModule.registerRestHandler(RestDeletePipelineAction.class); + networkModule.registerRestHandler(RestSimulatePipelineAction.class); + } } } From e8a8e22e09a345d091b52df283667641a15b702b Mon Sep 17 00:00:00 2001 From: Martijn 
van Groningen Date: Thu, 10 Dec 2015 15:59:41 +0100 Subject: [PATCH 127/347] Add template infrastructure, removed meta processor and added template support to set and remove processor. Added ingest wide template infrastructure to IngestDocument Added a TemplateService interface that the ingest framework uses Added a TemplateService implementation that the ingest plugin provides that delegates to the ES' script service Cut SetProcessor over to use the template infrastructure for the `field` and `value` settings. Removed the MetaDataProcessor Removed dependency on mustache library Added qa ingest mustache rest test so that the ingest and mustache integration can be tested. --- docs/plugins/ingest.asciidoc | 127 ++++++++---- plugins/ingest/build.gradle | 1 - .../ingest/licenses/compiler-0.9.1.jar.sha1 | 1 - plugins/ingest/licenses/compiler-LICENSE.txt | 14 -- plugins/ingest/licenses/compiler-NOTICE.txt | 1 - .../elasticsearch/ingest/IngestDocument.java | 88 ++++---- .../elasticsearch/ingest/TemplateService.java | 38 ++++ .../org/elasticsearch/ingest/ValueSource.java | 189 ++++++++++++++++++ .../processor/meta/MetaDataProcessor.java | 73 ------- .../processor/remove/RemoveProcessor.java | 16 +- .../ingest/processor/set/SetProcessor.java | 23 ++- .../plugin/ingest/IngestModule.java | 30 ++- .../plugin/ingest/IngestPlugin.java | 5 + .../ingest/InternalTemplateService.java | 90 +++++++++ .../plugin/ingest/PipelineStore.java | 46 ++--- .../ingest/ProcessorFactoryProvider.java | 37 ++++ .../ingest/IngestDocumentTests.java | 59 +----- .../ingest/TestTemplateService.java | 58 ++++++ .../ingest/ValueSourceTests.java | 65 ++++++ .../meta/MetaDataProcessorFactoryTests.java | 65 ------ .../meta/MetaDataProcessorTests.java | 34 ---- .../remove/RemoveProcessorFactoryTests.java | 15 +- .../remove/RemoveProcessorTests.java | 8 +- .../set/SetProcessorFactoryTests.java | 18 +- .../processor/set/SetProcessorTests.java | 26 ++- .../ingest/PipelineExecutionServiceTests.java | 15 +- 
.../plugin/ingest/PipelineStoreTests.java | 4 +- .../rest-api-spec/test/ingest/60_mutate.yaml | 48 +++++ .../test/ingest/70_meta_processor.yaml | 45 ----- .../{80_simulate.yaml => 70_simulate.yaml} | 0 qa/ingest-with-mustache/build.gradle | 31 +++ .../plugin/ingest/AbstractMustacheTests.java | 53 +++++ .../ingest/IngestDocumentMustacheIT.java | 85 ++++++++ .../IngestMustacheRemoveProcessorIT.java | 39 ++++ .../ingest/IngestMustacheSetProcessorIT.java | 71 +++++++ .../plugin/ingest/TemplateServiceIT.java | 55 +++++ .../plugin/ingest/ValueSourceMustacheIT.java | 70 +++++++ .../smoketest/IngestWithMustacheIT.java | 41 ++++ .../10_pipeline_with_mustache_templates.yaml | 171 ++++++++++++++++ settings.gradle | 1 + 40 files changed, 1395 insertions(+), 461 deletions(-) delete mode 100644 plugins/ingest/licenses/compiler-0.9.1.jar.sha1 delete mode 100644 plugins/ingest/licenses/compiler-LICENSE.txt delete mode 100644 plugins/ingest/licenses/compiler-NOTICE.txt create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/ProcessorFactoryProvider.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/TestTemplateService.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java delete mode 100644 
plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/{80_simulate.yaml => 70_simulate.yaml} (100%) create mode 100644 qa/ingest-with-mustache/build.gradle create mode 100644 qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java create mode 100644 qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java create mode 100644 qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java create mode 100644 qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java create mode 100644 qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java create mode 100644 qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java create mode 100644 qa/ingest-with-mustache/src/test/java/org/elasticsearch/smoketest/IngestWithMustacheIT.java create mode 100644 qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 4c0cc6a77c3..42585fe0e90 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -403,57 +403,110 @@ An example that adds the parsed date to the `timestamp` field based on the `init } -------------------------------------------------- -==== Meta processor +=== Accessing data in pipelines -The `meta` processor allows to modify metadata properties of a document being processed. +Processors in pipelines have read and write access to documents that pass through the pipeline. +The fields in the source of a document and its metadata fields are accessible. 
-The following example changes the index of a document to `alternative_index` instead of indexing it into an index -that was specified in the index or bulk request: +Accessing a field in the source is straightforward and one can refer to fields by +their name. For example: [source,js] -------------------------------------------------- { - "description" : "...", - "processors" : [ - { - "meta" : { - "_index" : "alternative_index" - } - } - ] + "set": { + "field": "my_field" + "value": 582.1 + } } -------------------------------------------------- -The following metadata attributes can be modified in this processor: `_index`, `_type`, `_id`, `_routing`, `_parent`, -`_timestamp` and `_ttl`. All these metadata attributes can be specified in the body of the `meta` processor. - -Also the metadata settings in this processor are templatable which allows metadata field values to be replaced with -field values in the source of the document being indexed. The mustache template language is used and anything between -`{{` and `}}` can contain a template and point to any field in the source of the document. - -The following example documents being processed end up being indexed into an index based on the resolved city name by -the `geoip` processor. (for example `city-amsterdam`) +On top of this fields from the source are always accessible via the `_source` prefix: [source,js] -------------------------------------------------- { - "description" : "...", - "processors" : [ - { - "geoip" : { - "source" : "ip" - } - }, - { - "meta" : { - "_index" : "city-{{geoip.city_name}}" - } - } - ] + "set": { + "field": "_source.my_field" + "value": 582.1 + } } -------------------------------------------------- -=== Put pipeline API +Metadata fields can also be accessed in the same way as fields from the source. This +is possible because Elasticsearch doesn't allow fields in the source that have the +same name as metadata fields. 
+
+The following example sets the id of a document to `1`:
+
+[source,js]
+--------------------------------------------------
+{
+  "set": {
+    "field": "_id",
+    "value": "1"
+  }
+}
+--------------------------------------------------
+
+The following metadata fields are accessible by a processor: `_index`, `_type`, `_id`, `_routing`, `_parent`,
+`_timestamp` and `_ttl`.
+
+Beyond metadata fields and source fields, the ingest plugin also adds ingest metadata to documents being processed.
+These metadata properties are accessible under the `_ingest` key. Currently the ingest plugin adds the ingest timestamp
+under the `_ingest.timestamp` key to the ingest metadata, which is the time the ingest plugin received the index or bulk
+request to pre-process. But any processor is free to add more ingest related metadata to it. Ingest metadata is transient
+and is lost after a document has been processed by the pipeline, and thus ingest metadata won't be indexed.
+
+The following example adds a field with the name `received` and the value is the ingest timestamp:
+
+[source,js]
+--------------------------------------------------
+{
+  "set": {
+    "field": "received",
+    "value": "{{_ingest.timestamp}}"
+  }
+}
+--------------------------------------------------
+
+As opposed to Elasticsearch metadata fields, the ingest metadata field name `_ingest` can be used as a valid field name
+in the source of a document. Use `_source._ingest` to refer to it, otherwise `_ingest` will be interpreted as ingest
+metadata fields by the ingest plugin.
+
+A number of processor settings also support templating. Settings that support templating can have zero or more
+template snippets. A template snippet begins with `{{` and ends with `}}`.
+Accessing fields and metafields in templates is exactly the same as via regular processor field settings.
+
+In this example a field by the name `field_c` is added and its value is a concatenation of
+the values of `field_a` and `field_b`.
+
+[source,js]
+--------------------------------------------------
+{
+  "set": {
+    "field": "field_c",
+    "value": "{{field_a}} {{field_b}}"
+  }
+}
+--------------------------------------------------
+
+The following example changes the index a document is going to be indexed into. The index a document will be redirected
+to depends on the field in the source with name `geoip.country_iso_code`.
+
+[source,js]
+--------------------------------------------------
+{
+  "set": {
+    "field": "_index",
+    "value": "{{geoip.country_iso_code}}"
+  }
+}
+--------------------------------------------------
+
+=== Ingest APIs
+
+==== Put pipeline API
 
 The put pipeline api adds pipelines and updates existing pipelines in the cluster.
 
@@ -477,7 +530,7 @@ PUT _ingest/pipeline/my-pipeline-id
 NOTE: Each ingest node updates its processors asynchronously in the background, so it may take a few seconds for all
 nodes to have the latest version of the pipeline.
 
-=== Get pipeline API
+==== Get pipeline API
 
 The get pipeline api returns pipelines based on id. This api always returns a local reference of the pipeline.
 
@@ -513,7 +566,7 @@ For each returned pipeline the source and the version
 is useful for knowing what version of the pipeline the node has. Multiple ids can be provided at the same time. Also
 wildcards are supported.
 
-=== Delete pipeline API
+==== Delete pipeline API
 
 The delete pipeline api deletes pipelines by id.
diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 861115f336d..383ad6c88fa 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -33,7 +33,6 @@ dependencies { compile('com.fasterxml.jackson.core:jackson-databind:2.5.3') compile('com.maxmind.db:maxmind-db:1.0.0') - compile "com.github.spullara.mustache.java:compiler:0.9.1" compile 'joda-time:joda-time:2.8.2' testCompile 'org.elasticsearch:geolite2-databases:20151029' testCompile 'org.elasticsearch:securemock:1.2' diff --git a/plugins/ingest/licenses/compiler-0.9.1.jar.sha1 b/plugins/ingest/licenses/compiler-0.9.1.jar.sha1 deleted file mode 100644 index d1ef908bb33..00000000000 --- a/plugins/ingest/licenses/compiler-0.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -14aec5344639782ee76441401b773946c65eb2b3 \ No newline at end of file diff --git a/plugins/ingest/licenses/compiler-LICENSE.txt b/plugins/ingest/licenses/compiler-LICENSE.txt deleted file mode 100644 index ac68303cc11..00000000000 --- a/plugins/ingest/licenses/compiler-LICENSE.txt +++ /dev/null @@ -1,14 +0,0 @@ -Copyright 2010 RightTime, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- diff --git a/plugins/ingest/licenses/compiler-NOTICE.txt b/plugins/ingest/licenses/compiler-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/plugins/ingest/licenses/compiler-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index ca4d55ee2db..a14a2647fd4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -30,6 +30,9 @@ import java.util.*; */ public final class IngestDocument { + public final static String INGEST_KEY = "_ingest"; + public final static String SOURCE_KEY = "_source"; + static final String TIMESTAMP = "timestamp"; private final Map sourceAndMetadata; @@ -149,6 +152,16 @@ public final class IngestDocument { return false; } + /** + * Removes the field identified by the provided path. + * @param fieldPathTemplate Resolves to the path with dot-notation within the document + * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. + */ + public void removeField(TemplateService.Template fieldPathTemplate) { + Map model = createTemplateModel(); + removeField(fieldPathTemplate.execute(model)); + } + /** * Removes the field identified by the provided path. * @param path the path of the field to be removed @@ -246,12 +259,22 @@ public final class IngestDocument { setFieldValue(path, value, false); } + /** + * Sets the provided value to the provided path in the document. + * Any non existing path element will be created. If the last element is a list, + * the value will replace the existing list. 
+ * @param fieldPathTemplate Resolves to the path with dot-notation within the document + * @param valueSource The value source that will produce the value to put in for the path key + * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. + */ + public void setFieldValue(TemplateService.Template fieldPathTemplate, ValueSource valueSource) { + Map model = createTemplateModel(); + setFieldValue(fieldPathTemplate.execute(model), valueSource.copyAndResolve(model), false); + } + private void setFieldValue(String path, Object value, boolean append) { FieldPath fieldPath = new FieldPath(path); Object context = fieldPath.initialContext; - - value = deepCopy(value); - for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { String pathElement = fieldPath.pathElements[i]; if (context == null) { @@ -332,6 +355,15 @@ public final class IngestDocument { throw new IllegalArgumentException("field [" + path + "] of type [" + object.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); } + private Map createTemplateModel() { + Map model = new HashMap<>(sourceAndMetadata); + model.put(SOURCE_KEY, sourceAndMetadata); + // If there is a field in the source with the name '_ingest' it gets overwritten here, + // if access to that field is required then it get accessed via '_source._ingest' + model.put(INGEST_KEY, ingestMetadata); + return model; + } + /** * one time operation that extracts the metadata fields from the ingest document and returns them. * Metadata fields that used to be accessible as ordinary top level fields will be removed as part of this call. 
@@ -361,32 +393,6 @@ public final class IngestDocument { return this.sourceAndMetadata; } - static Object deepCopy(Object value) { - if (value instanceof Map) { - @SuppressWarnings("unchecked") - Map mapValue = (Map) value; - Map copy = new HashMap<>(mapValue.size()); - for (Map.Entry entry : mapValue.entrySet()) { - copy.put(entry.getKey(), deepCopy(entry.getValue())); - } - return copy; - } else if (value instanceof List) { - @SuppressWarnings("unchecked") - List listValue = (List) value; - List copy = new ArrayList<>(listValue.size()); - for (Object itemValue : listValue) { - copy.add(deepCopy(itemValue)); - } - return copy; - } else if (value == null || value instanceof String || value instanceof Integer || - value instanceof Long || value instanceof Float || - value instanceof Double || value instanceof Boolean) { - return value; - } else { - throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]"); - } - } - @Override public boolean equals(Object obj) { if (obj == this) { return true; } @@ -431,26 +437,6 @@ public final class IngestDocument { return fieldName; } - public static MetaData fromString(String value) { - switch (value) { - case "_index": - return INDEX; - case "_type": - return TYPE; - case "_id": - return ID; - case "_routing": - return ROUTING; - case "_parent": - return PARENT; - case "_timestamp": - return TIMESTAMP; - case "_ttl": - return TTL; - default: - throw new IllegalArgumentException("no valid metadata field name [" + value + "]"); - } - } } private class FieldPath { @@ -462,12 +448,12 @@ public final class IngestDocument { throw new IllegalArgumentException("path cannot be null nor empty"); } String newPath; - if (path.startsWith("_ingest.")) { + if (path.startsWith(INGEST_KEY + ".")) { initialContext = ingestMetadata; newPath = path.substring(8, path.length()); } else { initialContext = sourceAndMetadata; - if (path.startsWith("_source.")) { + if (path.startsWith(SOURCE_KEY + ".")) { newPath = 
path.substring(8, path.length()); } else { newPath = path; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java new file mode 100644 index 00000000000..c0505365e0a --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.ingest; + +import java.util.Map; + +/** + * Abstraction for the template engine. + */ +public interface TemplateService { + + Template compile(String template); + + interface Template { + + String execute(Map model); + + String getKey(); + + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java new file mode 100644 index 00000000000..525bb722d60 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java @@ -0,0 +1,189 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import java.util.*; + +/** + * Holds a value. If the value is requested a copy is made and optionally template snippets are resolved too. + */ +public interface ValueSource { + + /** + * Returns a copy of the value this ValueSource holds and resolves templates if there're any. + * + * For immutable values only a copy of the reference to the value is made. + * + * @param model The model to be used when resolving any templates + * @return copy of the wrapped value + */ + Object copyAndResolve(Map model); + + static ValueSource wrap(Object value, TemplateService templateService) { + if (value instanceof Map) { + @SuppressWarnings("unchecked") + Map mapValue = (Map) value; + Map valueTypeMap = new HashMap<>(mapValue.size()); + for (Map.Entry entry : mapValue.entrySet()) { + valueTypeMap.put(wrap(entry.getKey(), templateService), wrap(entry.getValue(), templateService)); + } + return new MapValue(valueTypeMap); + } else if (value instanceof List) { + @SuppressWarnings("unchecked") + List listValue = (List) value; + List valueSourceList = new ArrayList<>(listValue.size()); + for (Object item : listValue) { + valueSourceList.add(wrap(item, templateService)); + } + return new ListValue(valueSourceList); + } else if (value == null || value instanceof Integer || + value instanceof Long || value instanceof Float || + value instanceof Double || 
value instanceof Boolean) { + return new ObjectValue(value); + } else if (value instanceof String) { + return new TemplatedValue(templateService.compile((String) value)); + } else { + throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]"); + } + } + + final class MapValue implements ValueSource { + + private final Map map; + + MapValue(Map map) { + this.map = map; + } + + @Override + public Object copyAndResolve(Map model) { + Map copy = new HashMap<>(); + for (Map.Entry entry : this.map.entrySet()) { + copy.put(entry.getKey().copyAndResolve(model), entry.getValue().copyAndResolve(model)); + } + return copy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + MapValue mapValue = (MapValue) o; + return map.equals(mapValue.map); + + } + + @Override + public int hashCode() { + return map.hashCode(); + } + } + + final class ListValue implements ValueSource { + + private final List values; + + ListValue(List values) { + this.values = values; + } + + @Override + public Object copyAndResolve(Map model) { + List copy = new ArrayList<>(values.size()); + for (ValueSource value : values) { + copy.add(value.copyAndResolve(model)); + } + return copy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + ListValue listValue = (ListValue) o; + return values.equals(listValue.values); + + } + + @Override + public int hashCode() { + return values.hashCode(); + } + } + + final class ObjectValue implements ValueSource { + + private final Object value; + + ObjectValue(Object value) { + this.value = value; + } + + @Override + public Object copyAndResolve(Map model) { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + ObjectValue objectValue = 
(ObjectValue) o; + return Objects.equals(value, objectValue.value); + } + + @Override + public int hashCode() { + return Objects.hashCode(value); + } + } + + final class TemplatedValue implements ValueSource { + + private final TemplateService.Template template; + + TemplatedValue(TemplateService.Template template) { + this.template = template; + } + + @Override + public Object copyAndResolve(Map model) { + return template.execute(model); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + TemplatedValue templatedValue = (TemplatedValue) o; + return Objects.equals(template.getKey(), templatedValue.template.getKey()); + } + + @Override + public int hashCode() { + return Objects.hashCode(template.getKey()); + } + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java deleted file mode 100644 index 4de13f5b107..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessor.java +++ /dev/null @@ -1,73 +0,0 @@ -package org.elasticsearch.ingest.processor.meta; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; -import org.elasticsearch.common.io.FastStringReader; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.IngestDocument.MetaData; -import org.elasticsearch.ingest.processor.Processor; - -import java.io.StringWriter; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - - -//TODO this processor needs to be removed, as the set processor allows now to set any field, including metadata ones. 
-//The only reason for it to be still here is that it supports templating, we will remove once any processor supports templating. -public final class MetaDataProcessor implements Processor { - - public final static String TYPE = "meta"; - - private final Map templates; - - public MetaDataProcessor(Map templates) { - this.templates = templates; - } - - @Override - public void execute(IngestDocument ingestDocument) { - Map model = ingestDocument.getSourceAndMetadata(); - for (Map.Entry entry : templates.entrySet()) { - StringWriter writer = new StringWriter(); - entry.getValue().execute(writer, model); - ingestDocument.setFieldValue(entry.getKey().getFieldName(), writer.toString()); - } - } - - @Override - public String getType() { - return TYPE; - } - - Map getTemplates() { - return templates; - } - - public final static class Factory implements Processor.Factory { - - private final MustacheFactory mustacheFactory = new DefaultMustacheFactory(); - - @Override - public MetaDataProcessor create(Map config) throws Exception { - Map templates = new HashMap<>(); - Iterator> iterator = config.entrySet().iterator(); - while (iterator.hasNext()) { - Map.Entry entry = iterator.next(); - MetaData metaData = MetaData.fromString(entry.getKey()); - Mustache mustache = mustacheFactory.compile(new FastStringReader(entry.getValue().toString()), ""); - templates.put(metaData, mustache); - iterator.remove(); - } - - if (templates.isEmpty()) { - throw new IllegalArgumentException("no meta fields specified"); - } - - return new MetaDataProcessor(Collections.unmodifiableMap(templates)); - } - } - -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java index 80cd017ef78..9bdde91f38c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor.remove; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; @@ -32,13 +33,13 @@ public class RemoveProcessor implements Processor { public static final String TYPE = "remove"; - private final String field; + private final TemplateService.Template field; - RemoveProcessor(String field) { + RemoveProcessor(TemplateService.Template field) { this.field = field; } - String getField() { + public TemplateService.Template getField() { return field; } @@ -53,10 +54,17 @@ public class RemoveProcessor implements Processor { } public static class Factory implements Processor.Factory { + + private final TemplateService templateService; + + public Factory(TemplateService templateService) { + this.templateService = templateService; + } + @Override public RemoveProcessor create(Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); - return new RemoveProcessor(field); + return new RemoveProcessor(templateService.compile(field)); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java index f14be2a3217..c872c479f2b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java @@ -20,11 +20,11 @@ package org.elasticsearch.ingest.processor.set; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.ValueSource; import org.elasticsearch.ingest.processor.ConfigurationUtils; import 
org.elasticsearch.ingest.processor.Processor; -import java.util.Arrays; -import java.util.Collections; import java.util.Map; /** @@ -35,19 +35,19 @@ public class SetProcessor implements Processor { public static final String TYPE = "set"; - private final String field; - private final Object value; + private final TemplateService.Template field; + private final ValueSource value; - SetProcessor(String field, Object value) { + SetProcessor(TemplateService.Template field, ValueSource value) { this.field = field; this.value = value; } - String getField() { + public TemplateService.Template getField() { return field; } - Object getValue() { + public ValueSource getValue() { return value; } @@ -62,11 +62,18 @@ public class SetProcessor implements Processor { } public static final class Factory implements Processor.Factory { + + private final TemplateService templateService; + + public Factory(TemplateService templateService) { + this.templateService = templateService; + } + @Override public SetProcessor create(Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); Object value = ConfigurationUtils.readObject(config, "value"); - return new SetProcessor(field, value); + return new SetProcessor(templateService.compile(field), ValueSource.wrap(value, templateService)); } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 5c6961b8670..a50bed33ea0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -34,15 +34,12 @@ import org.elasticsearch.ingest.processor.rename.RenameProcessor; import org.elasticsearch.ingest.processor.split.SplitProcessor; import org.elasticsearch.ingest.processor.trim.TrimProcessor; import org.elasticsearch.ingest.processor.uppercase.UppercaseProcessor; 
-import org.elasticsearch.ingest.processor.meta.MetaDataProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import org.elasticsearch.plugin.ingest.transport.simulate.SimulateExecutionService; import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.plugin.ingest.PipelineStore.ProcessorFactoryProvider; - public class IngestModule extends AbstractModule { private final Map processorFactoryProviders = new HashMap<>(); @@ -54,20 +51,19 @@ public class IngestModule extends AbstractModule { binder().bind(PipelineStore.class).asEagerSingleton(); binder().bind(SimulateExecutionService.class).asEagerSingleton(); - addProcessor(GeoIpProcessor.TYPE, environment -> new GeoIpProcessor.Factory(environment.configFile())); - addProcessor(GrokProcessor.TYPE, environment -> new GrokProcessor.Factory(environment.configFile())); - addProcessor(DateProcessor.TYPE, environment -> new DateProcessor.Factory()); - addProcessor(SetProcessor.TYPE, environment -> new SetProcessor.Factory()); - addProcessor(RenameProcessor.TYPE, environment -> new RenameProcessor.Factory()); - addProcessor(RemoveProcessor.TYPE, environment -> new RemoveProcessor.Factory()); - addProcessor(SplitProcessor.TYPE, environment -> new SplitProcessor.Factory()); - addProcessor(JoinProcessor.TYPE, environment -> new JoinProcessor.Factory()); - addProcessor(UppercaseProcessor.TYPE, environment -> new UppercaseProcessor.Factory()); - addProcessor(LowercaseProcessor.TYPE, environment -> new LowercaseProcessor.Factory()); - addProcessor(TrimProcessor.TYPE, environment -> new TrimProcessor.Factory()); - addProcessor(ConvertProcessor.TYPE, environment -> new ConvertProcessor.Factory()); - addProcessor(GsubProcessor.TYPE, environment -> new GsubProcessor.Factory()); - addProcessor(MetaDataProcessor.TYPE, environment -> new MetaDataProcessor.Factory()); + addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + 
addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); + addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); + addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); for (Map.Entry entry : processorFactoryProviders.entrySet()) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 2474d64c067..1bd68efd87c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -41,6 +41,7 @@ import org.elasticsearch.plugin.ingest.transport.put.PutPipelineTransportAction; import 
org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineTransportAction; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptModule; import java.util.Collection; import java.util.Collections; @@ -115,4 +116,8 @@ public class IngestPlugin extends Plugin { networkModule.registerRestHandler(RestSimulatePipelineAction.class); } } + + public void onModule(ScriptModule module) { + module.registerScriptContext(InternalTemplateService.INGEST_SCRIPT_CONTEXT); + } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java new file mode 100644 index 00000000000..cec07f40459 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.script.*; + +import java.util.Collections; +import java.util.Map; + +class InternalTemplateService implements TemplateService { + + public static final ScriptContext.Plugin INGEST_SCRIPT_CONTEXT = new ScriptContext.Plugin("elasticsearch-ingest", "ingest"); + + private final ScriptService scriptService; + + InternalTemplateService(ScriptService scriptService) { + this.scriptService = scriptService; + } + + @Override + public Template compile(String template) { + int mustacheStart = template.indexOf("{{"); + int mustacheEnd = template.indexOf("}}"); + if (mustacheStart != -1 && mustacheEnd != -1 && mustacheStart < mustacheEnd) { + Script script = new Script(template, ScriptService.ScriptType.INLINE, "mustache", Collections.emptyMap()); + CompiledScript compiledScript = scriptService.compile( + script, + INGEST_SCRIPT_CONTEXT, + null /* we can supply null here, because ingest doesn't use indexed scripts */, + Collections.emptyMap() + ); + return new Template() { + @Override + public String execute(Map model) { + ExecutableScript executableScript = scriptService.executable(compiledScript, model); + Object result = executableScript.run(); + if (result instanceof BytesReference) { + return ((BytesReference) result).toUtf8(); + } + return String.valueOf(result); + } + + @Override + public String getKey() { + return template; + } + }; + } else { + return new StringTemplate(template); + } + } + + class StringTemplate implements Template { + + private final String value; + + public StringTemplate(String value) { + this.value = value; + } + + @Override + public String execute(Map model) { + return value; + } + + @Override + public String getKey() { + return value; + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index b3e30b51ff9..9e36cf17df2 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -35,10 +35,7 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.common.SearchScrollIterator; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Provider; import org.elasticsearch.common.regex.Regex; @@ -48,9 +45,11 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; +import org.elasticsearch.script.*; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; @@ -66,35 +65,46 @@ public class PipelineStore extends AbstractLifecycleComponent { public final static String TYPE = "pipeline"; private final ThreadPool threadPool; + private final Environment environment; private final TimeValue scrollTimeout; private final ClusterService clusterService; private final Provider clientProvider; private final TimeValue pipelineUpdateInterval; + private final Provider scriptServiceProvider; 
private final Pipeline.Factory factory = new Pipeline.Factory(); - private final Map processorFactoryRegistry; + private volatile Map processorFactoryRegistry; + private final Map processorFactoryProviders; private volatile Client client; private volatile Map pipelines = new HashMap<>(); @Inject - public PipelineStore(Settings settings, Provider clientProvider, ThreadPool threadPool, Environment environment, ClusterService clusterService, Map processorFactoryProviders) { + public PipelineStore(Settings settings, Provider clientProvider, ThreadPool threadPool, + Environment environment, ClusterService clusterService, Provider scriptServiceProvider, + Map processorFactoryProviders) { super(settings); this.threadPool = threadPool; + this.environment = environment; this.clusterService = clusterService; this.clientProvider = clientProvider; + this.scriptServiceProvider = scriptServiceProvider; this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); - Map processorFactories = new HashMap<>(); - for (Map.Entry entry : processorFactoryProviders.entrySet()) { - Processor.Factory processorFactory = entry.getValue().get(environment); - processorFactories.put(entry.getKey(), processorFactory); - } - this.processorFactoryRegistry = Collections.unmodifiableMap(processorFactories); + this.processorFactoryProviders = processorFactoryProviders; + clusterService.add(new PipelineStoreListener()); } @Override protected void doStart() { + // TODO this will be better when #15203 gets in: + Map processorFactories = new HashMap<>(); + TemplateService templateService = new InternalTemplateService(scriptServiceProvider.get()); + for (Map.Entry entry : processorFactoryProviders.entrySet()) { + Processor.Factory processorFactory = entry.getValue().get(environment, templateService); + 
processorFactories.put(entry.getKey(), processorFactory); + } + this.processorFactoryRegistry = Collections.unmodifiableMap(processorFactories); } @Override @@ -249,7 +259,6 @@ public class PipelineStore extends AbstractLifecycleComponent { return SearchScrollIterator.createIterator(client(), scrollTimeout, searchRequest); } - private Client client() { if (client == null) { client = clientProvider.get(); @@ -257,19 +266,6 @@ public class PipelineStore extends AbstractLifecycleComponent { return client; } - /** - * The ingest framework (pipeline, processor and processor factory) can't rely on ES specific code. However some - * processors rely on reading files from the config directory. We can't add Environment as a constructor parameter, - * so we need some code that provides the physical location of the configuration directory to the processor factories - * that need this and this is what this processor factory provider does. - */ - @FunctionalInterface - interface ProcessorFactoryProvider { - - Processor.Factory get(Environment environment); - - } - class Updater implements Runnable { @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/ProcessorFactoryProvider.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/ProcessorFactoryProvider.java new file mode 100644 index 00000000000..e99261e6408 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/ProcessorFactoryProvider.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.env.Environment; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.processor.Processor; + +/** + * The ingest framework (pipeline, processor and processor factory) can't rely on ES specific code. However some + * processors rely on reading files from the config directory. We can't add Environment as a constructor parameter, + * so we need some code that provides the physical location of the configuration directory to the processor factories + * that need this and this is what this processor factory provider does. 
+ */ +@FunctionalInterface +interface ProcessorFactoryProvider { + + Processor.Factory get(Environment environment, TemplateService templateService); + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 11ac560fb3e..011d47c67ed 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -22,8 +22,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.text.DateFormat; -import java.text.SimpleDateFormat; import java.util.*; import static org.hamcrest.Matchers.*; @@ -381,7 +379,7 @@ public class IngestDocumentTests extends ESTestCase { } try { - ingestDocument.setFieldValue("_ingest.", Object.class); + ingestDocument.setFieldValue("_ingest.", "_value"); fail("set field value should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("path [_ingest.] 
is not valid")); @@ -605,7 +603,7 @@ public class IngestDocumentTests extends ESTestCase { public void testRemoveNullField() { try { - ingestDocument.removeField(null); + ingestDocument.removeField((String) null); fail("remove field should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("path cannot be null nor empty")); @@ -677,57 +675,4 @@ public class IngestDocumentTests extends ESTestCase { } } - public void testDeepCopy() { - int iterations = scaledRandomIntBetween(8, 64); - for (int i = 0; i < iterations; i++) { - Map map = RandomDocumentPicks.randomSource(random()); - Object copy = IngestDocument.deepCopy(map); - assertThat("iteration: " + i, copy, equalTo(map)); - assertThat("iteration: " + i, copy, not(sameInstance(map))); - } - } - - public void testDeepCopyDoesNotChangeProvidedMap() { - Map myPreciousMap = new HashMap<>(); - myPreciousMap.put("field2", "value2"); - - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, null, new HashMap<>()); - ingestDocument.setFieldValue("field1", myPreciousMap); - ingestDocument.removeField("field1.field2"); - - assertThat(myPreciousMap.size(), equalTo(1)); - assertThat(myPreciousMap.get("field2"), equalTo("value2")); - } - - public void testDeepCopyDoesNotChangeProvidedList() { - List myPreciousList = new ArrayList<>(); - myPreciousList.add("value"); - - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, null, new HashMap<>()); - ingestDocument.setFieldValue("field1", myPreciousList); - ingestDocument.removeField("field1.0"); - - assertThat(myPreciousList.size(), equalTo(1)); - assertThat(myPreciousList.get(0), equalTo("value")); - } - - public void testIngestMetadataTimestamp() throws Exception { - long before = System.currentTimeMillis(); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - long after = System.currentTimeMillis(); - String timestampString = 
ingestDocument.getIngestMetadata().get("timestamp"); - assertThat(timestampString, notNullValue()); - assertThat(timestampString, endsWith("+0000")); - DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); - Date timestamp = df.parse(timestampString); - assertThat(timestamp.getTime(), greaterThanOrEqualTo(before)); - assertThat(timestamp.getTime(), lessThanOrEqualTo(after)); - } - - public void testCopyConstructor() { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - IngestDocument copy = new IngestDocument(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata(), not(sameInstance(copy.getSourceAndMetadata()))); - assertThat(ingestDocument.getSourceAndMetadata(), equalTo(copy.getSourceAndMetadata())); - } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/TestTemplateService.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/TestTemplateService.java new file mode 100644 index 00000000000..5ef2c8e4bdd --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/TestTemplateService.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import java.util.Map; + +public class TestTemplateService implements TemplateService { + + public static TemplateService instance() { + return new TestTemplateService(); + } + + private TestTemplateService() { + } + + @Override + public Template compile(String template) { + return new MockTemplate(template); + } + + public static class MockTemplate implements TemplateService.Template { + + private final String expected; + + public MockTemplate(String expected) { + this.expected = expected; + } + + @Override + public String execute(Map model) { + return expected; + } + + @Override + public String getKey() { + return expected; + } + + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java new file mode 100644 index 00000000000..1c3f7dc3120 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.test.ESTestCase; + +import java.util.*; + +import static org.hamcrest.Matchers.*; + +public class ValueSourceTests extends ESTestCase { + + public void testDeepCopy() { + int iterations = scaledRandomIntBetween(8, 64); + for (int i = 0; i < iterations; i++) { + Map map = RandomDocumentPicks.randomSource(random()); + ValueSource valueSource = ValueSource.wrap(map, TestTemplateService.instance()); + Object copy = valueSource.copyAndResolve(Collections.emptyMap()); + assertThat("iteration: " + i, copy, equalTo(map)); + assertThat("iteration: " + i, copy, not(sameInstance(map))); + } + } + + public void testCopyDoesNotChangeProvidedMap() { + Map myPreciousMap = new HashMap<>(); + myPreciousMap.put("field2", "value2"); + + IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); + ingestDocument.setFieldValue(TestTemplateService.instance().compile("field1"), ValueSource.wrap(myPreciousMap, TestTemplateService.instance())); + ingestDocument.removeField("field1.field2"); + + assertThat(myPreciousMap.size(), equalTo(1)); + assertThat(myPreciousMap.get("field2"), equalTo("value2")); + } + + public void testCopyDoesNotChangeProvidedList() { + List myPreciousList = new ArrayList<>(); + myPreciousList.add("value"); + + IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); + ingestDocument.setFieldValue(TestTemplateService.instance().compile("field1"), ValueSource.wrap(myPreciousList, TestTemplateService.instance())); + ingestDocument.removeField("field1.0"); + + assertThat(myPreciousList.size(), equalTo(1)); + assertThat(myPreciousList.get(0), equalTo("value")); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java deleted file mode 100644 index ee4cb0228a8..00000000000 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorFactoryTests.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.elasticsearch.ingest.processor.meta; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheException; -import org.elasticsearch.common.io.FastStringReader; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.ingest.IngestDocument.MetaData; - -public class MetaDataProcessorFactoryTests extends ESTestCase { - - public void testCreate() throws Exception { - MetaDataProcessor.Factory factory = new MetaDataProcessor.Factory(); - Map config = new HashMap<>(); - for (MetaData metaData : MetaData.values()) { - config.put(metaData.getFieldName(), randomBoolean() ? "static text" : "{{expression}}"); - } - MetaDataProcessor processor = factory.create(config); - assertThat(processor.getTemplates().size(), Matchers.equalTo(7)); - assertThat(processor.getTemplates().get(MetaData.INDEX), Matchers.notNullValue()); - assertThat(processor.getTemplates().get(MetaData.TIMESTAMP), Matchers.notNullValue()); - assertThat(processor.getTemplates().get(MetaData.ID), Matchers.notNullValue()); - assertThat(processor.getTemplates().get(MetaData.ROUTING), Matchers.notNullValue()); - assertThat(processor.getTemplates().get(MetaData.PARENT), Matchers.notNullValue()); - assertThat(processor.getTemplates().get(MetaData.TIMESTAMP), Matchers.notNullValue()); - assertThat(processor.getTemplates().get(MetaData.TTL), Matchers.notNullValue()); - } - - public void testCreateIllegalMetaData() throws Exception { - MetaDataProcessor.Factory factory = new MetaDataProcessor.Factory(); - try { - factory.create(Collections.singletonMap("_field", "text {{expression}}")); - fail("exception should 
have been thrown"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), Matchers.equalTo("no valid metadata field name [_field]")); - } - } - - public void testCreateIllegalEmpty() throws Exception { - MetaDataProcessor.Factory factory = new MetaDataProcessor.Factory(); - try { - factory.create(Collections.emptyMap()); - fail("exception should have been thrown"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), Matchers.equalTo("no meta fields specified")); - } - } - - public void testIlegalMustacheExpression() throws Exception { - try { - new MetaDataProcessor.Factory().create(Collections.singletonMap("_index", "text {{var")); - fail("exception expected"); - } catch (MustacheException e) { - assertThat(e.getMessage(), Matchers.equalTo("Improperly closed variable in :1")); - } - } - -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java deleted file mode 100644 index c102849fdc4..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/meta/MetaDataProcessorTests.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.elasticsearch.ingest.processor.meta; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import org.elasticsearch.common.io.FastStringReader; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.ingest.IngestDocument.*; - -public class MetaDataProcessorTests extends ESTestCase { - - public void testExecute() throws Exception { - Map templates = new HashMap<>(); - for (MetaData metaData : MetaData.values()) { - templates.put(metaData, new 
DefaultMustacheFactory().compile(new FastStringReader("some {{field}}"), "noname")); - } - - MetaDataProcessor processor = new MetaDataProcessor(templates); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", "value")); - processor.execute(ingestDocument); - - Map metadataMap = ingestDocument.extractMetadata(); - for (MetaData metaData : MetaData.values()) { - assertThat(metadataMap.get(metaData), Matchers.equalTo("some value")); - } - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java index f45f3bc59d0..3a370223813 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java @@ -19,27 +19,33 @@ package org.elasticsearch.ingest.processor.remove; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; public class RemoveProcessorFactoryTests extends ESTestCase { + private RemoveProcessor.Factory factory; + + @Before + public void init() { + factory = new RemoveProcessor.Factory(TestTemplateService.instance()); + } + public void testCreate() throws Exception { - RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); RemoveProcessor removeProcessor = factory.create(config); - assertThat(removeProcessor.getField(), equalTo("field1")); + assertThat(removeProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); } public void testCreateMissingField() throws Exception { - 
RemoveProcessor.Factory factory = new RemoveProcessor.Factory(); Map config = new HashMap<>(); try { factory.create(config); @@ -48,4 +54,5 @@ public class RemoveProcessorFactoryTests extends ESTestCase { assertThat(e.getMessage(), equalTo("required property [field] is missing")); } } + } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java index 2ccfd5add93..9800c48702a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java @@ -21,13 +21,11 @@ package org.elasticsearch.ingest.processor.remove; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; -import java.util.Set; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -37,7 +35,7 @@ public class RemoveProcessorTests extends ESTestCase { public void testRemoveFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new RemoveProcessor(field); + Processor processor = new RemoveProcessor(new TestTemplateService.MockTemplate(field)); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(field), equalTo(false)); } @@ -45,7 +43,7 @@ public class RemoveProcessorTests extends ESTestCase { public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RemoveProcessor(fieldName); + Processor processor = new RemoveProcessor(new TestTemplateService.MockTemplate(fieldName)); try { processor.execute(ingestDocument); fail("remove field should have failed"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java index 9eb6b2a4907..ddbec7c8546 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.ingest.processor.set; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.util.Collections; import java.util.HashMap; @@ -29,18 +31,23 @@ import static org.hamcrest.CoreMatchers.equalTo; public class SetProcessorFactoryTests extends ESTestCase { + private SetProcessor.Factory factory; + + @Before + public void init() { + factory = new SetProcessor.Factory(TestTemplateService.instance()); + } + public void testCreate() throws Exception { - SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); config.put("value", "value1"); SetProcessor setProcessor = factory.create(config); - assertThat(setProcessor.getField(), equalTo("field1")); - assertThat(setProcessor.getValue(), equalTo("value1")); + assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); + assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1")); } public void testCreateNoFieldPresent() throws Exception { - SetProcessor.Factory factory = new 
SetProcessor.Factory(); Map config = new HashMap<>(); config.put("value", "value1"); try { @@ -52,7 +59,6 @@ public class SetProcessorFactoryTests extends ESTestCase { } public void testCreateNoValuePresent() throws Exception { - SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); try { @@ -64,7 +70,6 @@ public class SetProcessorFactoryTests extends ESTestCase { } public void testCreateNullValue() throws Exception { - SetProcessor.Factory factory = new SetProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); config.put("value", null); @@ -75,4 +80,5 @@ public class SetProcessorFactoryTests extends ESTestCase { assertThat(e.getMessage(), equalTo("required property [value] is missing")); } } + } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java index 7d693066595..9d772602f78 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java @@ -19,10 +19,10 @@ package org.elasticsearch.ingest.processor.set; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.*; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; import java.util.*; @@ -34,7 +34,7 @@ public class SetProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); - Processor processor = new SetProcessor(fieldName, fieldValue); + Processor processor = 
createSetProcessor(fieldName, fieldValue); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(fieldName), equalTo(true)); assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue)); @@ -46,7 +46,7 @@ public class SetProcessorTests extends ESTestCase { IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), testIngestDocument, fieldValue); - Processor processor = new SetProcessor(fieldName, fieldValue); + Processor processor = createSetProcessor(fieldName, fieldValue); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(fieldName), equalTo(true)); assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue)); @@ -55,7 +55,7 @@ public class SetProcessorTests extends ESTestCase { public void testSetFieldsTypeMismatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue("field", "value"); - Processor processor = new SetProcessor("field.inner", "value"); + Processor processor = createSetProcessor("field.inner", "value"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -63,4 +63,18 @@ public class SetProcessorTests extends ESTestCase { assertThat(e.getMessage(), equalTo("cannot set [inner] with parent object of type [java.lang.String] as part of path [field.inner]")); } } -} \ No newline at end of file + + public void testSetMetadata() throws Exception { + IngestDocument.MetaData randomMetaData = randomFrom(IngestDocument.MetaData.values()); + Processor processor = createSetProcessor(randomMetaData.getFieldName(), "_value"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + processor.execute(ingestDocument); + 
assertThat(ingestDocument.getFieldValue(randomMetaData.getFieldName(), String.class), Matchers.equalTo("_value")); + } + + private Processor createSetProcessor(String fieldName, Object fieldValue) { + TemplateService templateService = TestTemplateService.instance(); + return new SetProcessor(templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 8f8a3a3f2ea..a1e3191e16c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -25,8 +25,9 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.meta.MetaDataProcessor; +import org.elasticsearch.ingest.processor.set.SetProcessor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; @@ -126,10 +127,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") public void testExecuteTTL() throws Exception { // test with valid ttl - MetaDataProcessor.Factory metaProcessorFactory = new MetaDataProcessor.Factory(); + SetProcessor.Factory metaProcessorFactory = new SetProcessor.Factory(TestTemplateService.instance()); Map config = new HashMap<>(); - config.put("_ttl", "5d"); - MetaDataProcessor processor = metaProcessorFactory.create(config); + config.put("field", "_ttl"); + config.put("value", "5d"); + Processor processor = 
metaProcessorFactory.create(config); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); @@ -141,9 +143,10 @@ public class PipelineExecutionServiceTests extends ESTestCase { verify(listener, never()).onFailure(any()); // test with invalid ttl - metaProcessorFactory = new MetaDataProcessor.Factory(); + metaProcessorFactory = new SetProcessor.Factory(TestTemplateService.instance()); config = new HashMap<>(); - config.put("_ttl", "abc"); + config.put("field", "_ttl"); + config.put("value", "abc"); processor = metaProcessorFactory.create(config); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 2f1409b4528..51c1e877de4 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.env.Environment; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; @@ -68,9 +69,10 @@ public class PipelineStoreTests extends ESTestCase { client = mock(Client.class); ClusterService clusterService = mock(ClusterService.class); + ScriptService scriptService = mock(ScriptService.class); when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); Environment environment = 
mock(Environment.class); - store = new PipelineStore(Settings.EMPTY, () -> client, threadPool, environment, clusterService, Collections.emptyMap()); + store = new PipelineStore(Settings.EMPTY, () -> client, threadPool, environment, clusterService, () -> scriptService, Collections.emptyMap()); } @After diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml index eb59cada2d0..ca0f58435df 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml @@ -113,3 +113,51 @@ - match: { _source.field_to_join: "127-0-0-1" } - match: { _source.field_to_convert: [127,0,0,1] } - match: { _source.field_to_gsub: "127.0.0.1" } + +--- +"Test metadata": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "_index", + "value" : "surprise" + } + } + ] + } + - match: { _id: "my_pipeline" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field: "value"} + + - do: + get: + index: surprise + type: test + id: 1 + - length: { _source: 1 } + - match: { _source.field: "value" } + diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml deleted file mode 100644 index be13146fb63..00000000000 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_meta_processor.yaml +++ /dev/null @@ -1,45 +0,0 @@ ---- -"Test meta processor": - - do: - cluster.health: - 
wait_for_status: green - - - do: - ingest.put_pipeline: - id: "my_pipeline" - body: > - { - "description": "_description", - "processors": [ - { - "meta" : { - "_index" : "surprise" - } - } - ] - } - - match: { _id: "my_pipeline" } - - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - - do: - ingest.index: - index: test - type: test - id: 1 - pipeline_id: "my_pipeline" - body: {field: "value"} - - - do: - get: - index: surprise - type: test - id: 1 - - length: { _source: 1 } - - match: { _source.field: "value" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_simulate.yaml rename to plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml diff --git a/qa/ingest-with-mustache/build.gradle b/qa/ingest-with-mustache/build.gradle new file mode 100644 index 00000000000..32ed5f8956f --- /dev/null +++ b/qa/ingest-with-mustache/build.gradle @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +apply plugin: 'elasticsearch.rest-test' + +dependencies { + testCompile project(path: ':plugins:ingest', configuration: 'runtime') + testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') +} + +integTest { + cluster { + plugin 'ingest', project(':plugins:ingest') + } +} diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java new file mode 100644 index 00000000000..bdd37c86d58 --- /dev/null +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.script.ScriptContextRegistry; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Collections; + +public abstract class AbstractMustacheTests extends ESTestCase { + + protected TemplateService templateService; + + @Before + public void init() throws Exception { + Settings settings = Settings.builder() + .put("path.home", createTempDir()) + .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) + .build(); + MustacheScriptEngineService mustache = new MustacheScriptEngineService(settings); + ScriptContextRegistry registry = new ScriptContextRegistry( + Collections.singletonList(InternalTemplateService.INGEST_SCRIPT_CONTEXT) + ); + ScriptService scriptService = new ScriptService( + settings, new Environment(settings), Collections.singleton(mustache), null, registry + ); + templateService = new InternalTemplateService(scriptService); + } + +} diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java new file mode 100644 index 00000000000..8096d3a8d5d --- /dev/null +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ValueSource; + +import java.util.*; + +import static org.hamcrest.Matchers.equalTo; + +public class IngestDocumentMustacheIT extends AbstractMustacheTests { + + public void testAccessMetaDataViaTemplate() { + Map document = new HashMap<>(); + document.put("foo", "bar"); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{foo}}", templateService)); + assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 bar")); + + ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("2 {{_source.foo}}", templateService)); + assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("2 bar")); + } + + public void testAccessMapMetaDataViaTemplate() { + Map document = new HashMap<>(); + Map innerObject = new HashMap<>(); + innerObject.put("bar", "hello bar"); + innerObject.put("baz", "hello baz"); + innerObject.put("qux", Collections.singletonMap("fubar", "hello qux and fubar")); + document.put("foo", innerObject); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + 
ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", templateService)); + assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 hello bar hello baz hello qux and fubar")); + + ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("2 {{_source.foo.bar}} {{_source.foo.baz}} {{_source.foo.qux.fubar}}", templateService)); + assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("2 hello bar hello baz hello qux and fubar")); + } + + public void testAccessListMetaDataViaTemplate() { + Map document = new HashMap<>(); + document.put("list1", Arrays.asList("foo", "bar", null)); + List> list = new ArrayList<>(); + Map value = new HashMap<>(); + value.put("field", "value"); + list.add(value); + list.add(null); + document.put("list2", list); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + // TODO: fix index based lookups in lists: + ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{list1}} {{list2}}", templateService)); + assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 [foo, bar, null] [{field=value}, null]")); + + ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("2 {{_source.list1}} {{_source.list2}}", templateService)); + assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("2 [foo, bar, null] [{field=value}, null]")); + } + + public void testAccessIngestMetadataViaTemplate() { + Map document = new HashMap<>(); + Map ingestMap = new HashMap<>(); + ingestMap.put("timestamp", "bogus_timestamp"); + document.put("_ingest", ingestMap); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + ingestDocument.setFieldValue(templateService.compile("ingest_timestamp"), ValueSource.wrap("{{_ingest.timestamp}} and 
{{_source._ingest.timestamp}}", templateService)); + assertThat(ingestDocument.getFieldValue("ingest_timestamp", String.class), equalTo(ingestDocument.getIngestMetadata().get("timestamp") + " and bogus_timestamp")); + } + +} diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java new file mode 100644 index 00000000000..9931b15f231 --- /dev/null +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.ingest.processor.remove.RemoveProcessor; +import org.hamcrest.CoreMatchers; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class IngestMustacheRemoveProcessorIT extends AbstractMustacheTests { + + public void testRemoveProcessorMustacheExpression() throws Exception { + RemoveProcessor.Factory factory = new RemoveProcessor.Factory(templateService); + Map config = new HashMap<>(); + config.put("field", "field{{var}}"); + RemoveProcessor processor = factory.create(config); + assertThat(processor.getField().execute(Collections.singletonMap("var", "_value")), CoreMatchers.equalTo("field_value")); + } + +} diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java new file mode 100644 index 00000000000..1cf70543ce1 --- /dev/null +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + + +import org.elasticsearch.ingest.*; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.set.SetProcessor; +import org.hamcrest.Matchers; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class IngestMustacheSetProcessorIT extends AbstractMustacheTests { + + public void testExpression() throws Exception { + SetProcessor processor = createSetProcessor("_index", "text {{var}}"); + assertThat(processor.getValue(), instanceOf(ValueSource.TemplatedValue.class)); + assertThat(processor.getValue().copyAndResolve(Collections.singletonMap("var", "_value")), equalTo("text _value")); + } + + public void testSetMetadataWithTemplates() throws Exception { + IngestDocument.MetaData randomMetaData = randomFrom(IngestDocument.MetaData.values()); + Processor processor = createSetProcessor(randomMetaData.getFieldName(), "_value {{field}}"); + IngestDocument ingestDocument = createIngestDocument(Collections.singletonMap("field", "value")); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue(randomMetaData.getFieldName(), String.class), Matchers.equalTo("_value value")); + } + + public void testSetWithTemplates() throws Exception { + IngestDocument.MetaData randomMetaData = randomFrom(IngestDocument.MetaData.INDEX, IngestDocument.MetaData.TYPE, IngestDocument.MetaData.ID); + Processor processor = createSetProcessor("field{{_type}}", "_value {{" + randomMetaData.getFieldName() + "}}"); + IngestDocument ingestDocument = createIngestDocument(new HashMap<>()); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("field_type", String.class), Matchers.equalTo("_value " + ingestDocument.getFieldValue(randomMetaData.getFieldName(), String.class))); + } + + private SetProcessor 
createSetProcessor(String fieldName, Object fieldValue) throws Exception { + SetProcessor.Factory factory = new SetProcessor.Factory(templateService); + Map config = new HashMap<>(); + config.put("field", fieldName); + config.put("value", fieldValue); + return factory.create(config); + } + + private IngestDocument createIngestDocument(Map source) { + return new IngestDocument("_index", "_type", "_id", null, null, null, null, source); + } + +} diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java new file mode 100644 index 00000000000..6925959e710 --- /dev/null +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.script.ScriptException; + +import java.util.*; + +import static org.hamcrest.Matchers.*; + +public class TemplateServiceIT extends AbstractMustacheTests { + + public void testTemplates() { + Map model = new HashMap<>(); + model.put("fielda", "value1"); + model.put("fieldb", Collections.singletonMap("fieldc", "value3")); + + TemplateService.Template template = templateService.compile("{{fielda}}/{{fieldb}}/{{fieldb.fieldc}}"); + assertThat(template.execute(model), equalTo("value1/{fieldc=value3}/value3")); + } + + public void testWrongTemplateUsage() { + Map model = Collections.emptyMap(); + TemplateService.Template template = templateService.compile("value"); + assertThat(template.execute(model), equalTo("value")); + + template = templateService.compile("value {{"); + assertThat(template.execute(model), equalTo("value {{")); + template = templateService.compile("value {{abc"); + assertThat(template.execute(model), equalTo("value {{abc")); + template = templateService.compile("value }}"); + assertThat(template.execute(model), equalTo("value }}")); + template = templateService.compile("value }} {{"); + assertThat(template.execute(model), equalTo("value }} {{")); + } + +} diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java new file mode 100644 index 00000000000..85fd9561dad --- /dev/null +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ValueSource; + +import java.util.*; + +import static org.hamcrest.Matchers.*; + +public class ValueSourceMustacheIT extends AbstractMustacheTests { + + public void testValueSourceWithTemplates() { + Map model = new HashMap<>(); + model.put("field1", "value1"); + model.put("field2", Collections.singletonMap("field3", "value3")); + + ValueSource valueSource = ValueSource.wrap("{{field1}}/{{field2}}/{{field2.field3}}", templateService); + assertThat(valueSource, instanceOf(ValueSource.TemplatedValue.class)); + assertThat(valueSource.copyAndResolve(model), equalTo("value1/{field3=value3}/value3")); + + valueSource = ValueSource.wrap(Arrays.asList("_value", "{{field1}}"), templateService); + assertThat(valueSource, instanceOf(ValueSource.ListValue.class)); + List result = (List) valueSource.copyAndResolve(model); + assertThat(result.size(), equalTo(2)); + assertThat(result.get(0), equalTo("_value")); + assertThat(result.get(1), equalTo("value1")); + + Map map = new HashMap<>(); + map.put("field1", "{{field1}}"); + map.put("field2", Collections.singletonMap("field3", "{{field2.field3}}")); + map.put("field4", "_value"); + valueSource = ValueSource.wrap(map, templateService); + assertThat(valueSource, instanceOf(ValueSource.MapValue.class)); + Map 
resultMap = (Map) valueSource.copyAndResolve(model); + assertThat(resultMap.size(), equalTo(3)); + assertThat(resultMap.get("field1"), equalTo("value1")); + assertThat(((Map) resultMap.get("field2")).size(), equalTo(1)); + assertThat(((Map) resultMap.get("field2")).get("field3"), equalTo("value3")); + assertThat(resultMap.get("field4"), equalTo("_value")); + } + + public void testAccessSourceViaTemplate() { + IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, null, null, new HashMap<>()); + assertThat(ingestDocument.hasField("marvel"), is(false)); + ingestDocument.setFieldValue(templateService.compile("{{_index}}"), ValueSource.wrap("{{_index}}", templateService)); + assertThat(ingestDocument.getFieldValue("marvel", String.class), equalTo("marvel")); + ingestDocument.removeField(templateService.compile("{{marvel}}")); + assertThat(ingestDocument.hasField("index"), is(false)); + } + +} diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/smoketest/IngestWithMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/smoketest/IngestWithMustacheIT.java new file mode 100644 index 00000000000..73f64d4433c --- /dev/null +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/smoketest/IngestWithMustacheIT.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.smoketest; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; + +public class IngestWithMustacheIT extends ESRestTestCase { + + public IngestWithMustacheIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } + +} diff --git a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml new file mode 100644 index 00000000000..fb0fa9c1083 --- /dev/null +++ b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml @@ -0,0 +1,171 @@ +--- +"Test metadata templateing": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline_1" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "index_type_id", + "value": "{{_index}}/{{_type}}/{{_id}}" + } + } + ] + } + - match: { _id: "my_pipeline_1" } + + # Simulate a Thread.sleep(), because 
pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline_1" + body: {} + + - do: + get: + index: test + type: test + id: 1 + - length: { _source: 1 } + - match: { _source.index_type_id: "test/test/1" } + +--- +"Test templateing": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_pipeline_1" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field4", + "value": "{{field1}}/{{field2}}/{{field3}}" + } + } + ] + } + - match: { _id: "my_pipeline_1" } + + - do: + ingest.put_pipeline: + id: "my_pipeline_2" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "{{field1}}", + "value": "value" + } + } + ] + } + - match: { _id: "my_pipeline_2" } + + - do: + ingest.put_pipeline: + id: "my_pipeline_3" + body: > + { + "description": "_description", + "processors": [ + { + "remove" : { + "field" : "{{field_to_remove}}" + } + } + ] + } + - match: { _id: "my_pipeline_3" } + + # Simulate a Thread.sleep(), because pipeline are updated in the background + - do: + catch: request_timeout + cluster.health: + wait_for_nodes: 99 + timeout: 2s + - match: { "timed_out": true } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline_1" + body: { + field1: "1", + field2: "2", + field3: "3" + } + + - do: + get: + index: test + type: test + id: 1 + - length: { _source: 4 } + - match: { _source.field1: "1" } + - match: { _source.field2: "2" } + - match: { _source.field3: "3" } + - match: { _source.field4: "1/2/3" } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline_2" + body: { + field1: "field2" + } + + - do: + get: + index: test + type: test + id: 1 + - length: { _source: 2 } + - match: { _source.field1: "field2" 
} + - match: { _source.field2: "value" } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline_3" + body: { + field_to_remove: "field2", + field2: "2", + } + + - do: + get: + index: test + type: test + id: 1 + - length: { _source: 1 } + - match: { _source.field_to_remove: "field2" } diff --git a/settings.gradle b/settings.gradle index 760b4e90c86..9a69d685e66 100644 --- a/settings.gradle +++ b/settings.gradle @@ -39,6 +39,7 @@ List projects = [ 'qa:smoke-test-client', 'qa:smoke-test-multinode', 'qa:smoke-test-plugins', + 'qa:ingest-with-mustache', 'qa:vagrant', ] From 6dfcee69376031d2072e6df339a1b60f607e1ee4 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 3 Dec 2015 00:03:31 +0100 Subject: [PATCH 128/347] Added an internal reload pipeline api that makes sure pipeline changes are visible on all ingest nodes after modifcations have been made. * Pipeline store can now only start when there is no .ingest index or all primary shards of .ingest have been started * IngestPlugin adds`node.ingest` setting to `true`. This is used to figure out to what nodes to send the refresh request too. This setting isn't yet configurable. This will be done in a follow up issue. * Removed the background pipeline updater and added added logic to deal with specific scenarious to reload all pipelines. * Ingest services are no longer be managed by Guice. Only the bootstrapper gets managed by guice and that contructs all the services/components ingest will need. 
--- docs/plugins/ingest.asciidoc | 4 +- .../plugin/ingest/IngestModule.java | 5 +- .../plugin/ingest/IngestPlugin.java | 6 +- .../ingest/PipelineExecutionService.java | 2 - .../plugin/ingest/PipelineStore.java | 186 +++++++------ .../ingest/PipelineStoreBootstrapper.java | 163 +++++++++++ .../ingest/transport/IngestActionFilter.java | 5 +- .../delete/DeletePipelineTransportAction.java | 5 +- .../get/GetPipelineTransportAction.java | 5 +- .../put/PutPipelineTransportAction.java | 5 +- .../reload/ReloadPipelinesAction.java | 123 +++++++++ .../simulate/SimulateExecutionService.java | 6 +- .../SimulatePipelineTransportAction.java | 8 +- .../elasticsearch/ingest/IngestClientIT.java | 65 ++--- .../ingest/PipelineBootstrapperTests.java | 253 ++++++++++++++++++ .../plugin/ingest/PipelineStoreTests.java | 67 ++--- .../transport/IngestActionFilterTests.java | 9 +- .../reload/ReloadPipelinesActionTests.java | 162 +++++++++++ .../rest-api-spec/test/ingest/20_crud.yaml | 24 -- .../rest-api-spec/test/ingest/30_grok.yaml | 20 -- .../test/ingest/40_geoip_processor.yaml | 32 --- .../test/ingest/50_date_processor.yaml | 12 - .../rest-api-spec/test/ingest/60_mutate.yaml | 12 - .../test/ingest/70_simulate.yaml | 32 --- 24 files changed, 872 insertions(+), 339 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreBootstrapper.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineBootstrapperTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 42585fe0e90..4f84da7203f 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -527,8 +527,8 @@ PUT _ingest/pipeline/my-pipeline-id 
-------------------------------------------------- // AUTOSENSE -NOTE: Each ingest node updates its processors asynchronously in the background, so it may take a few seconds for all - nodes to have the latest version of the pipeline. +NOTE: The put pipeline api also instructs all ingest nodes to reload their in-memory representation of pipelines, so that + pipeline changes take immediately in effect. ==== Get pipeline API diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index a50bed33ea0..fa013024db3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -35,7 +35,6 @@ import org.elasticsearch.ingest.processor.split.SplitProcessor; import org.elasticsearch.ingest.processor.trim.TrimProcessor; import org.elasticsearch.ingest.processor.uppercase.UppercaseProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulateExecutionService; import java.util.HashMap; import java.util.Map; @@ -47,9 +46,7 @@ public class IngestModule extends AbstractModule { @Override protected void configure() { binder().bind(IngestRestFilter.class).asEagerSingleton(); - binder().bind(PipelineExecutionService.class).asEagerSingleton(); - binder().bind(PipelineStore.class).asEagerSingleton(); - binder().bind(SimulateExecutionService.class).asEagerSingleton(); + binder().bind(PipelineStoreBootstrapper.class).asEagerSingleton(); addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 1bd68efd87c..d9772eaefc7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -17,7 +17,6 @@ * under the License. */ - package org.elasticsearch.plugin.ingest; import org.elasticsearch.action.ActionModule; @@ -54,6 +53,7 @@ public class IngestPlugin extends Plugin { public static final String PIPELINE_ID_PARAM = "pipeline_id"; public static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; public static final String NAME = "ingest"; + public static final String NODE_INGEST_SETTING = "node.ingest"; private final Settings nodeSettings; private final boolean transportClient; @@ -87,7 +87,7 @@ public class IngestPlugin extends Plugin { if (transportClient) { return Collections.emptyList(); } else { - return Collections.singletonList(PipelineStore.class); + return Collections.singletonList(PipelineStoreBootstrapper.class); } } @@ -95,6 +95,8 @@ public class IngestPlugin extends Plugin { public Settings additionalSettings() { return settingsBuilder() .put(PipelineExecutionService.additionalSettings(nodeSettings)) + // TODO: in a followup issue this should be made configurable + .put(NODE_INGEST_SETTING, true) .build(); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 18a79104f5a..3e7f405ab51 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -21,7 +21,6 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; -import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.ingest.IngestDocument; @@ -37,7 +36,6 @@ public class PipelineExecutionService { private final PipelineStore store; private final ThreadPool threadPool; - @Inject public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) { this.store = store; this.threadPool = threadPool; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 9e36cf17df2..bb6007170cb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -30,76 +30,59 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.common.SearchScrollIterator; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; -import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.ingest.Pipeline; import 
org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; -import org.elasticsearch.script.*; +import org.elasticsearch.plugin.ingest.transport.reload.ReloadPipelinesAction; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import java.io.Closeable; import java.io.IOException; import java.util.*; -public class PipelineStore extends AbstractLifecycleComponent { +public class PipelineStore extends AbstractComponent implements Closeable { public final static String INDEX = ".ingest"; public final static String TYPE = "pipeline"; - private final ThreadPool threadPool; - private final Environment environment; + private Client client; private final TimeValue scrollTimeout; - private final ClusterService clusterService; - private final Provider clientProvider; - private final TimeValue pipelineUpdateInterval; - private final Provider scriptServiceProvider; + private final ReloadPipelinesAction reloadPipelinesAction; private final Pipeline.Factory factory = new Pipeline.Factory(); - private volatile Map processorFactoryRegistry; - private final Map processorFactoryProviders; + private Map processorFactoryRegistry; - private volatile Client client; + private volatile boolean started = false; private volatile Map pipelines = new HashMap<>(); - @Inject - public PipelineStore(Settings settings, Provider clientProvider, ThreadPool threadPool, - Environment environment, ClusterService clusterService, Provider scriptServiceProvider, - Map processorFactoryProviders) { + public PipelineStore(Settings settings, ClusterService clusterService, 
TransportService transportService) { super(settings); - this.threadPool = threadPool; - this.environment = environment; - this.clusterService = clusterService; - this.clientProvider = clientProvider; - this.scriptServiceProvider = scriptServiceProvider; this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); - this.pipelineUpdateInterval = settings.getAsTime("ingest.pipeline.store.update.interval", TimeValue.timeValueSeconds(1)); - this.processorFactoryProviders = processorFactoryProviders; - - clusterService.add(new PipelineStoreListener()); + this.reloadPipelinesAction = new ReloadPipelinesAction(settings, this, clusterService, transportService); } - @Override - protected void doStart() { - // TODO this will be better when #15203 gets in: + public void setClient(Client client) { + this.client = client; + } + + public void buildProcessorFactoryRegistry(Map processorFactoryProviders, Environment environment, ScriptService scriptService) { Map processorFactories = new HashMap<>(); - TemplateService templateService = new InternalTemplateService(scriptServiceProvider.get()); + TemplateService templateService = new InternalTemplateService(scriptService); for (Map.Entry entry : processorFactoryProviders.entrySet()) { Processor.Factory processorFactory = entry.getValue().get(environment, templateService); processorFactories.put(entry.getKey(), processorFactory); @@ -108,35 +91,31 @@ public class PipelineStore extends AbstractLifecycleComponent { } @Override - protected void doStop() { - } - - @Override - protected void doClose() { - // TODO: When org.elasticsearch.node.Node can close Closable instances we should remove this code + public void close() throws IOException { + stop("closing"); + // TODO: When org.elasticsearch.node.Node can close Closable instances we should try to remove this code, + // since any wired closable should be able to close itself List closeables = new ArrayList<>(); for (Processor.Factory 
factory : processorFactoryRegistry.values()) { if (factory instanceof Closeable) { closeables.add((Closeable) factory); } } - try { - IOUtils.close(closeables); - } catch (IOException e) { - throw new RuntimeException(e); - } + IOUtils.close(closeables); } /** * Deletes the pipeline specified by id in the request. */ public void delete(DeletePipelineRequest request, ActionListener listener) { + ensureReady(); + DeleteRequest deleteRequest = new DeleteRequest(request); deleteRequest.index(PipelineStore.INDEX); deleteRequest.type(PipelineStore.TYPE); deleteRequest.id(request.id()); deleteRequest.refresh(true); - client().delete(deleteRequest, listener); + client.delete(deleteRequest, handleWriteResponseAndReloadPipelines(listener)); } /** @@ -145,6 +124,8 @@ public class PipelineStore extends AbstractLifecycleComponent { * @throws IllegalArgumentException If the pipeline holds incorrect configuration */ public void put(PutPipelineRequest request, ActionListener listener) throws IllegalArgumentException { + ensureReady(); + try { // validates the pipeline and processor configuration: Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); @@ -159,13 +140,15 @@ public class PipelineStore extends AbstractLifecycleComponent { indexRequest.id(request.id()); indexRequest.source(request.source()); indexRequest.refresh(true); - client().index(indexRequest, listener); + client.index(indexRequest, handleWriteResponseAndReloadPipelines(listener)); } /** * Returns the pipeline by the specified id */ public Pipeline get(String id) { + ensureReady(); + PipelineDefinition ref = pipelines.get(id); if (ref != null) { return ref.getPipeline(); @@ -179,6 +162,8 @@ public class PipelineStore extends AbstractLifecycleComponent { } public List getReference(String... 
ids) { + ensureReady(); + List result = new ArrayList<>(ids.length); for (String id : ids) { if (Regex.isSimpleMatchPattern(id)) { @@ -197,11 +182,7 @@ public class PipelineStore extends AbstractLifecycleComponent { return result; } - Pipeline constructPipeline(String id, Map config) throws Exception { - return factory.create(id, config, processorFactoryRegistry); - } - - synchronized void updatePipelines() throws Exception { + public synchronized void updatePipelines() throws Exception { // note: this process isn't fast or smart, but the idea is that there will not be many pipelines, // so for that reason the goal is to keep the update logic simple. @@ -210,9 +191,15 @@ public class PipelineStore extends AbstractLifecycleComponent { for (SearchHit hit : readAllPipelines()) { String pipelineId = hit.getId(); BytesReference pipelineSource = hit.getSourceRef(); - PipelineDefinition previous = newPipelines.get(pipelineId); - if (previous != null) { - if (previous.getSource().equals(pipelineSource)) { + PipelineDefinition current = newPipelines.get(pipelineId); + if (current != null) { + // If we first read from a primary shard copy and then from a replica copy, + // and a write did not yet make it into the replica shard + // then the source is not equal but we don't update because the current pipeline is the latest: + if (current.getVersion() > hit.getVersion()) { + continue; + } + if (current.getSource().equals(pipelineSource)) { continue; } } @@ -224,7 +211,7 @@ public class PipelineStore extends AbstractLifecycleComponent { int removed = 0; for (String existingPipelineId : pipelines.keySet()) { - if (!existPipeline(existingPipelineId)) { + if (pipelineExists(existingPipelineId) == false) { newPipelines.remove(existingPipelineId); removed++; } @@ -238,17 +225,46 @@ public class PipelineStore extends AbstractLifecycleComponent { } } - void startUpdateWorker() { - threadPool.schedule(pipelineUpdateInterval, ThreadPool.Names.GENERIC, new Updater()); + private Pipeline 
constructPipeline(String id, Map config) throws Exception { + return factory.create(id, config, processorFactoryRegistry); } - boolean existPipeline(String pipelineId) { + boolean pipelineExists(String pipelineId) { GetRequest request = new GetRequest(PipelineStore.INDEX, PipelineStore.TYPE, pipelineId); - GetResponse response = client().get(request).actionGet(); - return response.isExists(); + try { + GetResponse response = client.get(request).actionGet(); + return response.isExists(); + } catch (IndexNotFoundException e) { + // the ingest index doesn't exist, so the pipeline doesn't either: + return false; + } } - Iterable readAllPipelines() { + synchronized void start() throws Exception { + if (started) { + logger.debug("Pipeline already started"); + } else { + updatePipelines(); + started = true; + logger.debug("Pipeline store started with [{}] pipelines", pipelines.size()); + } + } + + synchronized void stop(String reason) { + if (started) { + started = false; + pipelines = new HashMap<>(); + logger.debug("Pipeline store stopped, reason [{}]", reason); + } else { + logger.debug("Pipeline alreadt stopped"); + } + } + + public boolean isStarted() { + return started; + } + + private Iterable readAllPipelines() { // TODO: the search should be replaced with an ingest API when it is available SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); sourceBuilder.version(true); @@ -256,40 +272,32 @@ public class PipelineStore extends AbstractLifecycleComponent { SearchRequest searchRequest = new SearchRequest(PipelineStore.INDEX); searchRequest.source(sourceBuilder); searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); - return SearchScrollIterator.createIterator(client(), scrollTimeout, searchRequest); + return SearchScrollIterator.createIterator(client, scrollTimeout, searchRequest); } - private Client client() { - if (client == null) { - client = clientProvider.get(); + private void ensureReady() { + if (started == false) { + throw new 
IllegalStateException("pipeline store isn't ready yet"); } - return client; } - class Updater implements Runnable { - - @Override - public void run() { - try { - updatePipelines(); - } catch (Exception e) { - logger.error("pipeline store update failure", e); - } finally { - startUpdateWorker(); + @SuppressWarnings("unchecked") + private ActionListener handleWriteResponseAndReloadPipelines(ActionListener listener) { + return new ActionListener() { + @Override + public void onResponse(T result) { + try { + reloadPipelinesAction.reloadPipelinesOnAllNodes(reloadResult -> listener.onResponse(result)); + } catch (Throwable e) { + listener.onFailure(e); + } } - } - } - - class PipelineStoreListener implements ClusterStateListener { - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { - startUpdateWorker(); - clusterService.remove(this); + @Override + public void onFailure(Throwable e) { + listener.onFailure(e); } - } + }; } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreBootstrapper.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreBootstrapper.java new file mode 100644 index 00000000000..4432fffcf2d --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreBootstrapper.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Map; + +public class PipelineStoreBootstrapper extends AbstractLifecycleComponent implements ClusterStateListener { + + private final ThreadPool threadPool; + private final Environment environment; + private final PipelineStore pipelineStore; + private final PipelineExecutionService pipelineExecutionService; + private final Map processorFactoryProvider; + + @Inject + public PipelineStoreBootstrapper(Settings settings, ThreadPool threadPool, Environment environment, + ClusterService clusterService, TransportService transportService, + Map processorFactoryProvider) { + super(settings); + this.threadPool = threadPool; + this.environment = environment; + this.processorFactoryProvider 
= processorFactoryProvider; + this.pipelineStore = new PipelineStore(settings, clusterService, transportService); + this.pipelineExecutionService = new PipelineExecutionService(pipelineStore, threadPool); + + clusterService.add(this); + } + + // for testing: + PipelineStoreBootstrapper(Settings settings, ThreadPool threadPool, ClusterService clusterService, + PipelineStore pipelineStore, PipelineExecutionService pipelineExecutionService) { + super(settings); + this.threadPool = threadPool; + this.environment = null; + clusterService.add(this); + this.pipelineStore = pipelineStore; + this.pipelineExecutionService = pipelineExecutionService; + this.processorFactoryProvider = null; + } + + public PipelineStore getPipelineStore() { + return pipelineStore; + } + + public PipelineExecutionService getPipelineExecutionService() { + return pipelineExecutionService; + } + + @Inject + public void setClient(Client client) { + pipelineStore.setClient(client); + } + + @Inject + public void setScriptService(ScriptService scriptService) { + pipelineStore.buildProcessorFactoryRegistry(processorFactoryProvider, environment, scriptService); + } + + @Override + public void clusterChanged(ClusterChangedEvent event) { + if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + return; + } + + if (pipelineStore.isStarted()) { + if (validClusterState(event.state()) == false) { + stopPipelineStore("cluster state invalid [" + event.state() + "]"); + } + } else { + if (validClusterState(event.state())) { + startPipelineStore(); + } + } + } + + boolean validClusterState(ClusterState state) { + if (state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_WRITES) || + state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ALL)) { + return false; + } + + if (state.getMetaData().hasConcreteIndex(PipelineStore.INDEX)) { + IndexRoutingTable routingTable = state.getRoutingTable().index(PipelineStore.INDEX); + return 
routingTable.allPrimaryShardsActive(); + } else { + // it will be ready when auto create index kicks in before the first pipeline doc gets added + return true; + } + } + + @Override + protected void doStart() { + } + + @Override + protected void doStop() { + } + + @Override + protected void doClose() { + try { + pipelineStore.close(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + void startPipelineStore() { + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { + try { + pipelineStore.start(); + } catch (Exception e) { + logger.warn("pipeline store failed to start, retrying...", e); + startPipelineStore(); + } + }); + } + + void stopPipelineStore(String reason) { + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { + try { + pipelineStore.stop(reason); + } catch (Exception e) { + logger.error("pipeline store stop failure", e); + } + }); + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index e552c76d4bf..15cf21b17fb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; +import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; import java.util.*; @@ -41,9 +42,9 @@ public final class IngestActionFilter extends AbstractComponent implements Actio private final PipelineExecutionService executionService; @Inject - public IngestActionFilter(Settings settings, PipelineExecutionService executionService) { + public IngestActionFilter(Settings settings, PipelineStoreBootstrapper 
bootstrapper) { super(settings); - this.executionService = executionService; + this.executionService = bootstrapper.getPipelineExecutionService(); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java index 3b5e72c01d4..ce8615a6bfb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -35,9 +36,9 @@ public class DeletePipelineTransportAction extends HandledTransportAction { + + public static final String ACTION_NAME = "internal:admin/ingest/reload/pipelines"; + + private final ClusterService clusterService; + private final TransportService transportService; + private final PipelineStore pipelineStore; + + public ReloadPipelinesAction(Settings settings, PipelineStore pipelineStore, ClusterService clusterService, TransportService transportService) { + super(settings); + this.pipelineStore = pipelineStore; + this.clusterService = clusterService; + this.transportService = transportService; + transportService.registerRequestHandler(ACTION_NAME, ReloadPipelinesRequest::new, ThreadPool.Names.SAME, this); + } + + public void reloadPipelinesOnAllNodes(Consumer listener) { + List ingestNodes = new ArrayList<>(); + for (DiscoveryNode node : clusterService.state().getNodes()) { + String 
nodeEnabled = node.getAttributes().get("ingest"); + if ("true".equals(nodeEnabled)) { + ingestNodes.add(node); + } + } + + if (ingestNodes.isEmpty()) { + throw new IllegalStateException("There are no ingest nodes in this cluster"); + } + + AtomicBoolean failed = new AtomicBoolean(); + AtomicInteger expectedResponses = new AtomicInteger(ingestNodes.size()); + for (DiscoveryNode node : ingestNodes) { + ReloadPipelinesRequest nodeRequest = new ReloadPipelinesRequest(); + transportService.sendRequest(node, ACTION_NAME, nodeRequest, new TransportResponseHandler() { + @Override + public ReloadPipelinesResponse newInstance() { + return new ReloadPipelinesResponse(); + } + + @Override + public void handleResponse(ReloadPipelinesResponse response) { + decrementAndReturn(); + } + + @Override + public void handleException(TransportException exp) { + logger.warn("failed to update pipelines on remote node [{}]", exp, node); + failed.set(true); + decrementAndReturn(); + } + + void decrementAndReturn() { + if (expectedResponses.decrementAndGet() == 0) { + listener.accept(!failed.get()); + } + } + + @Override + public String executor() { + return ThreadPool.Names.MANAGEMENT; + } + }); + } + } + + @Override + public void messageReceived(ReloadPipelinesRequest request, TransportChannel channel) throws Exception { + try { + pipelineStore.updatePipelines(); + channel.sendResponse(new ReloadPipelinesResponse()); + } catch (Throwable e) { + logger.warn("failed to update pipelines", e); + channel.sendResponse(e); + } + } + + final static class ReloadPipelinesRequest extends TransportRequest { + + } + + final static class ReloadPipelinesResponse extends TransportResponse { + + } + +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java index fcf6e00c657..430d6fd7234 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java @@ -20,7 +20,6 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; @@ -29,14 +28,13 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.List; -public class SimulateExecutionService { +class SimulateExecutionService { private static final String THREAD_POOL_NAME = ThreadPool.Names.MANAGEMENT; private final ThreadPool threadPool; - @Inject - public SimulateExecutionService(ThreadPool threadPool) { + SimulateExecutionService(ThreadPool threadPool) { this.threadPool = threadPool; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index b4036c5bac3..4d6fa5f375a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -34,14 +35,15 @@ import java.io.IOException; import 
java.util.Map; public class SimulatePipelineTransportAction extends HandledTransportAction { + private final PipelineStore pipelineStore; private final SimulateExecutionService executionService; @Inject - public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStore pipelineStore, SimulateExecutionService executionService) { + public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStoreBootstrapper bootstrapper) { super(settings, SimulatePipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, SimulatePipelineRequest::new); - this.pipelineStore = pipelineStore; - this.executionService = executionService; + this.pipelineStore = bootstrapper.getPipelineStore(); + this.executionService = new SimulateExecutionService(threadPool); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 4c27d4d6dca..1ab97961903 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -81,17 +81,12 @@ public class IngestClientIT extends ESIntegTestCase { .endArray() .endObject().bytes()) .get(); - assertBusy(new Runnable() { - @Override - public void run() { - GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); - assertThat(response.isFound(), is(true)); - assertThat(response.pipelines().size(), equalTo(1)); - assertThat(response.pipelines().get(0).getId(), equalTo("_id")); - } - }); + GetPipelineResponse getResponse = new 
GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(getResponse.isFound(), is(true)); + assertThat(getResponse.pipelines().size(), equalTo(1)); + assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); SimulatePipelineResponse response = new SimulatePipelineRequestBuilder(client(), SimulatePipelineAction.INSTANCE) .setId("_id") @@ -200,14 +195,12 @@ public class IngestClientIT extends ESIntegTestCase { .endArray() .endObject().bytes()) .get(); - assertBusy(() -> { - GetPipelineResponse response = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); - assertThat(response.isFound(), is(true)); - assertThat(response.pipelines().size(), equalTo(1)); - assertThat(response.pipelines().get(0).getId(), equalTo("_id")); - }); + GetPipelineResponse getResponse = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(getResponse.isFound(), is(true)); + assertThat(getResponse.pipelines().size(), equalTo(1)); + assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); createIndex("test"); XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties") @@ -222,23 +215,19 @@ public class IngestClientIT extends ESIntegTestCase { .putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id") .get(); - assertBusy(() -> { - Map doc = client().prepareGet("test", "type", "1") - .get().getSourceAsMap(); - assertThat(doc.get("val"), equalTo(123.42)); - assertThat(doc.get("status"), equalTo(400)); - assertThat(doc.get("msg"), equalTo("foo")); - }); + Map doc = client().prepareGet("test", "type", "1") + .get().getSourceAsMap(); + assertThat(doc.get("val"), equalTo(123.42)); + assertThat(doc.get("status"), equalTo(400)); + assertThat(doc.get("msg"), equalTo("foo")); client().prepareBulk().add( client().prepareIndex("test", "type", "2").setSource("field1", "123.42 400 ") 
).putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id").get(); - assertBusy(() -> { - Map doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); - assertThat(doc.get("val"), equalTo(123.42)); - assertThat(doc.get("status"), equalTo(400)); - assertThat(doc.get("msg"), equalTo("foo")); - }); + doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); + assertThat(doc.get("val"), equalTo(123.42)); + assertThat(doc.get("status"), equalTo(400)); + assertThat(doc.get("msg"), equalTo("foo")); DeleteResponse response = new DeletePipelineRequestBuilder(client(), DeletePipelineAction.INSTANCE) .setId("_id") @@ -246,13 +235,11 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(response.isFound(), is(true)); assertThat(response.getId(), equalTo("_id")); - assertBusy(() -> { - GetPipelineResponse response1 = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); - assertThat(response1.isFound(), is(false)); - assertThat(response1.pipelines().size(), equalTo(0)); - }); + getResponse = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + .setIds("_id") + .get(); + assertThat(getResponse.isFound(), is(false)); + assertThat(getResponse.pipelines().size(), equalTo(0)); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineBootstrapperTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineBootstrapperTests.java new file mode 100644 index 00000000000..bcf5e94f5cc --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineBootstrapperTests.java @@ -0,0 +1,253 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.Version; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.internal.InternalSearchHit; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.core.Is.is; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.*; + +public class 
PipelineBootstrapperTests extends ESTestCase { + + private PipelineStore store; + private PipelineStoreBootstrapper bootstrapper; + + @Before + public void init() { + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.executor(any())).thenReturn(Runnable::run); + ClusterService clusterService = mock(ClusterService.class); + store = mock(PipelineStore.class); + when(store.isStarted()).thenReturn(false); + PipelineExecutionService pipelineExecutionService = mock(PipelineExecutionService.class); + bootstrapper = new PipelineStoreBootstrapper(Settings.EMPTY, threadPool, clusterService, store, pipelineExecutionService); + } + + public void testStartAndStopInBackground() throws Exception { + ThreadPool threadPool = new ThreadPool("test"); + Client client = mock(Client.class); + TransportService transportService = mock(TransportService.class); + + ClusterService clusterService = mock(ClusterService.class); + when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); + when(client.searchScroll(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); + Settings settings = Settings.EMPTY; + PipelineStore store = new PipelineStore(settings, clusterService, transportService); + PipelineStoreBootstrapper bootstrapper = new PipelineStoreBootstrapper( + settings, threadPool, clusterService, store, null + ); + bootstrapper.setClient(client); + + List hits = new ArrayList<>(); + hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) + .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) + ); + when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(hits)); + when(client.get(any())).thenReturn(PipelineStoreTests.expectedGetResponse(true)); + + try { + store.get("1"); + fail("IllegalStateException expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("pipeline store isn't ready yet")); + } + + 
bootstrapper.startPipelineStore(); + assertBusy(() -> { + assertThat(store.isStarted(), is(true)); + assertThat(store.get("1"), notNullValue()); + assertThat(store.get("1").getId(), equalTo("1")); + assertThat(store.get("1").getDescription(), equalTo("_description1")); + }); + + bootstrapper.stopPipelineStore("testing stop"); + assertBusy(() -> assertThat(store.isStarted(), is(false))); + + // the map internal search hit holds gets emptied after use, which is ok, but in this test we need to reset the source: + hits.get(0).sourceRef(new BytesArray("{\"description\": \"_description1\"}")); + hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) + .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) + ); + bootstrapper.startPipelineStore(); + assertBusy(() -> { + assertThat(store.isStarted(), is(true)); + assertThat(store.get("1"), notNullValue()); + assertThat(store.get("1").getId(), equalTo("1")); + assertThat(store.get("1").getDescription(), equalTo("_description1")); + assertThat(store.get("2"), notNullValue()); + assertThat(store.get("2").getId(), equalTo("2")); + assertThat(store.get("2").getDescription(), equalTo("_description2")); + }); + threadPool.shutdown(); + } + + public void testPipelineStoreBootstrappingGlobalStateNotRecoveredBlock() throws Exception { + ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); + csBuilder.blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)); + ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, never()).start(); + verify(store, never()).stop(anyString()); + } + + public void testPipelineStoreBootstrappingGlobalStateNoMasterBlock() throws Exception { + ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); + csBuilder.blocks(ClusterBlocks.builder() + .addGlobalBlock(randomBoolean() ? 
DiscoverySettings.NO_MASTER_BLOCK_WRITES : DiscoverySettings.NO_MASTER_BLOCK_ALL)); + ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); + + // We're not started and there is a no master block, doing nothing: + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, never()).start(); + verify(store, never()).stop(anyString()); + + // We're started and there is a no master block, so we stop the store: + when(store.isStarted()).thenReturn(true); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, never()).start(); + verify(store, times(1)).stop(anyString()); + } + + public void testPipelineStoreBootstrappingNoIngestIndex() throws Exception { + ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); + ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, times(1)).start(); + } + + public void testPipelineStoreBootstrappingIngestIndexShardsNotStarted() throws Exception { + // .ingest index, but not all primary shards started: + ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); + MetaData.Builder metaDateBuilder = MetaData.builder(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + Settings settings = settings(Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + metaDateBuilder.put(IndexMetaData.builder(PipelineStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(PipelineStore.INDEX); + indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(PipelineStore.INDEX, 0)) + .addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.UNASSIGNED, 1, new 
UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) + .build()); + indexRoutingTableBuilder.addReplica(); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + csBuilder.metaData(metaDateBuilder); + csBuilder.routingTable(routingTableBuilder.build()); + ClusterState cs = csBuilder.build(); + + // We're not running and the cluster state isn't ready, so we don't start. + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, never()).start(); + verify(store, never()).stop(anyString()); + + // We're running and the cluster state indicates that all our shards are unassigned, so we stop. + when(store.isStarted()).thenReturn(true); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, never()).start(); + verify(store, times(1)).stop(anyString()); + } + + public void testPipelineStoreBootstrappingIngestIndexShardsStarted() throws Exception { + // .ingest index, but not all primary shards started: + ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); + MetaData.Builder metaDateBuilder = MetaData.builder(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + Settings settings = settings(Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + metaDateBuilder.put(IndexMetaData.builder(PipelineStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(PipelineStore.INDEX); + indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(PipelineStore.INDEX, 0)) + .addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.STARTED, 1, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) + .build()); + indexRoutingTableBuilder.addReplica(); + 
routingTableBuilder.add(indexRoutingTableBuilder.build()); + csBuilder.metaData(metaDateBuilder); + csBuilder.routingTable(routingTableBuilder.build()); + ClusterState cs = csBuilder.build(); + + // We're not running and the cluster state is ready, so we start. + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, times(1)).start(); + verify(store, never()).stop(anyString()); + + // We're running and the cluster state is good, so we do nothing. + when(store.isStarted()).thenReturn(true); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, times(1)).start(); + verify(store, never()).stop(anyString()); + } + + public void testPipelineStoreBootstrappingFailure() throws Exception { + // .ingest index, but not all primary shards started: + ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); + MetaData.Builder metaDateBuilder = MetaData.builder(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + Settings settings = settings(Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + metaDateBuilder.put(IndexMetaData.builder(PipelineStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(PipelineStore.INDEX); + indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(PipelineStore.INDEX, 0)) + .addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.STARTED, 1, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) + .build()); + indexRoutingTableBuilder.addReplica(); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + csBuilder.metaData(metaDateBuilder); + csBuilder.routingTable(routingTableBuilder.build()); + ClusterState cs = csBuilder.build(); + + // fail the first call with 
an runtime exception and subsequent calls just return: + doThrow(new RuntimeException()).doNothing().when(store).start(); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); + verify(store, times(2)).start(); + verify(store, never()).stop(anyString()); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 51c1e877de4..90972ac41c2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -31,14 +31,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.env.Environment; import org.elasticsearch.index.get.GetResult; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.junit.After; +import org.elasticsearch.transport.TransportService; import org.junit.Before; import org.mockito.ArgumentMatcher; import org.mockito.Matchers; @@ -51,7 +48,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; @@ -59,29 +55,25 @@ import static org.mockito.Mockito.when; public class PipelineStoreTests extends ESTestCase { - private ThreadPool threadPool; private PipelineStore store; private Client client; @Before - public void init() { - threadPool = 
new ThreadPool("test"); - client = mock(Client.class); - + public void init() throws Exception { + Settings settings = Settings.EMPTY; ClusterService clusterService = mock(ClusterService.class); - ScriptService scriptService = mock(ScriptService.class); - when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); - Environment environment = mock(Environment.class); - store = new PipelineStore(Settings.EMPTY, () -> client, threadPool, environment, clusterService, () -> scriptService, Collections.emptyMap()); - } + TransportService transportService = mock(TransportService.class); - @After - public void cleanup() { - threadPool.shutdown(); + client = mock(Client.class); + when(client.search(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); + when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); + store = new PipelineStore(settings, clusterService, transportService); + store.setClient(client); + store.start(); } public void testUpdatePipeline() throws Exception { - List hits = new ArrayList<>(); + List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); @@ -112,38 +104,9 @@ public class PipelineStoreTests extends ESTestCase { assertThat(store.get("2"), nullValue()); } - public void testPipelineUpdater() throws Exception { - List hits = new ArrayList<>(); - hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) - ); - when(client.search(any())).thenReturn(expectedSearchReponse(hits)); - when(client.get(any())).thenReturn(expectedGetResponse(true)); - assertThat(store.get("1"), nullValue()); - - store.startUpdateWorker(); - assertBusy(() -> { - assertThat(store.get("1"), notNullValue()); - assertThat(store.get("1").getId(), equalTo("1")); - 
assertThat(store.get("1").getDescription(), equalTo("_description1")); - }); - - hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) - ); - assertBusy(() -> { - assertThat(store.get("1"), notNullValue()); - assertThat(store.get("1").getId(), equalTo("1")); - assertThat(store.get("1").getDescription(), equalTo("_description1")); - assertThat(store.get("2"), notNullValue()); - assertThat(store.get("2").getId(), equalTo("2")); - assertThat(store.get("2").getDescription(), equalTo("_description2")); - }); - } - public void testGetReference() throws Exception { // fill the store up for the test: - List hits = new ArrayList<>(); + List hits = new ArrayList<>(); hits.add(new InternalSearchHit(0, "foo", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "bar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "foobar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); @@ -183,7 +146,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(result.get(1).getPipeline().getId(), equalTo("bar")); } - ActionFuture expectedSearchReponse(List hits) { + static ActionFuture expectedSearchReponse(List hits) { return new PlainActionFuture() { @Override @@ -194,7 +157,7 @@ public class PipelineStoreTests extends ESTestCase { }; } - ActionFuture expectedGetResponse(boolean exists) { + static ActionFuture expectedGetResponse(boolean exists) { return new PlainActionFuture() { @Override public GetResponse get() throws InterruptedException, ExecutionException { @@ -203,7 +166,7 @@ public class PipelineStoreTests extends ESTestCase { }; } - GetRequest eqGetRequest(String index, String type, String id) { + static GetRequest eqGetRequest(String 
index, String type, String id) { return Matchers.argThat(new GetRequestMatcher(index, type, id)); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 569cf32ac72..d007a5ca58b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; @@ -60,7 +61,9 @@ public class IngestActionFilterTests extends ESTestCase { @Before public void setup() { executionService = mock(PipelineExecutionService.class); - filter = new IngestActionFilter(Settings.EMPTY, executionService); + PipelineStoreBootstrapper bootstrapper = mock(PipelineStoreBootstrapper.class); + when(bootstrapper.getPipelineExecutionService()).thenReturn(executionService); + filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); } public void testApplyNoIngestId() throws Exception { @@ -181,7 +184,9 @@ public class IngestActionFilterTests extends ESTestCase { }; when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); executionService = new PipelineExecutionService(store, threadPool); - filter = new IngestActionFilter(Settings.EMPTY, executionService); + PipelineStoreBootstrapper bootstrapper = mock(PipelineStoreBootstrapper.class); + when(bootstrapper.getPipelineExecutionService()).thenReturn(executionService); + 
filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java new file mode 100644 index 00000000000..32467cfe555 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java @@ -0,0 +1,162 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest.transport.reload; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; +import org.junit.Before; + +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.hamcrest.CoreMatchers.is; +import static org.mockito.Mockito.when; + +public class ReloadPipelinesActionTests extends ESTestCase { + + private ClusterService clusterService; + private TransportService transportService; + private ReloadPipelinesAction reloadPipelinesAction; + + @Before + public void init() { + Settings settings = Settings.EMPTY; + PipelineStore pipelineStore = mock(PipelineStore.class); + clusterService = mock(ClusterService.class); + transportService = mock(TransportService.class); + reloadPipelinesAction = new ReloadPipelinesAction(settings, pipelineStore, clusterService, transportService); + } + + public void testSuccess() { + int numNodes = randomIntBetween(1, 10); + int numIngestNodes = 0; + + DiscoveryNodes.Builder discoNodes = DiscoveryNodes.builder(); + for (int i = 0; i < numNodes; i++) { + boolean ingestNode = i == 0 || randomBoolean(); + DiscoveryNode discoNode = generateDiscoNode(i, 
ingestNode); + discoNodes.put(discoNode); + if (ingestNode) { + numIngestNodes++; + } + } + ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); + when(clusterService.state()).thenReturn(state); + + final int finalNumIngestNodes = numIngestNodes; + doAnswer(mock -> { + TransportResponseHandler handler = (TransportResponseHandler) mock.getArguments()[3]; + for (int i = 0; i < finalNumIngestNodes; i++) { + handler.handleResponse(new ReloadPipelinesAction.ReloadPipelinesResponse()); + } + return mock; + }).when(transportService).sendRequest(any(), eq(ReloadPipelinesAction.ACTION_NAME), any(), any()); + reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> assertThat(result, is(true))); + } + + public void testWithAtLeastOneFailure() { + int numNodes = randomIntBetween(1, 10); + int numIngestNodes = 0; + + DiscoveryNodes.Builder discoNodes = DiscoveryNodes.builder(); + for (int i = 0; i < numNodes; i++) { + boolean ingestNode = i == 0 || randomBoolean(); + DiscoveryNode discoNode = generateDiscoNode(i, ingestNode); + discoNodes.put(discoNode); + if (ingestNode) { + numIngestNodes++; + } + } + ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); + when(clusterService.state()).thenReturn(state); + + final int finalNumIngestNodes = numIngestNodes; + doAnswer(mock -> { + TransportResponseHandler handler = (TransportResponseHandler) mock.getArguments()[3]; + handler.handleException(new TransportException("test failure")); + for (int i = 1; i < finalNumIngestNodes; i++) { + if (randomBoolean()) { + handler.handleResponse(new ReloadPipelinesAction.ReloadPipelinesResponse()); + } else { + handler.handleException(new TransportException("test failure")); + } + } + return mock; + }).when(transportService).sendRequest(any(), eq(ReloadPipelinesAction.ACTION_NAME), any(), any()); + reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> assertThat(result, is(false))); + } + + public void 
testNoIngestNodes() { + // expected exception if there are no nodes: + DiscoveryNodes discoNodes = DiscoveryNodes.builder() + .build(); + ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); + when(clusterService.state()).thenReturn(state); + + try { + reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> fail("shouldn't be invoked")); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("There are no ingest nodes in this cluster")); + } + + // expected exception if there are no ingest nodes: + discoNodes = DiscoveryNodes.builder() + .put(new DiscoveryNode("_name", "_id", new LocalTransportAddress("_id"), Collections.singletonMap("ingest", "false"), Version.CURRENT)) + .build(); + state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); + when(clusterService.state()).thenReturn(state); + + try { + reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> fail("shouldn't be invoked")); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("There are no ingest nodes in this cluster")); + } + } + + private DiscoveryNode generateDiscoNode(int index, boolean ingestNode) { + Map attributes; + if (ingestNode) { + attributes = Collections.singletonMap("ingest", "true"); + } else { + attributes = Collections.emptyMap(); + } + String id = String.valueOf(index); + return new DiscoveryNode(id, id, new LocalTransportAddress(id), attributes, Version.CURRENT); + } + +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index 9cf5d50a68f..7dd83313a7f 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -1,9 +1,5 @@ --- "Test basic pipeline crud": - - do: - cluster.health: - 
wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -24,14 +20,6 @@ - match: { _version: 1 } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.get_pipeline: id: "my_pipeline" @@ -47,14 +35,6 @@ - match: { _id: "my_pipeline" } - match: { found: true } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: catch: missing ingest.get_pipeline: @@ -62,10 +42,6 @@ --- "Test invalid config": - - do: - cluster.health: - wait_for_status: green - - do: catch: param ingest.put_pipeline: diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml index e0f97b625b1..7807631344a 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml @@ -1,9 +1,5 @@ --- "Test Grok Pipeline": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -21,14 +17,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test @@ -48,10 +36,6 @@ --- "Test Grok Pipeline With Custom Pattern": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -97,10 +81,6 @@ --- "Test Grok Pipeline With Custom Pattern Sharing Same Name As Another": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" diff --git 
a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml index 3c0efc19c78..d35898a59d5 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml @@ -1,9 +1,5 @@ --- "Test geoip processor with defaults": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -20,14 +16,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test @@ -72,14 +60,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test @@ -108,10 +88,6 @@ --- "Test geoip processor with different database file": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -129,14 +105,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml index f2c6d2c0cce..5caad399d23 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml @@ -1,9 +1,5 @@ --- "Test 
date processor": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -23,14 +19,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml index ca0f58435df..75cef2971c0 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml @@ -1,9 +1,5 @@ --- "Test mutate processors": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -72,14 +68,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml index edbd9494088..641fecf16f1 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml @@ -1,9 +1,5 @@ --- "Test simulate with stored ingest pipeline": - - do: - cluster.health: - wait_for_status: green - - do: ingest.put_pipeline: id: "my_pipeline" @@ -21,14 +17,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: 
ingest.simulate: id: "my_pipeline" @@ -53,10 +41,6 @@ --- "Test simulate with provided pipeline definition": - - do: - cluster.health: - wait_for_status: green - - do: ingest.simulate: body: > @@ -87,10 +71,6 @@ --- "Test simulate with no provided pipeline or pipeline_id": - - do: - cluster.health: - wait_for_status: green - - do: catch: request ingest.simulate: @@ -114,10 +94,6 @@ --- "Test simulate with verbose flag": - - do: - cluster.health: - wait_for_status: green - - do: ingest.simulate: verbose: true @@ -168,10 +144,6 @@ --- "Test simulate with exception thrown": - - do: - cluster.health: - wait_for_status: green - - do: ingest.simulate: body: > @@ -213,10 +185,6 @@ --- "Test verbose simulate with exception thrown": - - do: - cluster.health: - wait_for_status: green - - do: ingest.simulate: verbose: true From 800482752230b367dd413d3bd747710d8aa00ce9 Mon Sep 17 00:00:00 2001 From: gmarz Date: Fri, 18 Dec 2015 15:04:54 -0500 Subject: [PATCH 129/347] Fix Windows service installation failure when ES_HOME contains parantheses Closes #15349 --- distribution/src/main/resources/bin/elasticsearch.in.bat | 2 +- distribution/src/main/resources/bin/service.bat | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/src/main/resources/bin/elasticsearch.in.bat b/distribution/src/main/resources/bin/elasticsearch.in.bat index 6f6550dcdf0..7138cf5f5ca 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.bat +++ b/distribution/src/main/resources/bin/elasticsearch.in.bat @@ -93,7 +93,7 @@ set JAVA_OPTS=%JAVA_OPTS% -Djna.nosys=true REM check in case a user was using this mechanism if "%ES_CLASSPATH%" == "" ( -set ES_CLASSPATH=%ES_HOME%/lib/elasticsearch-${project.version}.jar;%ES_HOME%/lib/* +set ES_CLASSPATH=!ES_HOME!/lib/elasticsearch-${project.version}.jar;!ES_HOME!/lib/* ) else ( ECHO Error: Don't modify the classpath with ES_CLASSPATH, Best is to add 1>&2 ECHO additional elements via the plugin mechanism, or if code must really be 
1>&2 diff --git a/distribution/src/main/resources/bin/service.bat b/distribution/src/main/resources/bin/service.bat index 5b5fbff7522..f423bb9740f 100644 --- a/distribution/src/main/resources/bin/service.bat +++ b/distribution/src/main/resources/bin/service.bat @@ -1,5 +1,5 @@ @echo off -SETLOCAL +SETLOCAL enabledelayedexpansion TITLE Elasticsearch Service ${project.version} From 66d8b5342fd41ea8b04888de6205c909397887c8 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Sun, 20 Dec 2015 20:18:46 +0100 Subject: [PATCH 130/347] s/PipelineStoreBootstrapper/IngestBootstrapper --- ...Bootstrapper.java => IngestBootstrapper.java} | 16 ++++++++++------ .../plugin/ingest/IngestModule.java | 2 +- .../plugin/ingest/IngestPlugin.java | 2 +- .../ingest/transport/IngestActionFilter.java | 4 ++-- .../delete/DeletePipelineTransportAction.java | 4 ++-- .../get/GetPipelineTransportAction.java | 7 ++----- .../put/PutPipelineTransportAction.java | 10 ++-------- .../SimulatePipelineTransportAction.java | 5 ++--- ...erTests.java => IngestBootstrapperTests.java} | 8 ++++---- .../transport/IngestActionFilterTests.java | 6 +++--- 10 files changed, 29 insertions(+), 35 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{PipelineStoreBootstrapper.java => IngestBootstrapper.java} (86%) rename plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/{PipelineBootstrapperTests.java => IngestBootstrapperTests.java} (97%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreBootstrapper.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java similarity index 86% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreBootstrapper.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java index 4432fffcf2d..2a836d1ef2e 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStoreBootstrapper.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java @@ -38,7 +38,11 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Map; -public class PipelineStoreBootstrapper extends AbstractLifecycleComponent implements ClusterStateListener { +/** + * Instantiates and wires all the services that the ingest plugin will be needing. + * Also the bootstrapper is in charge of starting and stopping the ingest plugin based on the cluster state. + */ +public class IngestBootstrapper extends AbstractLifecycleComponent implements ClusterStateListener { private final ThreadPool threadPool; private final Environment environment; @@ -47,9 +51,9 @@ public class PipelineStoreBootstrapper extends AbstractLifecycleComponent implem private final Map processorFactoryProvider; @Inject - public PipelineStoreBootstrapper(Settings settings, ThreadPool threadPool, Environment environment, - ClusterService clusterService, TransportService transportService, - Map processorFactoryProvider) { + public IngestBootstrapper(Settings settings, ThreadPool threadPool, Environment environment, + ClusterService clusterService, TransportService transportService, + Map processorFactoryProvider) { super(settings); this.threadPool = threadPool; this.environment = environment; @@ -61,8 +65,8 @@ public class PipelineStoreBootstrapper extends AbstractLifecycleComponent implem } // for testing: - PipelineStoreBootstrapper(Settings settings, ThreadPool threadPool, ClusterService clusterService, - PipelineStore pipelineStore, PipelineExecutionService pipelineExecutionService) { + IngestBootstrapper(Settings settings, ThreadPool threadPool, ClusterService clusterService, + PipelineStore pipelineStore, PipelineExecutionService pipelineExecutionService) { super(settings); this.threadPool = threadPool; this.environment = null; diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index fa013024db3..fec0f3877ee 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -46,7 +46,7 @@ public class IngestModule extends AbstractModule { @Override protected void configure() { binder().bind(IngestRestFilter.class).asEagerSingleton(); - binder().bind(PipelineStoreBootstrapper.class).asEagerSingleton(); + binder().bind(IngestBootstrapper.class).asEagerSingleton(); addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index d9772eaefc7..2db3c9ec69d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -87,7 +87,7 @@ public class IngestPlugin extends Plugin { if (transportClient) { return Collections.emptyList(); } else { - return Collections.singletonList(PipelineStoreBootstrapper.class); + return Collections.singletonList(IngestBootstrapper.class); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 15cf21b17fb..62249552367 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ 
-33,7 +33,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; -import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; +import org.elasticsearch.plugin.ingest.IngestBootstrapper; import java.util.*; @@ -42,7 +42,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio private final PipelineExecutionService executionService; @Inject - public IngestActionFilter(Settings settings, PipelineStoreBootstrapper bootstrapper) { + public IngestActionFilter(Settings settings, IngestBootstrapper bootstrapper) { super(settings); this.executionService = bootstrapper.getPipelineExecutionService(); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java index ce8615a6bfb..b5e899ef519 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.PipelineStore; -import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; +import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -36,7 +36,7 @@ public class DeletePipelineTransportAction extends HandledTransportAction { private final PipelineStore pipelineStore; @Inject - public GetPipelineTransportAction(Settings settings, ThreadPool 
threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStoreBootstrapper bootstrapper) { + public GetPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { super(settings, GetPipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, GetPipelineRequest::new); this.pipelineStore = bootstrapper.getPipelineStore(); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java index 7432e4c76e3..646244f95d5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java @@ -20,29 +20,23 @@ package org.elasticsearch.plugin.ingest.transport.put; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.plugin.ingest.PipelineStore; -import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; +import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; -import java.io.IOException; -import java.util.Map; - public class PutPipelineTransportAction extends HandledTransportAction { private final PipelineStore pipelineStore; @Inject - public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PipelineStoreBootstrapper bootstrapper) { + public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { super(settings, PutPipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); this.pipelineStore = bootstrapper.getPipelineStore(); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index 4d6fa5f375a..682a03f21e7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -27,11 +27,10 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.plugin.ingest.PipelineStore; -import org.elasticsearch.plugin.ingest.PipelineStoreBootstrapper; +import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.io.IOException; import java.util.Map; public class SimulatePipelineTransportAction extends 
HandledTransportAction { @@ -40,7 +39,7 @@ public class SimulatePipelineTransportAction extends HandledTransportAction Date: Tue, 22 Dec 2015 11:22:47 +0100 Subject: [PATCH 131/347] adapt to upstream changes Removed wildcard imports and fixed two broken license headers --- .../elasticsearch/ingest/IngestDocument.java | 8 ++++++- .../org/elasticsearch/ingest/Pipeline.java | 7 ++++-- .../org/elasticsearch/ingest/ValueSource.java | 6 ++++- .../processor/AbstractStringProcessor.java | 3 --- .../ingest/processor/Processor.java | 3 --- .../processor/convert/ConvertProcessor.java | 5 ++++- .../ingest/processor/date/DateProcessor.java | 6 ++++- .../processor/geoip/GeoIpProcessor.java | 21 +++++++++++++++--- .../ingest/processor/grok/Grok.java | 16 ++++++++------ .../ingest/processor/grok/GrokMatchGroup.java | 5 ----- .../ingest/processor/join/JoinProcessor.java | 1 - .../lowercase/LowercaseProcessor.java | 1 - .../processor/rename/RenameProcessor.java | 2 -- .../processor/split/SplitProcessor.java | 1 - .../ingest/processor/trim/TrimProcessor.java | 2 -- .../uppercase/UppercaseProcessor.java | 1 - .../plugin/ingest/IngestModule.java | 2 +- .../ingest/InternalTemplateService.java | 6 ++++- .../plugin/ingest/PipelineDefinition.java | 1 - .../plugin/ingest/PipelineStore.java | 6 ++++- .../plugin/ingest/rest/IngestRestFilter.java | 8 +++++-- .../ingest/transport/IngestActionFilter.java | 8 +++++-- .../delete/DeletePipelineTransportAction.java | 2 +- .../transport/get/GetPipelineResponse.java | 5 ----- .../get/GetPipelineTransportAction.java | 2 +- .../put/PutPipelineTransportAction.java | 2 +- .../reload/ReloadPipelinesAction.java | 8 ++++++- .../SimulatePipelineTransportAction.java | 2 +- .../elasticsearch/ingest/IngestClientIT.java | 5 ++++- .../ingest/IngestDocumentTests.java | 13 +++++++++-- .../ingest/RandomDocumentPicks.java | 7 +++++- .../ingest/ValueSourceTests.java | 10 +++++++-- .../AbstractStringProcessorTestCase.java | 2 -- 
.../processor/ConfigurationUtilsTests.java | 8 +++++-- .../convert/ConvertProcessorFactoryTests.java | 1 - .../convert/ConvertProcessorTests.java | 7 +++++- .../date/DateProcessorFactoryTests.java | 22 ++++++++++++------- .../processor/date/DateProcessorTests.java | 21 +++++++++++------- .../geoip/GeoIpProcessorFactoryTests.java | 10 +++++++-- .../processor/grok/GrokProcessorTests.java | 5 +++-- .../ingest/processor/grok/GrokTests.java | 7 +++--- .../gsub/GsubProcessorFactoryTests.java | 2 -- .../processor/gsub/GsubProcessorTests.java | 2 -- .../processor/join/JoinProcessorTests.java | 5 ++++- .../LowercaseProcessorFactoryTests.java | 2 -- .../lowercase/LowercaseProcessorTests.java | 1 - .../rename/RenameProcessorFactoryTests.java | 1 - .../rename/RenameProcessorTests.java | 6 ++++- .../processor/set/SetProcessorTests.java | 8 +++++-- .../split/SplitProcessorFactoryTests.java | 1 - .../processor/split/SplitProcessorTests.java | 5 ++++- .../trim/TrimProcessorFactoryTests.java | 2 -- .../UppercaseProcessorFactoryTests.java | 2 -- .../uppercase/UppercaseProcessorTests.java | 1 - .../ingest/IngestBootstrapperTests.java | 15 +++++++++++-- .../ingest/PipelineExecutionServiceTests.java | 14 ++++++++++-- .../plugin/ingest/PipelineStoreTests.java | 1 - .../transport/IngestActionFilterTests.java | 15 +++++++++---- .../reload/ReloadPipelinesActionTests.java | 2 +- .../SimulateExecutionServiceTests.java | 12 ++++++++-- .../SimulatePipelineRequestParsingTests.java | 11 ++++++++-- .../SimulatePipelineResponseTests.java | 4 +++- 62 files changed, 248 insertions(+), 122 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index a14a2647fd4..d721f00d284 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -23,7 +23,13 @@ import 
org.elasticsearch.common.Strings; import java.text.DateFormat; import java.text.SimpleDateFormat; -import java.util.*; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.TimeZone; /** * Represents a single document being captured before indexing and holds the source and metadata (like id, type and index). diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 4fb26bc7ab0..7c92e4bbd53 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -23,8 +23,11 @@ package org.elasticsearch.ingest; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; /** * A pipeline is a list of {@link Processor} instances grouped under a unique id. diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java index 525bb722d60..45f03d01130 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java @@ -19,7 +19,11 @@ package org.elasticsearch.ingest; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; /** * Holds a value. If the value is requested a copy is made and optionally template snippets are resolved too. 
diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index 475ace15552..2769a1dd419 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -21,9 +21,6 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import java.util.Collection; -import java.util.Collections; -import java.util.List; import java.util.Map; /** diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java index d9c788ba21e..36bcf9689a6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java @@ -22,9 +22,6 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import java.io.Closeable; -import java.io.IOException; -import java.nio.file.Path; import java.util.Map; /** diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java index 0944305c8b6..4ef2663101e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java @@ -23,7 +23,10 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import 
java.util.Map; /** * Processor that converts fields content to a different type. Supported types are: integer, float, boolean and string. diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java index 35fcc044f42..0f2be638710 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java @@ -26,7 +26,11 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; import java.util.function.Function; public final class DateProcessor implements Processor { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java index 50ac93ac491..30ca9541a4d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java @@ -23,7 +23,11 @@ import com.maxmind.geoip2.DatabaseReader; import com.maxmind.geoip2.exception.AddressNotFoundException; import com.maxmind.geoip2.model.CityResponse; import com.maxmind.geoip2.model.CountryResponse; -import com.maxmind.geoip2.record.*; +import com.maxmind.geoip2.record.City; +import com.maxmind.geoip2.record.Continent; +import com.maxmind.geoip2.record.Country; +import com.maxmind.geoip2.record.Location; +import com.maxmind.geoip2.record.Subdivision; import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; @@ -35,10 +39,21 @@ import 
java.io.IOException; import java.io.InputStream; import java.net.InetAddress; import java.net.UnknownHostException; -import java.nio.file.*; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.PathMatcher; +import java.nio.file.StandardOpenOption; import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.stream.Stream; import static org.elasticsearch.ingest.processor.ConfigurationUtils.readOptionalList; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java index 94e5d22fc98..bfad4e7e319 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java @@ -19,19 +19,21 @@ package org.elasticsearch.ingest.processor.grok; -import java.lang.Object; -import java.lang.String; -import java.lang.StringIndexOutOfBoundsException; +import org.jcodings.specific.UTF8Encoding; +import org.joni.Matcher; +import org.joni.NameEntry; +import org.joni.Option; +import org.joni.Regex; +import org.joni.Region; +import org.joni.Syntax; +import org.joni.exception.ValueException; + import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Iterator; import java.util.Locale; import java.util.Map; -import org.jcodings.specific.UTF8Encoding; -import org.joni.*; -import org.joni.exception.ValueException; - final class Grok { private static final String NAME_GROUP = "name"; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java index 86ddd6974fe..f44d758d7b8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java @@ -19,11 +19,6 @@ package org.elasticsearch.ingest.processor.grok; -import java.lang.Float; -import java.lang.Integer; -import java.lang.Object; -import java.lang.String; - final class GrokMatchGroup { private static final String DEFAULT_TYPE = "string"; private final String patternName; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java index 85be2316777..542c536786b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.stream.Collectors; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java index 6bff6223053..e1d6cf222ad 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.lowercase; import org.elasticsearch.ingest.processor.AbstractStringProcessor; -import java.util.Collection; import java.util.Locale; /** diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java index 7e894e53893..bc340db8275 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java @@ -23,8 +23,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; -import java.util.Arrays; -import java.util.Collections; import java.util.Map; /** diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java index 1895fc10d37..a95c70cb66f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java @@ -24,7 +24,6 @@ import org.elasticsearch.ingest.processor.ConfigurationUtils; import org.elasticsearch.ingest.processor.Processor; import java.util.Arrays; -import java.util.Collections; import java.util.Map; /** diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java index 94b617ba41e..8ed3afefe88 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java @@ -21,8 +21,6 @@ package org.elasticsearch.ingest.processor.trim; import org.elasticsearch.ingest.processor.AbstractStringProcessor; -import java.util.Collection; - /** * Processor that trims the content of string fields. 
* Throws exception is the field is not of type string. diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java index fe0d029cf3c..a599f6cf32a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.uppercase; import org.elasticsearch.ingest.processor.AbstractStringProcessor; -import java.util.Collection; import java.util.Locale; /** diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index fec0f3877ee..53bfea9e00d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -21,7 +21,6 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; -import org.elasticsearch.ingest.processor.set.SetProcessor; import org.elasticsearch.ingest.processor.convert.ConvertProcessor; import org.elasticsearch.ingest.processor.date.DateProcessor; import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; @@ -31,6 +30,7 @@ import org.elasticsearch.ingest.processor.join.JoinProcessor; import org.elasticsearch.ingest.processor.lowercase.LowercaseProcessor; import org.elasticsearch.ingest.processor.remove.RemoveProcessor; import org.elasticsearch.ingest.processor.rename.RenameProcessor; +import org.elasticsearch.ingest.processor.set.SetProcessor; import org.elasticsearch.ingest.processor.split.SplitProcessor; import org.elasticsearch.ingest.processor.trim.TrimProcessor; 
import org.elasticsearch.ingest.processor.uppercase.UppercaseProcessor; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java index cec07f40459..58bcfc0a269 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java @@ -21,7 +21,11 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.script.*; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptService; import java.util.Collections; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java index d78274ce7f6..f8e94463327 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java @@ -22,7 +22,6 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index bb6007170cb..ba59d8af314 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -54,7 +54,11 @@ import org.elasticsearch.transport.TransportService; import java.io.Closeable; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; public class PipelineStore extends AbstractComponent implements Closeable { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java index fe7af03b0ae..0c548888ab2 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -20,9 +20,13 @@ package org.elasticsearch.plugin.ingest.rest; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestFilter; +import org.elasticsearch.rest.RestFilterChain; +import org.elasticsearch.rest.RestRequest; -import static org.elasticsearch.plugin.ingest.IngestPlugin.*; +import static org.elasticsearch.plugin.ingest.IngestPlugin.PIPELINE_ID_PARAM; import static org.elasticsearch.plugin.ingest.IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY; public class IngestRestFilter extends RestFilter { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 62249552367..b91dd34a206 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -31,11 +31,15 @@ import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; -import java.util.*; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; public final class IngestActionFilter extends AbstractComponent implements ActionFilter { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java index b5e899ef519..8a472d9a527 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.plugin.ingest.IngestBootstrapper; +import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java index 8d7bf9cc258..3508f6c0c55 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java @@ -20,21 +20,16 @@ package org.elasticsearch.plugin.ingest.transport.get; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.plugin.ingest.PipelineDefinition; import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; public class GetPipelineResponse extends ActionResponse implements StatusToXContent { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java index 47606149e01..e3b00697b16 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java @@ -25,9 +25,9 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.plugin.ingest.PipelineDefinition; import org.elasticsearch.plugin.ingest.PipelineStore; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java index 646244f95d5..9de4107c7db 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.plugin.ingest.IngestBootstrapper; +import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java index acd54240b27..8a9a15c221c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java @@ -25,7 +25,13 @@ import org.elasticsearch.common.component.AbstractComponent; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; import java.util.ArrayList; import java.util.List; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java index 682a03f21e7..dcff1e0e7f7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.plugin.ingest.IngestBootstrapper; +import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 1ab97961903..3239298143b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -51,7 +51,10 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; public class IngestClientIT extends ESIntegTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 011d47c67ed..a0067dc9659 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -22,9 +22,18 @@ package org.elasticsearch.ingest; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class IngestDocumentTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java index 73a5395fd63..5699cab7cfa 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -24,7 +24,12 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.elasticsearch.common.Strings; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.TreeMap; public final class RandomDocumentPicks { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java index 1c3f7dc3120..f21f1f2ad44 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java @@ -21,9 +21,15 @@ package org.elasticsearch.ingest; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class ValueSourceTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java index 6bb2f9dd0dc..0d5f21ff712 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java @@ -23,10 +23,8 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; -import 
java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java index 38c4e2493a2..b661a598edf 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java @@ -22,9 +22,13 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; public class ConfigurationUtilsTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java index 369e4d461de..f1b6fce6ced 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.ingest.processor.convert; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import java.util.Collections; import java.util.HashMap; import java.util.Map; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java index 
fe560656ce5..062f72f64bc 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java @@ -24,7 +24,12 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static org.elasticsearch.ingest.processor.convert.ConvertProcessor.Type; import static org.hamcrest.Matchers.containsString; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java index 62f726cf0cd..08b546dead4 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java @@ -1,11 +1,11 @@ /* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * @@ -22,7 +22,13 @@ package org.elasticsearch.ingest.processor.date; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java index 86e4017d57c..43724480004 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java @@ -1,11 +1,11 @@ /* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * @@ -25,7 +25,12 @@ import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java index d42f87d8048..9716bcbd3c0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java @@ -26,11 +26,17 @@ import org.junit.Before; import java.io.ByteArrayInputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.Matchers.startsWith; public class GeoIpProcessorFactoryTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java index cb20c5887cd..14164523726 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java @@ -23,9 
+23,10 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; public class GrokProcessorTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java index b73a8e0030b..c253f7dfd84 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java @@ -24,9 +24,10 @@ import org.junit.Before; import java.io.IOException; import java.io.InputStream; -import java.lang.Object; -import java.lang.String; -import java.util.*; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java index e1e085d135f..bcf00244809 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java @@ -21,9 +21,7 @@ package org.elasticsearch.ingest.processor.gsub; import org.elasticsearch.test.ESTestCase; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java index 8eb5be790ea..f64d271539f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java @@ -24,10 +24,8 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.regex.Pattern; import static org.hamcrest.Matchers.containsString; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java index df6c835b3c9..6a8acc745ff 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java @@ -24,7 +24,10 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java index 34864e38eea..4ca0f1b8f04 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java @@ -21,9 +21,7 @@ package org.elasticsearch.ingest.processor.lowercase; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java index 6e85b338b1a..8608bb769a0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.ingest.processor.lowercase; import org.elasticsearch.ingest.processor.AbstractStringProcessor; import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; -import java.util.Collection; import java.util.Locale; public class LowercaseProcessorTests extends AbstractStringProcessorTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java index eba08ad6c46..62c7fb7c9bd 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.rename; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; import java.util.HashMap; import java.util.Map; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java index 3968a2ec73f..8dc40360390 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java @@ -24,7 +24,11 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java index 9d772602f78..dabdeb54f94 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java @@ -19,12 +19,16 @@ package org.elasticsearch.ingest.processor.set; -import org.elasticsearch.ingest.*; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.ValueSource; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import java.util.*; +import java.util.HashMap; import static org.hamcrest.Matchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java index 
4d6634b8568..5f144593ee3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor.split; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; import java.util.HashMap; import java.util.Map; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java index 594ba3b4590..945faaf9fb4 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java @@ -24,7 +24,10 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java index 169ebda0064..d955cec0d0f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java @@ -21,9 +21,7 @@ package org.elasticsearch.ingest.processor.trim; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static 
org.hamcrest.CoreMatchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java index a8e048bdcf2..4dd50e2c1e6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java @@ -21,9 +21,7 @@ package org.elasticsearch.ingest.processor.uppercase; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java index 00e4d1826ca..8327dc9f831 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.ingest.processor.uppercase; import org.elasticsearch.ingest.processor.AbstractStringProcessor; import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; -import java.util.Collection; import java.util.Locale; public class UppercaseProcessorTests extends AbstractStringProcessorTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java index d726d1b294a..be0251726d0 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java @@ -28,7 +28,12 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; @@ -49,7 +54,13 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.Is.is; import static org.mockito.Matchers.any; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class IngestBootstrapperTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index a1e3191e16c..a1ffa9586d1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -35,11 +35,21 @@ import org.mockito.ArgumentMatcher; import org.mockito.Matchers; 
import org.mockito.invocation.InvocationOnMock; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class PipelineExecutionServiceTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java index 90972ac41c2..84ee9ad0ee9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; -import org.elasticsearch.env.Environment; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index cc32634c9a0..e55393edd31 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java 
@@ -33,10 +33,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; import org.elasticsearch.plugin.ingest.PipelineStore; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; @@ -47,11 +47,18 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; -import static org.elasticsearch.plugin.ingest.transport.IngestActionFilter.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.plugin.ingest.transport.IngestActionFilter.BulkRequestModifier; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; public class IngestActionFilterTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java index 32467cfe555..d6f2d3f3d9f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java @@ -38,11 +38,11 @@ import java.util.Collections; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.hamcrest.CoreMatchers.is; import static org.mockito.Mockito.when; public class ReloadPipelinesActionTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index 973e8c1d8e1..a60b8945f10 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -31,8 +31,16 @@ import org.junit.Before; import java.util.Arrays; -import static org.hamcrest.Matchers.*; -import static org.mockito.Mockito.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class SimulateExecutionServiceTests extends ESTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java index 9484a621d58..4bff810a9c2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java @@ -27,9 +27,16 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; -import static org.elasticsearch.ingest.IngestDocument.MetaData.*; +import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; +import static org.elasticsearch.ingest.IngestDocument.MetaData.INDEX; +import static org.elasticsearch.ingest.IngestDocument.MetaData.TYPE; import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java index 1b3b35e2154..47dd12dc75c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java @@ -30,7 +30,9 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import static org.hamcrest.CoreMatchers.*; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.nullValue; 
public class SimulatePipelineResponseTests extends ESTestCase { From 3fcb8ecd54f89945ff66ed1262533aaf8fd80359 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 22 Dec 2015 11:25:32 +0100 Subject: [PATCH 132/347] Upgraded geoip2 and its dependencies --- plugins/ingest/build.gradle | 7 ++++--- plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 | 1 - plugins/ingest/licenses/geoip2-2.4.0.jar.sha1 | 1 + plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 | 1 - plugins/ingest/licenses/maxmind-db-1.0.1.jar.sha1 | 1 + .../ingest/src/main/plugin-metadata/plugin-security.policy | 2 -- 6 files changed, 6 insertions(+), 7 deletions(-) delete mode 100644 plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 create mode 100644 plugins/ingest/licenses/geoip2-2.4.0.jar.sha1 delete mode 100644 plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 create mode 100644 plugins/ingest/licenses/maxmind-db-1.0.1.jar.sha1 diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 383ad6c88fa..2e5aede319f 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -27,11 +27,11 @@ dependencies { // joni dependencies: compile 'org.jruby.jcodings:jcodings:1.0.12' - compile ('com.maxmind.geoip2:geoip2:2.3.1') + compile ('com.maxmind.geoip2:geoip2:2.4.0') // geoip2 dependencies: compile('com.fasterxml.jackson.core:jackson-annotations:2.5.0') compile('com.fasterxml.jackson.core:jackson-databind:2.5.3') - compile('com.maxmind.db:maxmind-db:1.0.0') + compile('com.maxmind.db:maxmind-db:1.0.1') compile 'joda-time:joda-time:2.8.2' testCompile 'org.elasticsearch:geolite2-databases:20151029' @@ -63,5 +63,6 @@ bundlePlugin { } } -//geoip depends on asm and google http client which we don't need +//geoip WebServiceClient needs Google http client, but we're not using WebServiceClient and +// joni has AsmCompilerSupport, but that isn't being used: thirdPartyAudit.missingClasses = true diff --git a/plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 
b/plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 deleted file mode 100644 index cb1982ffef5..00000000000 --- a/plugins/ingest/licenses/geoip2-2.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b128448f5bcfafb6caa82ae079ab39aa56dafb4 diff --git a/plugins/ingest/licenses/geoip2-2.4.0.jar.sha1 b/plugins/ingest/licenses/geoip2-2.4.0.jar.sha1 new file mode 100644 index 00000000000..485286f06a4 --- /dev/null +++ b/plugins/ingest/licenses/geoip2-2.4.0.jar.sha1 @@ -0,0 +1 @@ +ad40667ae87138e0aed075d2c15884497fa64acc \ No newline at end of file diff --git a/plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 b/plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 deleted file mode 100644 index 4437d02a227..00000000000 --- a/plugins/ingest/licenses/maxmind-db-1.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b140295a52005aaf224b6c711ad4ecb38b1da155 diff --git a/plugins/ingest/licenses/maxmind-db-1.0.1.jar.sha1 b/plugins/ingest/licenses/maxmind-db-1.0.1.jar.sha1 new file mode 100644 index 00000000000..6cb749e35ae --- /dev/null +++ b/plugins/ingest/licenses/maxmind-db-1.0.1.jar.sha1 @@ -0,0 +1 @@ +305429b84dbcd1cc3d393686f412cdcaec9cdbe6 \ No newline at end of file diff --git a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy index dadb2acd640..f49d15d5521 100644 --- a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/ingest/src/main/plugin-metadata/plugin-security.policy @@ -18,8 +18,6 @@ */ grant { - // needed because geoip2 is using reflection to deserialize data into its own domain classes - permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed because jackson-databind is using Class#getDeclaredConstructors(), Class#getDeclaredMethods() and // Class#getDeclaredAnnotations() to find all public, private, protected, package protected and // private constructors, methods or annotations. 
Just locating all public constructors, methods and annotations From 7e99ee4edf51d92f1f1e2126e21487d5f4b41ead Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 22 Dec 2015 09:52:12 +0100 Subject: [PATCH 133/347] ExecutionService should be able to process multiple index requests at a time. --- .../ingest/PipelineExecutionService.java | 97 ++++++++---- .../ingest/transport/IngestActionFilter.java | 51 ++----- .../elasticsearch/ingest/IngestClientIT.java | 51 ++++--- .../ingest/PipelineExecutionServiceTests.java | 138 +++++++++++++----- .../transport/BulkRequestModifierTests.java | 101 +++++++++++++ .../transport/IngestActionFilterTests.java | 126 ++-------------- 6 files changed, 325 insertions(+), 239 deletions(-) create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/BulkRequestModifierTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 3e7f405ab51..61162563f5c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -19,7 +19,7 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -28,6 +28,7 @@ import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.threadpool.ThreadPool; import java.util.Map; +import java.util.function.Consumer; public class PipelineExecutionService { @@ -41,43 +42,75 @@ public class PipelineExecutionService { this.threadPool = threadPool; } - public void execute(IndexRequest indexRequest, String pipelineId, ActionListener listener) { 
- Pipeline pipeline = store.get(pipelineId); - if (pipeline == null) { - listener.onFailure(new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist")); - return; - } - + public void execute(IndexRequest request, String pipelineId, Consumer failureHandler, Consumer completionHandler) { + Pipeline pipeline = getPipeline(pipelineId); threadPool.executor(THREAD_POOL_NAME).execute(() -> { - String index = indexRequest.index(); - String type = indexRequest.type(); - String id = indexRequest.id(); - String routing = indexRequest.routing(); - String parent = indexRequest.parent(); - String timestamp = indexRequest.timestamp(); - String ttl = indexRequest.ttl() == null ? null : indexRequest.ttl().toString(); - Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); try { - pipeline.execute(ingestDocument); - Map metadataMap = ingestDocument.extractMetadata(); - //it's fine to set all metadata fields all the time, as ingest document holds their starting values - //before ingestion, which might also get modified during ingestion. 
- indexRequest.index(metadataMap.get(IngestDocument.MetaData.INDEX)); - indexRequest.type(metadataMap.get(IngestDocument.MetaData.TYPE)); - indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); - indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING)); - indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT)); - indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP)); - indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL)); - indexRequest.source(ingestDocument.getSourceAndMetadata()); - listener.onResponse(null); - } catch (Throwable e) { - listener.onFailure(e); + innerExecute(request, pipeline); + completionHandler.accept(true); + } catch (Exception e) { + failureHandler.accept(e); } }); } + public void execute(Iterable indexRequests, String pipelineId, + Consumer itemFailureHandler, Consumer completionHandler) { + Pipeline pipeline = getPipeline(pipelineId); + threadPool.executor(THREAD_POOL_NAME).execute(() -> { + Throwable lastThrowable = null; + for (ActionRequest actionRequest : indexRequests) { + if ((actionRequest instanceof IndexRequest) == false) { + continue; + } + + IndexRequest indexRequest = (IndexRequest) actionRequest; + try { + innerExecute(indexRequest, pipeline); + } catch (Throwable e) { + lastThrowable = e; + if (itemFailureHandler != null) { + itemFailureHandler.accept(e); + } + } + } + completionHandler.accept(lastThrowable == null); + }); + } + + private void innerExecute(IndexRequest indexRequest, Pipeline pipeline) throws Exception { + String index = indexRequest.index(); + String type = indexRequest.type(); + String id = indexRequest.id(); + String routing = indexRequest.routing(); + String parent = indexRequest.parent(); + String timestamp = indexRequest.timestamp(); + String ttl = indexRequest.ttl() == null ? 
null : indexRequest.ttl().toString(); + Map sourceAsMap = indexRequest.sourceAsMap(); + IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); + pipeline.execute(ingestDocument); + + Map metadataMap = ingestDocument.extractMetadata(); + //it's fine to set all metadata fields all the time, as ingest document holds their starting values + //before ingestion, which might also get modified during ingestion. + indexRequest.index(metadataMap.get(IngestDocument.MetaData.INDEX)); + indexRequest.type(metadataMap.get(IngestDocument.MetaData.TYPE)); + indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); + indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING)); + indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT)); + indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP)); + indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL)); + indexRequest.source(ingestDocument.getSourceAndMetadata()); + } + + private Pipeline getPipeline(String pipelineId) { + Pipeline pipeline = store.get(pipelineId); + if (pipeline == null) { + throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist"); + } + return pipeline; + } + public static Settings additionalSettings(Settings nodeSettings) { Settings settings = nodeSettings.getAsSettings("threadpool." 
+ THREAD_POOL_NAME); if (!settings.names().isEmpty()) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index b91dd34a206..99117697358 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -68,8 +68,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio BulkRequest bulkRequest = (BulkRequest) request; @SuppressWarnings("unchecked") ActionListener actionListener = (ActionListener) listener; - BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(bulkRequest); - processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, actionListener); + processBulkIndexRequest(bulkRequest, pipelineId, action, chain, actionListener); } else { chain.proceed(action, request, listener); } @@ -88,23 +87,21 @@ public final class IngestActionFilter extends AbstractComponent implements Actio chain.proceed(action, indexRequest, listener); return; } - executionService.execute(indexRequest, pipelineId, new ActionListener() { - @Override - public void onResponse(Void aVoid) { - indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); - chain.proceed(action, indexRequest, listener); - } - - @Override - public void onFailure(Throwable e) { - logger.error("failed to execute pipeline [{}]", e, pipelineId); - listener.onFailure(e); - } + executionService.execute(indexRequest, pipelineId, t -> { + logger.error("failed to execute pipeline [{}]", t, pipelineId); + listener.onFailure(t); + }, success -> { + indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); + chain.proceed(action, indexRequest, listener); }); } - void processBulkIndexRequest(BulkRequestModifier bulkRequestModifier, String pipelineId, 
String action, ActionFilterChain chain, ActionListener listener) { - if (!bulkRequestModifier.hasNext()) { + void processBulkIndexRequest(BulkRequest original, String pipelineId, String action, ActionFilterChain chain, ActionListener listener) { + BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); + executionService.execute(() -> bulkRequestModifier, pipelineId, e -> { + logger.debug("failed to execute pipeline [{}]", e, pipelineId); + bulkRequestModifier.markCurrentItemAsFailed(e); + }, (success) -> { BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); ActionListener actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(listener); if (bulkRequest.requests().isEmpty()) { @@ -115,28 +112,6 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } else { chain.proceed(action, bulkRequest, actionListener); } - return; - } - - ActionRequest actionRequest = bulkRequestModifier.next(); - if (!(actionRequest instanceof IndexRequest)) { - processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); - return; - } - - IndexRequest indexRequest = (IndexRequest) actionRequest; - executionService.execute(indexRequest, pipelineId, new ActionListener() { - @Override - public void onResponse(Void aVoid) { - processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); - } - - @Override - public void onFailure(Throwable e) { - logger.debug("failed to execute pipeline [{}]", e, pipelineId); - bulkRequestModifier.markCurrentItemAsFailed(e); - processBulkIndexRequest(bulkRequestModifier, pipelineId, action, chain, listener); - } }); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 3239298143b..0c752abc87a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -44,10 +45,7 @@ import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRespon import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; +import java.util.*; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -146,39 +144,48 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); } - public void testBulkWithIngestFailures() { + public void testBulkWithIngestFailures() throws Exception { createIndex("index"); + new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) + .setId("_id") + .setSource(jsonBuilder().startObject() + .field("description", "my_pipeline") + .startArray("processors") + .startObject() + .startObject("join") + .field("field", "field1") + .field("separator", "|") + .endObject() + .endObject() + .endArray() + .endObject().bytes()) + .get(); + int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_none_existing_id"); + bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); for (int i = 0; i < numRequests; i++) { + IndexRequest indexRequest = new IndexRequest("index", "type", 
Integer.toString(i)); if (i % 2 == 0) { - UpdateRequest updateRequest = new UpdateRequest("index", "type", Integer.toString(i)); - updateRequest.upsert("field", "value"); - updateRequest.doc(new HashMap()); - bulkRequest.add(updateRequest); + indexRequest.source("field1", Arrays.asList("value1", "value2")); } else { - IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)); - indexRequest.source("field1", "value1"); - bulkRequest.add(indexRequest); + indexRequest.source("field2", Arrays.asList("value1", "value2")); } + bulkRequest.add(indexRequest); } BulkResponse response = client().bulk(bulkRequest).actionGet(); assertThat(response.getItems().length, equalTo(bulkRequest.requests().size())); for (int i = 0; i < bulkRequest.requests().size(); i++) { - ActionRequest request = bulkRequest.requests().get(i); BulkItemResponse itemResponse = response.getItems()[i]; - if (request instanceof IndexRequest) { - BulkItemResponse.Failure failure = itemResponse.getFailure(); - assertThat(failure.getMessage(), equalTo("java.lang.IllegalArgumentException: pipeline with id [_none_existing_id] does not exist")); - } else if (request instanceof UpdateRequest) { - UpdateResponse updateResponse = itemResponse.getResponse(); - assertThat(updateResponse.getId(), equalTo(Integer.toString(i))); - assertThat(updateResponse.isCreated(), is(true)); + if (i % 2 == 0) { + IndexResponse indexResponse = itemResponse.getResponse(); + assertThat(indexResponse.getId(), equalTo(Integer.toString(i))); + assertThat(indexResponse.isCreated(), is(true)); } else { - fail("unexpected request item [" + request + "]"); + BulkItemResponse.Failure failure = itemResponse.getFailure(); + assertThat(failure.getMessage(), equalTo("java.lang.IllegalArgumentException: field [field1] not present as part of path [field1]")); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index a1ffa9586d1..2bdcbd61471 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -20,8 +20,11 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; @@ -39,9 +42,11 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; @@ -67,11 +72,16 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecutePipelineDoesNotExist() { when(store.get("_id")).thenReturn(null); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); - executionService.execute(indexRequest, "_id", listener); - verify(listener).onFailure(any(IllegalArgumentException.class)); - verify(listener, times(0)).onResponse(any()); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + try { + 
executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist")); + } + verify(failureHandler, never()).accept(any(Throwable.class)); + verify(completionHandler, never()).accept(anyBoolean()); } public void testExecuteSuccess() throws Exception { @@ -79,13 +89,13 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); - executionService.execute(indexRequest, "_id", listener); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); //TODO we remove metadata, this check is not valid anymore, what do we replace it with? 
//verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener).onResponse(null); - verify(listener, times(0)).onFailure(any(Exception.class)); + verify(failureHandler, never()).accept(any()); + verify(completionHandler, times(1)).accept(true); } public void testExecutePropagateAllMetaDataUpdates() throws Exception { @@ -105,12 +115,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); - executionService.execute(indexRequest, "_id", listener); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); verify(processor).execute(any()); - verify(listener).onResponse(any()); - verify(listener, times(0)).onFailure(any(Exception.class)); + verify(failureHandler, never()).accept(any()); + verify(completionHandler, times(1)).accept(true); assertThat(indexRequest.index(), equalTo("update_index")); assertThat(indexRequest.type(), equalTo("update_type")); @@ -126,12 +136,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener)mock(ActionListener.class); - executionService.execute(indexRequest, "_id", listener); + Consumer failureHandler = mock(Consumer.class); + Consumer 
completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(listener, times(0)).onResponse(null); - verify(listener).onFailure(any(RuntimeException.class)); + verify(failureHandler, times(1)).accept(any(RuntimeException.class)); + verify(completionHandler, never()).accept(anyBoolean()); } @SuppressWarnings("unchecked") @@ -145,12 +155,13 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - ActionListener listener = (ActionListener)mock(ActionListener.class); - executionService.execute(indexRequest, "_id", listener); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); - verify(listener, times(1)).onResponse(any()); - verify(listener, never()).onFailure(any()); + verify(failureHandler, never()).accept(any()); + verify(completionHandler, times(1)).accept(true); // test with invalid ttl metaProcessorFactory = new SetProcessor.Factory(TestTemplateService.instance()); @@ -161,11 +172,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - listener = mock(ActionListener.class); - executionService.execute(indexRequest, "_id", listener); - - verify(listener, never()).onResponse(any()); - verify(listener, times(1)).onFailure(any(ElasticsearchParseException.class)); + 
failureHandler = mock(Consumer.class); + completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + verify(failureHandler, times(1)).accept(any(ElasticsearchParseException.class)); + verify(completionHandler, never()).accept(anyBoolean()); // test with provided ttl when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.emptyList())); @@ -173,12 +184,71 @@ public class PipelineExecutionServiceTests extends ESTestCase { indexRequest = new IndexRequest("_index", "_type", "_id") .source(Collections.emptyMap()) .ttl(1000L); - listener = mock(ActionListener.class); - executionService.execute(indexRequest, "_id", listener); + failureHandler = mock(Consumer.class); + completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L))); - verify(listener, times(1)).onResponse(any()); - verify(listener, never()).onFailure(any(Throwable.class)); + verify(failureHandler, never()).accept(any()); + verify(completionHandler, times(1)).accept(true); + } + + public void testBulkRequestExecutionWithFailures() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + + int numRequest = scaledRandomIntBetween(8, 64); + int numIndexRequests = 0; + for (int i = 0; i < numRequest; i++) { + ActionRequest request; + if (randomBoolean()) { + if (randomBoolean()) { + request = new DeleteRequest("_index", "_type", "_id"); + } else { + request = new UpdateRequest("_index", "_type", "_id"); + } + } else { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field1", "value1"); + request = indexRequest; + numIndexRequests++; + } + bulkRequest.add(request); + } + + String pipelineId = "_id"; + + Processor pipeline = mock(Processor.class); + Exception error = new RuntimeException(); + doThrow(error).when(pipeline).execute(any()); + 
when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, Collections.singletonList(pipeline))); + + Consumer requestItemErrorHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(bulkRequest.requests(), pipelineId, requestItemErrorHandler, completionHandler); + + verify(requestItemErrorHandler, times(numIndexRequests)).accept(error); + verify(completionHandler, times(1)).accept(false); + } + + public void testBulkRequestExecution() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + + int numRequest = scaledRandomIntBetween(8, 64); + for (int i = 0; i < numRequest; i++) { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + indexRequest.source("field1", "value1"); + bulkRequest.add(indexRequest); + } + + String pipelineId = "_id"; + when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, Collections.emptyList())); + + Consumer requestItemErrorHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(bulkRequest.requests(), pipelineId, requestItemErrorHandler, completionHandler); + + verify(requestItemErrorHandler, never()).accept(any()); + verify(completionHandler, times(1)).accept(true); } private IngestDocument eqID(String index, String type, String id, Map source) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/BulkRequestModifierTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/BulkRequestModifierTests.java new file mode 100644 index 00000000000..6c4871a140a --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/BulkRequestModifierTests.java @@ -0,0 +1,101 @@ +package org.elasticsearch.plugin.ingest.transport; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.mockito.Mockito; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class BulkRequestModifierTests extends ESTestCase { + + public void testPipelineFailures() { + BulkRequest originalBulkRequest = new BulkRequest(); + for (int i = 0; i < 32; i++) { + originalBulkRequest.add(new IndexRequest("index", "type", String.valueOf(i))); + } + + IngestActionFilter.BulkRequestModifier modifier = new IngestActionFilter.BulkRequestModifier(originalBulkRequest); + for (int i = 0; modifier.hasNext(); i++) { + modifier.next(); + if (i % 2 == 0) { + modifier.markCurrentItemAsFailed(new RuntimeException()); + } + } + + // So half of the requests have "failed", so only the successful requests are left: + BulkRequest bulkRequest = 
modifier.getBulkRequest(); + assertThat(bulkRequest.requests().size(), Matchers.equalTo(16)); + + List responses = new ArrayList<>(); + ActionListener bulkResponseListener = modifier.wrapActionListenerIfNeeded(new ActionListener() { + @Override + public void onResponse(BulkResponse bulkItemResponses) { + responses.addAll(Arrays.asList(bulkItemResponses.getItems())); + } + + @Override + public void onFailure(Throwable e) { + } + }); + + List originalResponses = new ArrayList<>(); + for (ActionRequest actionRequest : bulkRequest.requests()) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + IndexResponse indexResponse = new IndexResponse(new ShardId("index", 0), indexRequest.type(), indexRequest.id(), 1, true); + originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType().lowercase(), indexResponse)); + } + bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new BulkItemResponse[0]), 0)); + + assertThat(responses.size(), Matchers.equalTo(32)); + for (int i = 0; i < 32; i++) { + assertThat(responses.get(i).getId(), Matchers.equalTo(String.valueOf(i))); + } + } + + public void testNoFailures() { + BulkRequest originalBulkRequest = new BulkRequest(); + for (int i = 0; i < 32; i++) { + originalBulkRequest.add(new IndexRequest("index", "type", String.valueOf(i))); + } + + IngestActionFilter.BulkRequestModifier modifier = new IngestActionFilter.BulkRequestModifier(originalBulkRequest); + for (int i = 0; modifier.hasNext(); i++) { + modifier.next(); + } + + BulkRequest bulkRequest = modifier.getBulkRequest(); + assertThat(bulkRequest, Matchers.sameInstance(originalBulkRequest)); + ActionListener actionListener = Mockito.mock(ActionListener.class); + assertThat(modifier.wrapActionListenerIfNeeded(actionListener), Matchers.sameInstance(actionListener)); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index e55393edd31..6baf588d0d1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -46,6 +46,7 @@ import org.mockito.stubbing.Answer; import java.util.Collections; import java.util.HashSet; import java.util.Set; +import java.util.function.Consumer; import static org.elasticsearch.plugin.ingest.transport.IngestActionFilter.BulkRequestModifier; import static org.hamcrest.Matchers.equalTo; @@ -93,7 +94,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); verifyZeroInteractions(actionFilterChain); } @@ -106,7 +107,7 @@ public class IngestActionFilterTests extends ESTestCase { filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); verifyZeroInteractions(actionFilterChain); } @@ -133,14 +134,14 @@ public class IngestActionFilterTests extends ESTestCase { Answer answer = invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onResponse(null); + Consumer listener = (Consumer) invocationOnMock.getArguments()[3]; + listener.accept(true); return null; }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); + 
doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); verify(actionFilterChain).proceed("_action", indexRequest, actionListener); verifyZeroInteractions(actionListener); } @@ -156,26 +157,22 @@ public class IngestActionFilterTests extends ESTestCase { Answer answer = new Answer() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onFailure(exception); + Consumer handler = (Consumer) invocationOnMock.getArguments()[2]; + handler.accept(exception); return null; } }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); filter.apply("_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); + verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); verify(actionListener).onFailure(exception); verifyZeroInteractions(actionFilterChain); } public void testApplyWithBulkRequest() throws Exception { - ThreadPool threadPool = new ThreadPool( - Settings.builder() - .put("name", "_name") - .put(PipelineExecutionService.additionalSettings(Settings.EMPTY)) - .build() - ); + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.executor(any())).thenReturn(Runnable::run); PipelineStore store = mock(PipelineStore.class); Processor processor = new 
Processor() { @@ -238,83 +235,6 @@ public class IngestActionFilterTests extends ESTestCase { assertThat(assertedRequests, equalTo(numRequest)); } }); - - threadPool.shutdown(); - } - - - public void testApplyWithBulkRequestWithFailureAllFailed() throws Exception { - BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); - int numRequest = scaledRandomIntBetween(0, 8); - for (int i = 0; i < numRequest; i++) { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); - indexRequest.source("field1", "value1"); - bulkRequest.add(indexRequest); - } - - RuntimeException exception = new RuntimeException(); - Answer answer = (invocationOnMock) -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onFailure(exception); - return null; - }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); - - CaptureActionListener actionListener = new CaptureActionListener(); - RecordRequestAFC actionFilterChain = new RecordRequestAFC(); - - filter.apply("_action", bulkRequest, actionListener, actionFilterChain); - - assertThat(actionFilterChain.request, nullValue()); - ActionResponse response = actionListener.response; - assertThat(response, instanceOf(BulkResponse.class)); - BulkResponse bulkResponse = (BulkResponse) response; - assertThat(bulkResponse.getItems().length, equalTo(numRequest)); - for (BulkItemResponse bulkItemResponse : bulkResponse) { - assertThat(bulkItemResponse.isFailed(), equalTo(true)); - } - } - - public void testApplyWithBulkRequestWithFailure() throws Exception { - BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); - int numRequest = scaledRandomIntBetween(8, 64); - int numNonIndexRequests = 0; - for (int i = 0; i < numRequest; i++) { - ActionRequest request; - if (randomBoolean()) { - numNonIndexRequests++; - if (randomBoolean()) { - 
request = new DeleteRequest("_index", "_type", "_id"); - } else { - request = new UpdateRequest("_index", "_type", "_id"); - } - } else { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); - indexRequest.source("field1", "value1"); - request = indexRequest; - } - bulkRequest.add(request); - } - - RuntimeException exception = new RuntimeException(); - Answer answer = (invocationOnMock) -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onFailure(exception); - return null; - }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(ActionListener.class)); - - ActionListener actionListener = mock(ActionListener.class); - RecordRequestAFC actionFilterChain = new RecordRequestAFC(); - - filter.apply("_action", bulkRequest, actionListener, actionFilterChain); - - BulkRequest interceptedRequests = actionFilterChain.getRequest(); - assertThat(interceptedRequests.requests().size(), equalTo(numNonIndexRequests)); - - verifyZeroInteractions(actionListener); } public void testBulkRequestModifier() { @@ -357,26 +277,6 @@ public class IngestActionFilterTests extends ESTestCase { } } - private final static class RecordRequestAFC implements ActionFilterChain { - - private ActionRequest request; - - @Override - public void proceed(String action, ActionRequest request, ActionListener listener) { - this.request = request; - } - - @Override - public void proceed(String action, ActionResponse response, ActionListener listener) { - - } - - @SuppressWarnings("unchecked") - public > T getRequest() { - return (T) request; - } - } - private final static class CaptureActionListener implements ActionListener { private BulkResponse response; From 72fc34731ee6ecd95dd8cc0a5256757e26d24148 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 22 Dec 2015 12:20:16 +0100 Subject: [PATCH 134/347] applied feedback --- .../plugin/ingest/PipelineExecutionService.java | 8 +++----- 
.../java/org/elasticsearch/ingest/IngestClientIT.java | 9 +++++---- .../plugin/ingest/PipelineExecutionServiceTests.java | 2 +- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java index 61162563f5c..c55faf3b09e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java @@ -54,12 +54,11 @@ public class PipelineExecutionService { }); } - public void execute(Iterable indexRequests, String pipelineId, + public void execute(Iterable actionRequests, String pipelineId, Consumer itemFailureHandler, Consumer completionHandler) { Pipeline pipeline = getPipeline(pipelineId); threadPool.executor(THREAD_POOL_NAME).execute(() -> { - Throwable lastThrowable = null; - for (ActionRequest actionRequest : indexRequests) { + for (ActionRequest actionRequest : actionRequests) { if ((actionRequest instanceof IndexRequest) == false) { continue; } @@ -68,13 +67,12 @@ public class PipelineExecutionService { try { innerExecute(indexRequest, pipeline); } catch (Throwable e) { - lastThrowable = e; if (itemFailureHandler != null) { itemFailureHandler.accept(e); } } } - completionHandler.accept(lastThrowable == null); + completionHandler.accept(true); }); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 0c752abc87a..545f8a0b6bf 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest; -import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -27,8 +26,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; @@ -45,7 +42,11 @@ import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRespon import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import java.util.*; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Collections; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 2bdcbd61471..961d820b876 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -227,7 +227,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { executionService.execute(bulkRequest.requests(), pipelineId, requestItemErrorHandler, completionHandler); verify(requestItemErrorHandler, times(numIndexRequests)).accept(error); - verify(completionHandler, times(1)).accept(false); + 
verify(completionHandler, times(1)).accept(true); } public void testBulkRequestExecution() throws Exception { From 1b7dc45c286a28c315946af40aa86eb707a8166e Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 22 Dec 2015 13:40:17 +0100 Subject: [PATCH 135/347] adapt to upstream changes: remove wildcard imports from qa/ingest-with-mustache --- .../plugin/ingest/IngestDocumentMustacheIT.java | 7 ++++++- .../plugin/ingest/IngestMustacheSetProcessorIT.java | 3 ++- .../elasticsearch/plugin/ingest/TemplateServiceIT.java | 7 ++++--- .../plugin/ingest/ValueSourceMustacheIT.java | 10 ++++++++-- 4 files changed, 20 insertions(+), 7 deletions(-) diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java index 8096d3a8d5d..c7e76cb062b 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java @@ -22,7 +22,12 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.ValueSource; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java index 1cf70543ce1..b979eea5098 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java @@ -20,7 +20,8 @@ package 
org.elasticsearch.plugin.ingest; -import org.elasticsearch.ingest.*; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ValueSource; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.set.SetProcessor; import org.hamcrest.Matchers; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java index 6925959e710..be34d25b726 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java @@ -20,11 +20,12 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.script.ScriptException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; public class TemplateServiceIT extends AbstractMustacheTests { diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java index 85fd9561dad..3b9e5245bb5 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java @@ -22,9 +22,15 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.ValueSource; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; 
+import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; public class ValueSourceMustacheIT extends AbstractMustacheTests { From 46f99a11a02db3ddedc86f92f13ad33d4d2acf7e Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 18 Dec 2015 10:33:36 +0100 Subject: [PATCH 136/347] Add append processor The append processor allows to append one or more values to an existing list; add a new list with the provided values if the field doesn't exist yet, or convert an existing scalar into a list and add the provided values to the newly created list. This required adapting of IngestDocument#appendFieldValue behaviour, also added support for templating to it. Closes #14324 --- docs/plugins/ingest.asciidoc | 15 + .../elasticsearch/ingest/IngestDocument.java | 87 +++++- .../processor/append/AppendProcessor.java | 80 +++++ .../plugin/ingest/IngestModule.java | 2 + .../ingest/IngestDocumentTests.java | 291 +++++++++++++++++- .../append/AppendProcessorFactoryTests.java | 90 ++++++ .../append/AppendProcessorTests.java | 209 +++++++++++++ .../processor/set/SetProcessorTests.java | 3 +- .../rest-api-spec/test/ingest/30_grok.yaml | 16 - .../rest-api-spec/test/ingest/60_mutate.yaml | 15 +- .../10_pipeline_with_mustache_templates.yaml | 36 +-- 11 files changed, 785 insertions(+), 59 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/append/AppendProcessor.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorTests.java diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 4f84da7203f..1d75daf387d 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -16,6 +16,21 @@ its value will be replaced with the provided one. 
} -------------------------------------------------- +==== Append processor +Appends one or more values to an existing array if the field already exists and it is an array. +Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. +Creates an array containing the provided values if the fields doesn't exist. +Accepts a single value or an array of values. + +[source,js] +-------------------------------------------------- +{ + "append": { + "field1": ["item2", "item3", "item4"] + } +} +-------------------------------------------------- + ==== Remove processor Removes an existing field. If the field doesn't exist, an exception will be thrown diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index d721f00d284..993f6e2fa91 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import java.text.DateFormat; import java.text.SimpleDateFormat; +import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -243,23 +244,46 @@ public final class IngestDocument { /** * Appends the provided value to the provided path in the document. - * Any non existing path element will be created. Same as {@link #setFieldValue(String, Object)} - * but if the last element is a list, the value will be appended to the existing list. + * Any non existing path element will be created. + * If the path identifies a list, the value will be appended to the existing list. + * If the path identifies a scalar, the scalar will be converted to a list and + * the provided value will be added to the newly created list. 
+ * Supports multiple values too provided in forms of list, in that case all the values will be appeneded to the + * existing (or newly created) list. * @param path The path within the document in dot-notation - * @param value The value to put in for the path key - * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. + * @param value The value or values to append to the existing ones + * @throws IllegalArgumentException if the path is null, empty or invalid. */ public void appendFieldValue(String path, Object value) { setFieldValue(path, value, true); } + /** + * Appends the provided value to the provided path in the document. + * Any non existing path element will be created. + * If the path identifies a list, the value will be appended to the existing list. + * If the path identifies a scalar, the scalar will be converted to a list and + * the provided value will be added to the newly created list. + * Supports multiple values too provided in forms of list, in that case all the values will be appeneded to the + * existing (or newly created) list. + * @param fieldPathTemplate Resolves to the path with dot-notation within the document + * @param valueSource The value source that will produce the value or values to append to the existing ones + * @throws IllegalArgumentException if the path is null, empty or invalid. + */ + public void appendFieldValue(TemplateService.Template fieldPathTemplate, ValueSource valueSource) { + Map model = createTemplateModel(); + appendFieldValue(fieldPathTemplate.execute(model), valueSource.copyAndResolve(model)); + } + /** * Sets the provided value to the provided path in the document. - * Any non existing path element will be created. If the last element is a list, - * the value will replace the existing list. + * Any non existing path element will be created. + * If the last item in the path is a list, the value will replace the existing list as a whole. 
+ * Use {@link #appendFieldValue(String, Object)} to append values to lists instead. * @param path The path within the document in dot-notation * @param value The value to put in for the path key - * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. + * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the + * item identified by the provided path. */ public void setFieldValue(String path, Object value) { setFieldValue(path, value, false); @@ -271,7 +295,8 @@ public final class IngestDocument { * the value will replace the existing list. * @param fieldPathTemplate Resolves to the path with dot-notation within the document * @param valueSource The value source that will produce the value to put in for the path key - * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. + * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the + * item identified by the provided path. 
*/ public void setFieldValue(TemplateService.Template fieldPathTemplate, ValueSource valueSource) { Map model = createTemplateModel(); @@ -324,13 +349,16 @@ public final class IngestDocument { if (append) { if (map.containsKey(leafKey)) { Object object = map.get(leafKey); - if (object instanceof List) { - @SuppressWarnings("unchecked") - List list = (List) object; - list.add(value); - return; + List list = appendValues(path, object, value); + if (list != object) { + map.put(leafKey, list); } + } else { + List list = new ArrayList<>(); + appendValues(list, value); + map.put(leafKey, list); } + return; } map.put(leafKey, value); } else if (context instanceof List) { @@ -345,12 +373,45 @@ public final class IngestDocument { if (index < 0 || index >= list.size()) { throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); } + if (append) { + Object object = list.get(index); + List newList = appendValues(path, object, value); + if (newList != object) { + list.set(index, newList); + } + return; + } list.set(index, value); } else { throw new IllegalArgumentException("cannot set [" + leafKey + "] with parent object of type [" + context.getClass().getName() + "] as part of path [" + path + "]"); } } + @SuppressWarnings("unchecked") + private static List appendValues(String path, Object maybeList, Object value) { + List list; + if (maybeList instanceof List) { + //maybeList is already a list, we append the provided values to it + list = (List) maybeList; + } else { + //maybeList is a scalar, we convert it to a list and append the provided values to it + list = new ArrayList<>(); + list.add(maybeList); + } + appendValues(list, value); + return list; + } + + private static void appendValues(List list, Object value) { + if (value instanceof List) { + @SuppressWarnings("unchecked") + List valueList = (List) value; + valueList.stream().forEach(list::add); + } else { + list.add(value); + } + } 
+ private static T cast(String path, Object object, Class clazz) { if (object == null) { return null; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/append/AppendProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/append/AppendProcessor.java new file mode 100644 index 00000000000..3b1c3a7a68b --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/append/AppendProcessor.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.append; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.util.Map; + +/** + * Processor that appends value or values to existing lists. If the field is not present a new list holding the + * provided values will be added. If the field is a scalar it will be converted to a single item list and the provided + * values will be added to the newly created list. 
+ */ +public class AppendProcessor implements Processor { + + public static final String TYPE = "append"; + + private final TemplateService.Template field; + private final ValueSource value; + + AppendProcessor(TemplateService.Template field, ValueSource value) { + this.field = field; + this.value = value; + } + + public TemplateService.Template getField() { + return field; + } + + public ValueSource getValue() { + return value; + } + + @Override + public void execute(IngestDocument ingestDocument) throws Exception { + ingestDocument.appendFieldValue(field, value); + } + + @Override + public String getType() { + return TYPE; + } + + public static final class Factory implements Processor.Factory { + + private final TemplateService templateService; + + public Factory(TemplateService templateService) { + this.templateService = templateService; + } + + @Override + public AppendProcessor create(Map config) throws Exception { + String field = ConfigurationUtils.readStringProperty(config, "field"); + Object value = ConfigurationUtils.readObject(config, "value"); + return new AppendProcessor(templateService.compile(field), ValueSource.wrap(value, templateService)); + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index 53bfea9e00d..c471163c0fa 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -21,6 +21,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; +import org.elasticsearch.ingest.processor.append.AppendProcessor; import org.elasticsearch.ingest.processor.convert.ConvertProcessor; import org.elasticsearch.ingest.processor.date.DateProcessor; import 
org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; @@ -52,6 +53,7 @@ public class IngestModule extends AbstractModule { addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index a0067dc9659..9076b2102cb 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -22,18 +22,27 @@ package org.elasticsearch.ingest; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.text.DateFormat; +import java.text.SimpleDateFormat; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; 
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; public class IngestDocumentTests extends ESTestCase { @@ -51,12 +60,18 @@ public class IngestDocumentTests extends ESTestCase { innerObject.put("buzz", "hello world"); innerObject.put("foo_null", null); innerObject.put("1", "bar"); + List innerInnerList = new ArrayList<>(); + innerInnerList.add("item1"); + List innerList = new ArrayList<>(); + innerList.add(innerInnerList); + innerObject.put("list", innerList); document.put("fizz", innerObject); List> list = new ArrayList<>(); Map value = new HashMap<>(); value.put("field", "value"); list.add(value); list.add(null); + document.put("list", list); ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); } @@ -414,6 +429,254 @@ public class IngestDocumentTests extends ESTestCase { assertThat(list.get(2), equalTo("new_value")); } + public void testListAppendFieldValues() { + ingestDocument.appendFieldValue("list", Arrays.asList("item1", "item2", "item3")); + Object object = ingestDocument.getSourceAndMetadata().get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(5)); + assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value"))); + assertThat(list.get(1), nullValue()); + assertThat(list.get(2), equalTo("item1")); + assertThat(list.get(3), equalTo("item2")); + assertThat(list.get(4), equalTo("item3")); + } + + public void testAppendFieldValueToNonExistingList() { + ingestDocument.appendFieldValue("non_existing_list", "new_value"); + Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(1)); + assertThat(list.get(0), equalTo("new_value")); + } + + public 
void testAppendFieldValuesToNonExistingList() { + ingestDocument.appendFieldValue("non_existing_list", Arrays.asList("item1", "item2", "item3")); + Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(3)); + assertThat(list.get(0), equalTo("item1")); + assertThat(list.get(1), equalTo("item2")); + assertThat(list.get(2), equalTo("item3")); + } + + public void testAppendFieldValueConvertStringToList() { + ingestDocument.appendFieldValue("fizz.buzz", "new_value"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) object; + object = map.get("buzz"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), equalTo("hello world")); + assertThat(list.get(1), equalTo("new_value")); + } + + public void testAppendFieldValuesConvertStringToList() { + ingestDocument.appendFieldValue("fizz.buzz", Arrays.asList("item1", "item2", "item3")); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) object; + object = map.get("buzz"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(4)); + assertThat(list.get(0), equalTo("hello world")); + assertThat(list.get(1), equalTo("item1")); + assertThat(list.get(2), equalTo("item2")); + assertThat(list.get(3), equalTo("item3")); + } + + public void testAppendFieldValueConvertIntegerToList() { + ingestDocument.appendFieldValue("int", 456); + Object object = ingestDocument.getSourceAndMetadata().get("int"); + assertThat(object, instanceOf(List.class)); + 
@SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), equalTo(123)); + assertThat(list.get(1), equalTo(456)); + } + + public void testAppendFieldValuesConvertIntegerToList() { + ingestDocument.appendFieldValue("int", Arrays.asList(456, 789)); + Object object = ingestDocument.getSourceAndMetadata().get("int"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(3)); + assertThat(list.get(0), equalTo(123)); + assertThat(list.get(1), equalTo(456)); + assertThat(list.get(2), equalTo(789)); + } + + public void testAppendFieldValueConvertMapToList() { + ingestDocument.appendFieldValue("fizz", Collections.singletonMap("field", "value")); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(List.class)); + List list = (List) object; + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) list.get(0); + assertThat(map.size(), equalTo(4)); + assertThat(list.get(1), equalTo(Collections.singletonMap("field", "value"))); + } + + public void testAppendFieldValueToNull() { + ingestDocument.appendFieldValue("fizz.foo_null", "new_value"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) object; + object = map.get("foo_null"); + assertThat(object, instanceOf(List.class)); + List list = (List) object; + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), nullValue()); + assertThat(list.get(1), equalTo("new_value")); + } + + public void testAppendFieldValueToListElement() { + ingestDocument.appendFieldValue("fizz.list.0", "item2"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(Map.class)); + 
@SuppressWarnings("unchecked") + Map map = (Map) object; + object = map.get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(1)); + object = list.get(0); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List innerList = (List) object; + assertThat(innerList.size(), equalTo(2)); + assertThat(innerList.get(0), equalTo("item1")); + assertThat(innerList.get(1), equalTo("item2")); + } + + public void testAppendFieldValuesToListElement() { + ingestDocument.appendFieldValue("fizz.list.0", Arrays.asList("item2", "item3", "item4")); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) object; + object = map.get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(1)); + object = list.get(0); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List innerList = (List) object; + assertThat(innerList.size(), equalTo(4)); + assertThat(innerList.get(0), equalTo("item1")); + assertThat(innerList.get(1), equalTo("item2")); + assertThat(innerList.get(2), equalTo("item3")); + assertThat(innerList.get(3), equalTo("item4")); + } + + public void testAppendFieldValueConvertStringListElementToList() { + ingestDocument.appendFieldValue("fizz.list.0.0", "new_value"); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) object; + object = map.get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(1)); + object = list.get(0); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List 
innerList = (List) object; + object = innerList.get(0); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List innerInnerList = (List) object; + assertThat(innerInnerList.size(), equalTo(2)); + assertThat(innerInnerList.get(0), equalTo("item1")); + assertThat(innerInnerList.get(1), equalTo("new_value")); + } + + public void testAppendFieldValuesConvertStringListElementToList() { + ingestDocument.appendFieldValue("fizz.list.0.0", Arrays.asList("item2", "item3", "item4")); + Object object = ingestDocument.getSourceAndMetadata().get("fizz"); + assertThat(object, instanceOf(Map.class)); + @SuppressWarnings("unchecked") + Map map = (Map) object; + object = map.get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(1)); + object = list.get(0); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List innerList = (List) object; + object = innerList.get(0); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List innerInnerList = (List) object; + assertThat(innerInnerList.size(), equalTo(4)); + assertThat(innerInnerList.get(0), equalTo("item1")); + assertThat(innerInnerList.get(1), equalTo("item2")); + assertThat(innerInnerList.get(2), equalTo("item3")); + assertThat(innerInnerList.get(3), equalTo("item4")); + } + + public void testAppendFieldValueListElementConvertMapToList() { + ingestDocument.appendFieldValue("list.0", Collections.singletonMap("item2", "value2")); + Object object = ingestDocument.getSourceAndMetadata().get("list"); + assertThat(object, instanceOf(List.class)); + List list = (List) object; + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), instanceOf(List.class)); + assertThat(list.get(1), nullValue()); + list = (List) list.get(0); + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value"))); + 
assertThat(list.get(1), equalTo(Collections.singletonMap("item2", "value2"))); + } + + public void testAppendFieldValueToNullListElement() { + ingestDocument.appendFieldValue("list.1", "new_value"); + Object object = ingestDocument.getSourceAndMetadata().get("list"); + assertThat(object, instanceOf(List.class)); + List list = (List) object; + assertThat(list.get(1), instanceOf(List.class)); + list = (List) list.get(1); + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), nullValue()); + assertThat(list.get(1), equalTo("new_value")); + } + + public void testAppendFieldValueToListOfMaps() { + ingestDocument.appendFieldValue("list", Collections.singletonMap("item2", "value2")); + Object object = ingestDocument.getSourceAndMetadata().get("list"); + assertThat(object, instanceOf(List.class)); + @SuppressWarnings("unchecked") + List list = (List) object; + assertThat(list.size(), equalTo(3)); + assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value"))); + assertThat(list.get(1), nullValue()); + assertThat(list.get(2), equalTo(Collections.singletonMap("item2", "value2"))); + } + public void testListSetFieldValueIndexProvided() { ingestDocument.setFieldValue("list.1", "value"); Object object = ingestDocument.getSourceAndMetadata().get("list"); @@ -495,15 +758,20 @@ public class IngestDocumentTests extends ESTestCase { assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); - assertThat(map.size(), equalTo(2)); + assertThat(map.size(), equalTo(3)); assertThat(map.containsKey("buzz"), equalTo(false)); ingestDocument.removeField("fizz.foo_null"); - assertThat(map.size(), equalTo(1)); + assertThat(map.size(), equalTo(2)); assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.1"); + 
assertThat(map.size(), equalTo(1)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); + + ingestDocument.removeField("fizz.list"); assertThat(map.size(), equalTo(0)); assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); @@ -684,4 +952,23 @@ public class IngestDocumentTests extends ESTestCase { } } + public void testIngestMetadataTimestamp() throws Exception { + long before = System.currentTimeMillis(); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + long after = System.currentTimeMillis(); + String timestampString = ingestDocument.getIngestMetadata().get("timestamp"); + assertThat(timestampString, notNullValue()); + assertThat(timestampString, endsWith("+0000")); + DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); + Date timestamp = df.parse(timestampString); + assertThat(timestamp.getTime(), greaterThanOrEqualTo(before)); + assertThat(timestamp.getTime(), lessThanOrEqualTo(after)); + } + + public void testCopyConstructor() { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + IngestDocument copy = new IngestDocument(ingestDocument); + assertThat(ingestDocument.getSourceAndMetadata(), not(sameInstance(copy.getSourceAndMetadata()))); + assertThat(ingestDocument.getSourceAndMetadata(), equalTo(copy.getSourceAndMetadata())); + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorFactoryTests.java new file mode 100644 index 00000000000..7ebb424e2d4 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorFactoryTests.java @@ -0,0 +1,90 @@ +/* + * 
Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.append; + +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class AppendProcessorFactoryTests extends ESTestCase { + + private AppendProcessor.Factory factory; + + @Before + public void init() { + factory = new AppendProcessor.Factory(TestTemplateService.instance()); + } + + public void testCreate() throws Exception { + Map config = new HashMap<>(); + config.put("field", "field1"); + Object value; + if (randomBoolean()) { + value = "value1"; + } else { + value = Arrays.asList("value1", "value2", "value3"); + } + config.put("value", value); + AppendProcessor setProcessor = factory.create(config); + assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); + assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo(value)); + } + + public void testCreateNoFieldPresent() throws Exception { + Map config = new HashMap<>(); + config.put("value", 
"value1"); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [field] is missing")); + } + } + + public void testCreateNoValuePresent() throws Exception { + Map config = new HashMap<>(); + config.put("field", "field1"); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [value] is missing")); + } + } + + public void testCreateNullValue() throws Exception { + Map config = new HashMap<>(); + config.put("field", "field1"); + config.put("value", null); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [value] is missing")); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorTests.java new file mode 100644 index 00000000000..787a698e76f --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorTests.java @@ -0,0 +1,209 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.append; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.sameInstance; + +public class AppendProcessorTests extends ESTestCase { + + public void testAppendValuesToExistingList() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Scalar scalar = randomFrom(Scalar.values()); + List list = new ArrayList<>(); + int size = randomIntBetween(0, 10); + for (int i = 0; i < size; i++) { + list.add(scalar.randomValue()); + } + List checkList = new ArrayList<>(list); + String field = RandomDocumentPicks.addRandomField(random(), ingestDocument, list); + List values = new ArrayList<>(); + Processor appendProcessor; + if (randomBoolean()) { + Object value = scalar.randomValue(); + values.add(value); + appendProcessor = createAppendProcessor(field, value); + } else { + int valuesSize = randomIntBetween(0, 10); + for (int i = 0; i < valuesSize; i++) { + values.add(scalar.randomValue()); + } + appendProcessor = createAppendProcessor(field, values); + } + appendProcessor.execute(ingestDocument); + Object fieldValue = ingestDocument.getFieldValue(field, Object.class); + assertThat(fieldValue, sameInstance(list)); + assertThat(list.size(), equalTo(size + values.size())); + for (int i = 0; i < size; i++) { + assertThat(list.get(i), 
equalTo(checkList.get(i))); + } + for (int i = size; i < size + values.size(); i++) { + assertThat(list.get(i), equalTo(values.get(i - size))); + } + } + + public void testAppendValuesToNonExistingList() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String field = RandomDocumentPicks.randomFieldName(random()); + Scalar scalar = randomFrom(Scalar.values()); + List values = new ArrayList<>(); + Processor appendProcessor; + if (randomBoolean()) { + Object value = scalar.randomValue(); + values.add(value); + appendProcessor = createAppendProcessor(field, value); + } else { + int valuesSize = randomIntBetween(0, 10); + for (int i = 0; i < valuesSize; i++) { + values.add(scalar.randomValue()); + } + appendProcessor = createAppendProcessor(field, values); + } + appendProcessor.execute(ingestDocument); + List list = ingestDocument.getFieldValue(field, List.class); + assertThat(list, not(sameInstance(values))); + assertThat(list, equalTo(values)); + } + + public void testConvertScalarToList() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Scalar scalar = randomFrom(Scalar.values()); + Object initialValue = scalar.randomValue(); + String field = RandomDocumentPicks.addRandomField(random(), ingestDocument, initialValue); + List values = new ArrayList<>(); + Processor appendProcessor; + if (randomBoolean()) { + Object value = scalar.randomValue(); + values.add(value); + appendProcessor = createAppendProcessor(field, value); + } else { + int valuesSize = randomIntBetween(0, 10); + for (int i = 0; i < valuesSize; i++) { + values.add(scalar.randomValue()); + } + appendProcessor = createAppendProcessor(field, values); + } + appendProcessor.execute(ingestDocument); + List fieldValue = ingestDocument.getFieldValue(field, List.class); + assertThat(fieldValue.size(), equalTo(values.size() + 1)); + assertThat(fieldValue.get(0), 
equalTo(initialValue)); + for (int i = 1; i < values.size() + 1; i++) { + assertThat(fieldValue.get(i), equalTo(values.get(i - 1))); + } + } + + public void testAppendMetadata() throws Exception { + //here any metadata field value becomes a list, which won't make sense in most of the cases, + // but support for append is streamlined like for set so we test it + IngestDocument.MetaData randomMetaData = randomFrom(IngestDocument.MetaData.values()); + List values = new ArrayList<>(); + Processor appendProcessor; + if (randomBoolean()) { + String value = randomAsciiOfLengthBetween(1, 10); + values.add(value); + appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), value); + } else { + int valuesSize = randomIntBetween(0, 10); + for (int i = 0; i < valuesSize; i++) { + values.add(randomAsciiOfLengthBetween(1, 10)); + } + appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), values); + } + + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetaData.getFieldName()); + appendProcessor.execute(ingestDocument); + List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class); + if (initialValue == null) { + assertThat(list, equalTo(values)); + } else { + assertThat(list.size(), equalTo(values.size() + 1)); + assertThat(list.get(0), equalTo(initialValue)); + for (int i = 1; i < list.size(); i++) { + assertThat(list.get(i), equalTo(values.get(i - 1))); + } + } + } + + private static Processor createAppendProcessor(String fieldName, Object fieldValue) { + TemplateService templateService = TestTemplateService.instance(); + return new AppendProcessor(templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); + } + + private enum Scalar { + INTEGER { + @Override + Object randomValue() { + return randomInt(); + } + }, DOUBLE { + @Override + Object randomValue() { + return randomDouble(); + } + }, 
FLOAT { + @Override + Object randomValue() { + return randomFloat(); + } + }, BOOLEAN { + @Override + Object randomValue() { + return randomBoolean(); + } + }, STRING { + @Override + Object randomValue() { + return randomAsciiOfLengthBetween(1, 10); + } + }, MAP { + @Override + Object randomValue() { + int numItems = randomIntBetween(1, 10); + Map map = new HashMap<>(numItems); + for (int i = 0; i < numItems; i++) { + map.put(randomAsciiOfLengthBetween(1, 10), randomFrom(Scalar.values()).randomValue()); + } + return map; + } + }, NULL { + @Override + Object randomValue() { + return null; + } + }; + + abstract Object randomValue(); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java index dabdeb54f94..f4947d3ae3a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java @@ -76,9 +76,8 @@ public class SetProcessorTests extends ESTestCase { assertThat(ingestDocument.getFieldValue(randomMetaData.getFieldName(), String.class), Matchers.equalTo("_value")); } - private Processor createSetProcessor(String fieldName, Object fieldValue) { + private static Processor createSetProcessor(String fieldName, Object fieldValue) { TemplateService templateService = TestTemplateService.instance(); return new SetProcessor(templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); } - } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml index 7807631344a..7c43273657c 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml @@ -56,14 +56,6 @@ } - match: { _id: 
"my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test @@ -101,14 +93,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml index 75cef2971c0..a15bc440022 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml @@ -13,6 +13,12 @@ "value": "new_value" } }, + { + "append" : { + "field" : "new_field", + "value": ["item2", "item3", "item4"] + } + }, { "rename" : { "field" : "field_to_rename", @@ -93,6 +99,7 @@ id: 1 - is_false: _source.field_to_rename - is_false: _source.field_to_remove + - match: { _source.new_field: ["new_value", "item2", "item3", "item4"] } - match: { _source.renamed_field: "value" } - match: { _source.field_to_lowercase: "lowercase" } - match: { _source.field_to_uppercase: "UPPERCASE" } @@ -125,14 +132,6 @@ } - match: { _id: "my_pipeline" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test diff --git a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml index fb0fa9c1083..52be7299e29 100644 --- 
a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml +++ b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml @@ -16,19 +16,17 @@ "field" : "index_type_id", "value": "{{_index}}/{{_type}}/{{_id}}" } + }, + { + "append" : { + "field" : "metadata", + "value": ["{{_index}}", "{{_type}}", "{{_id}}"] + } } ] } - match: { _id: "my_pipeline_1" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test @@ -42,8 +40,9 @@ index: test type: test id: 1 - - length: { _source: 1 } + - length: { _source: 2 } - match: { _source.index_type_id: "test/test/1" } + - match: { _source.metadata: ["test", "test", "1"] } --- "Test templateing": @@ -63,7 +62,14 @@ "field" : "field4", "value": "{{field1}}/{{field2}}/{{field3}}" } + }, + { + "append" : { + "field" : "metadata", + "value": ["{{field1}}", "{{field2}}", "{{field3}}"] + } } + ] } - match: { _id: "my_pipeline_1" } @@ -101,14 +107,6 @@ } - match: { _id: "my_pipeline_3" } - # Simulate a Thread.sleep(), because pipeline are updated in the background - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 2s - - match: { "timed_out": true } - - do: ingest.index: index: test @@ -116,6 +114,7 @@ id: 1 pipeline_id: "my_pipeline_1" body: { + metadata: "0", field1: "1", field2: "2", field3: "3" @@ -126,11 +125,12 @@ index: test type: test id: 1 - - length: { _source: 4 } + - length: { _source: 5 } - match: { _source.field1: "1" } - match: { _source.field2: "2" } - match: { _source.field3: "3" } - match: { _source.field4: "1/2/3" } + - match: { _source.metadata: ["0","1","2","3"] } - do: ingest.index: From 0bf4c8fb82e2c7ccf2f3f97683ed4fb2fb9a4e4c Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 7 Dec 2015 
10:53:36 -0800 Subject: [PATCH 137/347] Add on_failure field to processors and pipelines. both processors and pipelines now have the ability to define a separate list of processors to be executed if the original line of execution throws an Exception. processors without an on_failure parameter defined will throw an exception and exit the pipeline immediately. processors with on_failure defined will catch the exception and allow for further processors to run. Exceptions within the on_failure block will be treated the same as the top-level. --- .../org/elasticsearch/ingest/Pipeline.java | 74 +++++++---- .../ingest/processor/CompoundProcessor.java | 81 ++++++++++++ .../ingest/PipelineFactoryTests.java | 45 +++++++ .../processor/CompoundProcessorTests.java | 117 +++++++++++++++++ .../ingest/PipelineExecutionServiceTests.java | 79 ++++++++++-- .../transport/IngestActionFilterTests.java | 3 +- .../SimulateExecutionServiceTests.java | 9 +- .../SimulatePipelineRequestParsingTests.java | 27 +++- .../rest-api-spec/test/ingest/20_crud.yaml | 50 +++++++ .../test/ingest/70_simulate.yaml | 42 ++++++ .../test/ingest/80_on_failure.yaml | 122 ++++++++++++++++++ 11 files changed, 602 insertions(+), 47 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java index 7c92e4bbd53..b0e0a2a66a8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.ingest.processor.ConfigurationUtils; import 
org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.CompoundProcessor; import java.util.ArrayList; import java.util.Arrays; @@ -36,21 +37,19 @@ public final class Pipeline { private final String id; private final String description; - private final List processors; + private final CompoundProcessor compoundProcessor; - public Pipeline(String id, String description, List processors) { + public Pipeline(String id, String description, CompoundProcessor compoundProcessor) { this.id = id; this.description = description; - this.processors = processors; + this.compoundProcessor = compoundProcessor; } /** * Modifies the data of a document to be indexed based on the processor this pipeline holds */ public void execute(IngestDocument ingestDocument) throws Exception { - for (Processor processor : processors) { - processor.execute(ingestDocument); - } + compoundProcessor.execute(ingestDocument); } /** @@ -71,33 +70,56 @@ public final class Pipeline { * Unmodifiable list containing each processor that operates on the data. 
*/ public List getProcessors() { - return processors; + return compoundProcessor.getProcessors(); + } + + /** + * Unmodifiable list containing each on_failure processor that operates on the data in case of + * exception thrown in pipeline processors + */ + public List getOnFailureProcessors() { + return compoundProcessor.getOnFailureProcessors(); } public final static class Factory { + private Processor readProcessor(Map processorRegistry, String type, Map config) throws Exception { + Processor.Factory factory = processorRegistry.get(type); + if (factory != null) { + List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); + Processor processor = factory.create(config); + if (config.isEmpty() == false) { + throw new IllegalArgumentException("processor [" + type + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray())); + } + if (onFailureProcessors.isEmpty()) { + return processor; + } else { + return new CompoundProcessor(Arrays.asList(processor), onFailureProcessors); + } + } else { + throw new IllegalArgumentException("No processor type exist with name [" + type + "]"); + } + } - public Pipeline create(String id, Map config, Map processorRegistry) throws Exception { - String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); - List processors = new ArrayList<>(); - @SuppressWarnings("unchecked") - List>> processorConfigs = (List>>) config.get("processors"); - if (processorConfigs != null ) { - for (Map> processor : processorConfigs) { - for (Map.Entry> entry : processor.entrySet()) { - Processor.Factory factory = processorRegistry.get(entry.getKey()); - if (factory != null) { - Map processorConfig = entry.getValue(); - processors.add(factory.create(processorConfig)); - if (processorConfig.isEmpty() == false) { - throw new IllegalArgumentException("processor [" + entry.getKey() + "] doesn't support one or more provided configuration parameters " + 
Arrays.toString(processorConfig.keySet().toArray())); - } - } else { - throw new IllegalArgumentException("No processor type exist with name [" + entry.getKey() + "]"); - } + private List readProcessors(String fieldName, Map processorRegistry, Map config) throws Exception { + List>> onFailureProcessorConfigs = ConfigurationUtils.readOptionalList(config, fieldName); + List onFailureProcessors = new ArrayList<>(); + if (onFailureProcessorConfigs != null) { + for (Map> processorConfigWithKey : onFailureProcessorConfigs) { + for (Map.Entry> entry : processorConfigWithKey.entrySet()) { + onFailureProcessors.add(readProcessor(processorRegistry, entry.getKey(), entry.getValue())); } } } - return new Pipeline(id, description, Collections.unmodifiableList(processors)); + + return onFailureProcessors; + } + + public Pipeline create(String id, Map config, Map processorRegistry) throws Exception { + String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); + List processors = readProcessors("processors", processorRegistry, config); + List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.unmodifiableList(processors), Collections.unmodifiableList(onFailureProcessors)); + return new Pipeline(id, description, compoundProcessor); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java new file mode 100644 index 00000000000..dbaad2e2313 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.IngestDocument; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +/** + * A Processor that executes a list of other "processors". It executes a separate list of + * "onFailureProcessors" when any of the processors throw an {@link Exception}. + */ +public class CompoundProcessor implements Processor { + + private final List processors; + private final List onFailureProcessors; + + public CompoundProcessor(Processor... 
processor) { + this(Arrays.asList(processor), Collections.emptyList()); + } + public CompoundProcessor(List processors, List onFailureProcessors) { + this.processors = processors; + this.onFailureProcessors = onFailureProcessors; + } + + public List getOnFailureProcessors() { + return onFailureProcessors; + } + + public List getProcessors() { + return processors; + } + + @Override + public String getType() { + return "compound[" + processors.stream().map(p -> p.getType()).collect(Collectors.joining(",")) + "]"; + } + + @Override + public void execute(IngestDocument ingestDocument) throws Exception { + try { + for (Processor processor : processors) { + processor.execute(ingestDocument); + } + } catch (Exception e) { + if (onFailureProcessors.isEmpty()) { + throw e; + } else { + executeOnFailure(ingestDocument); + } + } + + } + + void executeOnFailure(IngestDocument ingestDocument) throws Exception { + for (Processor processor : onFailureProcessors) { + processor.execute(ingestDocument); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java index 459f7a62869..5d61f11ac24 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java @@ -52,6 +52,29 @@ public class PipelineFactoryTests extends ESTestCase { assertThat(pipeline.getProcessors().get(0).getType(), equalTo("test-processor")); } + public void testCreateWithPipelineOnFailure() throws Exception { + Map processorConfig = new HashMap<>(); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("description", "_description"); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test-processor", processorConfig))); + pipelineConfig.put("on_failure", Collections.singletonList(Collections.singletonMap("test-processor", 
processorConfig))); + Pipeline.Factory factory = new Pipeline.Factory(); + Map processorRegistry = new HashMap<>(); + Processor processor = mock(Processor.class); + when(processor.getType()).thenReturn("test-processor"); + Processor.Factory processorFactory = mock(Processor.Factory.class); + when(processorFactory.create(processorConfig)).thenReturn(processor); + processorRegistry.put("test-processor", processorFactory); + + Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); + assertThat(pipeline.getId(), equalTo("_id")); + assertThat(pipeline.getDescription(), equalTo("_description")); + assertThat(pipeline.getProcessors().size(), equalTo(1)); + assertThat(pipeline.getProcessors().get(0).getType(), equalTo("test-processor")); + assertThat(pipeline.getOnFailureProcessors().size(), equalTo(1)); + assertThat(pipeline.getOnFailureProcessors().get(0).getType(), equalTo("test-processor")); + } + public void testCreateUnusedProcessorOptions() throws Exception { Map processorConfig = new HashMap<>(); processorConfig.put("unused", "value"); @@ -71,4 +94,26 @@ public class PipelineFactoryTests extends ESTestCase { assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]")); } } + + public void testCreateProcessorsWithOnFailureProperties() throws Exception { + Map processorConfig = new HashMap<>(); + processorConfig.put("on_failure", Collections.singletonList(Collections.singletonMap("test", new HashMap<>()))); + + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("description", "_description"); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + Pipeline.Factory factory = new Pipeline.Factory(); + Map processorFactoryStore = new HashMap<>(); + Processor processor = mock(Processor.class); + when(processor.getType()).thenReturn("test-processor"); + Processor.Factory processorFactory = 
mock(Processor.Factory.class); + when(processorFactory.create(processorConfig)).thenReturn(processor); + processorFactoryStore.put("test", processorFactory); + + Pipeline pipeline = factory.create("_id", pipelineConfig, processorFactoryStore); + assertThat(pipeline.getId(), equalTo("_id")); + assertThat(pipeline.getDescription(), equalTo("_description")); + assertThat(pipeline.getProcessors().size(), equalTo(1)); + assertThat(pipeline.getProcessors().get(0).getType(), equalTo("compound[test-processor]")); + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java new file mode 100644 index 00000000000..efb49e4c6dd --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Arrays; +import java.util.HashMap; + +import static org.elasticsearch.mock.orig.Mockito.verify; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; + +public class CompoundProcessorTests extends ESTestCase { + private IngestDocument ingestDocument; + + @Before + public void init() { + ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); + } + + public void testEmpty() throws Exception { + CompoundProcessor processor = new CompoundProcessor(); + assertThat(processor.getProcessors().isEmpty(), is(true)); + assertThat(processor.getOnFailureProcessors().isEmpty(), is(true)); + processor.execute(ingestDocument); + } + + public void testSingleProcessor() throws Exception { + Processor processor = mock(Processor.class); + CompoundProcessor compoundProcessor = new CompoundProcessor(processor); + assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); + assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); + compoundProcessor.execute(ingestDocument); + verify(processor, times(1)).execute(ingestDocument); + } + + public void testSingleProcessorWithException() throws Exception { + Processor processor = mock(Processor.class); + doThrow(new RuntimeException("error")).doNothing().when(processor).execute(ingestDocument); + CompoundProcessor compoundProcessor = new CompoundProcessor(processor); + assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); + 
assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); + try { + compoundProcessor.execute(ingestDocument); + fail("should throw exception"); + } catch (Exception e) { + assertThat(e.getMessage(), equalTo("error")); + } + verify(processor, times(1)).execute(ingestDocument); + } + + public void testSingleProcessorWithOnFailureProcessor() throws Exception { + Processor processor = mock(Processor.class); + doThrow(new RuntimeException("error")).doNothing().when(processor).execute(ingestDocument); + Processor processorNext = mock(Processor.class); + CompoundProcessor compoundProcessor = new CompoundProcessor(Arrays.asList(processor), Arrays.asList(processorNext)); + assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); + assertThat(compoundProcessor.getOnFailureProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getOnFailureProcessors().get(0), equalTo(processorNext)); + compoundProcessor.execute(ingestDocument); + verify(processor, times(1)).execute(ingestDocument); + verify(processorNext, times(1)).execute(ingestDocument); + } + + public void testSingleProcessorWithNestedFailures() throws Exception { + Processor processor = mock(Processor.class); + doThrow(new RuntimeException("error")).doNothing().when(processor).execute(ingestDocument); + Processor processorToFail = mock(Processor.class); + doThrow(new RuntimeException("error")).doNothing().when(processorToFail).execute(ingestDocument); + Processor lastProcessor = mock(Processor.class); + + CompoundProcessor innerCompoundOnFailProcessor = new CompoundProcessor(Arrays.asList(processorToFail), Arrays.asList(lastProcessor)); + CompoundProcessor compoundOnFailProcessor = spy(innerCompoundOnFailProcessor); + + CompoundProcessor innerCompoundProcessor = new CompoundProcessor(Arrays.asList(processor), Arrays.asList(compoundOnFailProcessor)); + CompoundProcessor compoundProcessor = 
spy(innerCompoundProcessor); + + assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); + assertThat(compoundProcessor.getOnFailureProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getOnFailureProcessors().get(0), equalTo(compoundOnFailProcessor)); + compoundProcessor.execute(ingestDocument); + verify(processor, times(1)).execute(ingestDocument); + verify(compoundProcessor, times(1)).executeOnFailure(ingestDocument); + verify(compoundOnFailProcessor, times(1)).execute(ingestDocument); + verify(processorToFail, times(1)).execute(ingestDocument); + verify(compoundOnFailProcessor, times(1)).executeOnFailure(ingestDocument); + verify(lastProcessor, times(1)).execute(ingestDocument); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 961d820b876..7140bf53a8d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.processor.CompoundProcessor; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.set.SetProcessor; import org.elasticsearch.test.ESTestCase; @@ -38,6 +39,7 @@ import org.mockito.ArgumentMatcher; import org.mockito.Matchers; import org.mockito.invocation.InvocationOnMock; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -85,8 +87,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { } public 
void testExecuteSuccess() throws Exception { - Processor processor = mock(Processor.class); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + CompoundProcessor processor = mock(CompoundProcessor.class); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); Consumer failureHandler = mock(Consumer.class); @@ -99,7 +101,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { } public void testExecutePropagateAllMetaDataUpdates() throws Exception { - Processor processor = mock(Processor.class); + CompoundProcessor processor = mock(CompoundProcessor.class); doAnswer((InvocationOnMock invocationOnMock) -> { IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) { @@ -112,7 +114,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { } return null; }).when(processor).execute(any()); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); Consumer failureHandler = mock(Consumer.class); @@ -132,8 +134,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { } public void testExecuteFailure() throws Exception { - Processor processor = mock(Processor.class); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + CompoundProcessor processor = mock(CompoundProcessor.class); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", 
"_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); Consumer failureHandler = mock(Consumer.class); @@ -144,6 +146,57 @@ public class PipelineExecutionServiceTests extends ESTestCase { verify(completionHandler, never()).accept(anyBoolean()); } + public void testExecuteSuccessWithOnFailure() throws Exception { + Processor processor = mock(Processor.class); + Processor onFailureProcessor = mock(Processor.class); + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + //TODO we remove metadata, this check is not valid anymore, what do we replace it with? 
+ //verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(failureHandler, never()).accept(any(RuntimeException.class)); + verify(completionHandler, times(1)).accept(true); + } + + public void testExecuteFailureWithOnFailure() throws Exception { + Processor processor = mock(Processor.class); + Processor onFailureProcessor = mock(Processor.class); + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(failureHandler, times(1)).accept(any(RuntimeException.class)); + verify(completionHandler, never()).accept(anyBoolean()); + } + + public void testExecuteFailureWithNestedOnFailure() throws Exception { + Processor processor = mock(Processor.class); + Processor onFailureProcessor = mock(Processor.class); + Processor onFailureOnFailureProcessor = mock(Processor.class); + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), + Collections.singletonList(new CompoundProcessor(Arrays.asList(onFailureProcessor),Arrays.asList(onFailureOnFailureProcessor)))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); + IndexRequest indexRequest = 
new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + doThrow(new RuntimeException()).when(onFailureOnFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + verify(failureHandler, times(1)).accept(any(RuntimeException.class)); + verify(completionHandler, never()).accept(anyBoolean()); + } + @SuppressWarnings("unchecked") public void testExecuteTTL() throws Exception { // test with valid ttl @@ -152,7 +205,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { config.put("field", "_ttl"); config.put("value", "5d"); Processor processor = metaProcessorFactory.create(config); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); Consumer failureHandler = mock(Consumer.class); @@ -169,7 +222,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { config.put("field", "_ttl"); config.put("value", "abc"); processor = metaProcessorFactory.create(config); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); indexRequest = new IndexRequest("_index", "_type", 
"_id").source(Collections.emptyMap()); failureHandler = mock(Consumer.class); @@ -179,7 +232,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { verify(completionHandler, never()).accept(anyBoolean()); // test with provided ttl - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.emptyList())); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", mock(CompoundProcessor.class))); indexRequest = new IndexRequest("_index", "_type", "_id") .source(Collections.emptyMap()) @@ -217,10 +270,10 @@ public class PipelineExecutionServiceTests extends ESTestCase { String pipelineId = "_id"; - Processor pipeline = mock(Processor.class); + CompoundProcessor processor = mock(CompoundProcessor.class); Exception error = new RuntimeException(); - doThrow(error).when(pipeline).execute(any()); - when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, Collections.singletonList(pipeline))); + doThrow(error).when(processor).execute(any()); + when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor)); Consumer requestItemErrorHandler = mock(Consumer.class); Consumer completionHandler = mock(Consumer.class); @@ -241,7 +294,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { } String pipelineId = "_id"; - when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, Collections.emptyList())); + when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, new CompoundProcessor())); Consumer requestItemErrorHandler = mock(Consumer.class); Consumer completionHandler = mock(Consumer.class); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 6baf588d0d1..2bb44b80868 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.CompoundProcessor; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.plugin.ingest.IngestPlugin; @@ -186,7 +187,7 @@ public class IngestActionFilterTests extends ESTestCase { return null; } }; - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", Collections.singletonList(processor))); + when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); executionService = new PipelineExecutionService(store, threadPool); IngestBootstrapper bootstrapper = mock(IngestBootstrapper.class); when(bootstrapper.getPipelineExecutionService()).thenReturn(executionService); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index a60b8945f10..c4d12b23a47 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.processor.CompoundProcessor; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -47,7 +47,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { private ThreadPool threadPool; private SimulateExecutionService executionService; private Pipeline pipeline; - private Processor processor; + private CompoundProcessor processor; private IngestDocument ingestDocument; @Before @@ -58,10 +58,9 @@ public class SimulateExecutionServiceTests extends ESTestCase { .build() ); executionService = new SimulateExecutionService(threadPool); - processor = mock(Processor.class); + processor = mock(CompoundProcessor.class); when(processor.getType()).thenReturn("mock"); - pipeline = new Pipeline("_id", "_description", Arrays.asList(processor, processor)); - //ingestDocument = new IngestDocument("_index", "_type", "_id", Collections.singletonMap("foo", "bar")); + pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java index 4bff810a9c2..bee6ddd141c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.processor.CompoundProcessor; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.test.ESTestCase; @@ -28,6 +29,7 @@ import org.junit.Before; import 
java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -49,7 +51,9 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { @Before public void init() throws IOException { - Pipeline pipeline = new Pipeline(SimulatePipelineRequest.SIMULATED_PIPELINE_ID, null, Collections.singletonList(mock(Processor.class))); + CompoundProcessor pipelineCompoundProcessor = mock(CompoundProcessor.class); + when(pipelineCompoundProcessor.getProcessors()).thenReturn(Arrays.asList(mock(Processor.class))); + Pipeline pipeline = new Pipeline(SimulatePipelineRequest.SIMULATED_PIPELINE_ID, null, pipelineCompoundProcessor); Map processorRegistry = new HashMap<>(); processorRegistry.put("mock_processor", mock(Processor.Factory.class)); store = mock(PipelineStore.class); @@ -133,9 +137,28 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { List> processors = new ArrayList<>(); int numProcessors = randomIntBetween(1, 10); for (int i = 0; i < numProcessors; i++) { - processors.add(Collections.singletonMap("mock_processor", Collections.emptyMap())); + Map processorConfig = new HashMap<>(); + List> onFailureProcessors = new ArrayList<>(); + int numOnFailureProcessors = randomIntBetween(0, 1); + for (int j = 0; j < numOnFailureProcessors; j++) { + onFailureProcessors.add(Collections.singletonMap("mock_processor", Collections.emptyMap())); + } + if (numOnFailureProcessors > 0) { + processorConfig.put("on_failure", onFailureProcessors); + } + processors.add(Collections.singletonMap("mock_processor", processorConfig)); } pipelineConfig.put("processors", processors); + + List> onFailureProcessors = new ArrayList<>(); + int numOnFailureProcessors = randomIntBetween(0, 1); + for (int i = 0; i < numOnFailureProcessors; i++) { + onFailureProcessors.add(Collections.singletonMap("mock_processor", Collections.emptyMap())); + } + if (numOnFailureProcessors > 0) { + 
pipelineConfig.put("on_failure", onFailureProcessors); + } + requestContent.put(Fields.PIPELINE, pipelineConfig); SimulatePipelineRequest.Parsed actualRequest = SimulatePipelineRequest.parse(requestContent, false, store); diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index 7dd83313a7f..01b43cfefaa 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -72,3 +72,53 @@ } ] } + +--- +"Test basic pipeline with on_failure in processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field2", + "value": "_value", + "on_failure": [ + { + "set" : { + "field" : "field2", + "value" : "_failed_value" + } + } + ] + } + } + ] + } + - match: { _index: ".ingest" } + - match: { _type: "pipeline" } + - match: { _version: 1 } + - match: { _id: "my_pipeline" } + + - do: + ingest.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline._source.description: "_description" } + - match: { my_pipeline._version: 1 } + + - do: + ingest.delete_pipeline: + id: "my_pipeline" + - match: { _index: ".ingest" } + - match: { _type: "pipeline" } + - match: { _version: 2 } + - match: { _id: "my_pipeline" } + - match: { found: true } + + - do: + catch: missing + ingest.get_pipeline: + id: "my_pipeline" diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml index 641fecf16f1..256587f6549 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml @@ -69,6 +69,48 @@ } - length: { docs: 1 } +--- +"Test simulate with provided pipeline definition with on_failure 
block": + - do: + ingest.simulate: + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "rename" : { + "field" : "does_not_exist", + "to" : "field2", + "on_failure" : [ + { + "set" : { + "field" : "field2", + "value" : "_value" + } + } + ] + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._source.foo: "bar" } + - match: { docs.0.doc._source.field2: "_value" } + - length: { docs.0.doc._ingest: 1 } + - is_true: docs.0.doc._ingest.timestamp + --- "Test simulate with no provided pipeline or pipeline_id": - do: diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml new file mode 100644 index 00000000000..eeae210c2b5 --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml @@ -0,0 +1,122 @@ +--- +"Test Pipeline With On Failure Block": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "rename" : { + "field" : "foofield", + "to" : "field1" + } + }, + { + "grok" : { + "field" : "field1", + "pattern" : "%{NUMBER:val} %{NUMBER:status} <%{WORD:msg}>" + } + } + ], + "on_failure" : [ + { + "grok" : { + "field" : "field1", + "pattern" : "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>" + } + }, + { + "set" : { + "field" : "_failed", + "value" : true + } + } + ] + } + - match: { _id: "my_pipeline" } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field1: "123.42 400 "} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.val: 123.42 } + - match: { _source.status: 400 } + - match: { _source.msg: "foo" } + - match: { _source._failed: true } + +--- +"Test Pipeline With Nested Processor On Failures": + - do: + 
ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "grok" : { + "field" : "field1", + "pattern" : "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>" + } + }, + { + "rename" : { + "field" : "foofield", + "to" : "field1", + "on_failure" : [ + { + "set" : { + "field" : "foofield", + "value" : "exists" + } + }, + { + "rename" : { + "field" : "foofield2", + "to" : "field1", + "on_failure" : [ + { + "set" : { + "field" : "foofield2", + "value" : "ran" + } + } + ] + } + } + ] + } + } + ] + } + - match: { _id: "my_pipeline" } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline_id: "my_pipeline" + body: {field1: "123.42 400 "} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.val: 123.42 } + - match: { _source.msg: "foo" } + - match: { _source.status: 400 } + - match: { _source.foofield: "exists" } + - match: { _source.foofield2: "ran" } From 44d64c8a45fb2a6385946e0f88cc05018f88f9ee Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 22 Dec 2015 12:07:02 -0800 Subject: [PATCH 138/347] rename pipeline_id param to pipeline --- .../org/elasticsearch/plugin/ingest/IngestPlugin.java | 2 +- .../src/test/resources/rest-api-spec/api/ingest.bulk.json | 2 +- .../test/resources/rest-api-spec/api/ingest.index.json | 2 +- .../test/resources/rest-api-spec/test/ingest/30_grok.yaml | 6 +++--- .../rest-api-spec/test/ingest/40_geoip_processor.yaml | 6 +++--- .../rest-api-spec/test/ingest/50_date_processor.yaml | 2 +- .../resources/rest-api-spec/test/ingest/60_mutate.yaml | 4 ++-- .../rest-api-spec/test/ingest/80_on_failure.yaml | 4 ++-- .../10_pipeline_with_mustache_templates.yaml | 8 ++++---- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 2db3c9ec69d..af85765c416 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -50,7 +50,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class IngestPlugin extends Plugin { public static final String PIPELINE_ID_PARAM_CONTEXT_KEY = "__pipeline_id__"; - public static final String PIPELINE_ID_PARAM = "pipeline_id"; + public static final String PIPELINE_ID_PARAM = "pipeline"; public static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; public static final String NAME = "ingest"; public static final String NODE_INGEST_SETTING = "node.ingest"; diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json index 5595c2007ff..ecd53ee496b 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json @@ -41,7 +41,7 @@ "type": "list", "description" : "Default comma-separated list of fields to return in the response for updates" }, - "pipeline_id" : { + "pipeline" : { "type" : "string", "description" : "The pipeline id to preprocess incoming documents with" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json index 02dc30b2f3b..7420f69e45e 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json +++ b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json @@ -66,7 +66,7 @@ "options" : ["internal", "external", "external_gte", "force"], "description" : "Specific version type" }, - "pipeline_id" : { + "pipeline" : { "type" : "string", "description" : "The pipeline id to preprocess incoming documents with" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml 
b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml index 7c43273657c..e8a59d4ea65 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml @@ -22,7 +22,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: "123.42 400 "} - do: @@ -61,7 +61,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: ""} - do: @@ -98,7 +98,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: ""} - do: diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml index d35898a59d5..d86eb25d4d4 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml @@ -21,7 +21,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: "128.101.101.101"} - do: @@ -65,7 +65,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: "128.101.101.101"} - do: @@ -110,7 +110,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: "128.101.101.101"} - do: diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml index 5caad399d23..64354c8ac16 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml @@ -24,7 +24,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: 
{date_source_field: "12/06/2010"} - do: diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml index a15bc440022..f1a324318e7 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml @@ -79,7 +79,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: { field_to_rename: "value", field_to_remove: "old_value", @@ -137,7 +137,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field: "value"} - do: diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml index eeae210c2b5..e28d5ddaf94 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml @@ -42,7 +42,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: "123.42 400 "} - do: @@ -107,7 +107,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline" + pipeline: "my_pipeline" body: {field1: "123.42 400 "} - do: diff --git a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml index 52be7299e29..9c44d86feaf 100644 --- a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml +++ b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml @@ -32,7 +32,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline_1" + pipeline: 
"my_pipeline_1" body: {} - do: @@ -112,7 +112,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline_1" + pipeline: "my_pipeline_1" body: { metadata: "0", field1: "1", @@ -137,7 +137,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline_2" + pipeline: "my_pipeline_2" body: { field1: "field2" } @@ -156,7 +156,7 @@ index: test type: test id: 1 - pipeline_id: "my_pipeline_3" + pipeline: "my_pipeline_3" body: { field_to_remove: "field2", field2: "2", From dbbb29632213940fbd8f07a7c2414e684c16b486 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 22 Dec 2015 16:51:25 +0100 Subject: [PATCH 139/347] added a `node.ingest` setting that controls whether ingest is active or not. Defaults to `false`. If `node.ingest` isn't active then ingest related API calls fail and if the `pipeline_id` parameter is set then index and bulk requests fail. --- docs/plugins/ingest.asciidoc | 27 +++++++++ plugins/ingest/build.gradle | 6 ++ .../plugin/ingest/IngestModule.java | 46 +++++++++------ .../plugin/ingest/IngestPlugin.java | 36 ++++++++---- .../ingest/rest/RestIngestDisabledAction.java | 48 +++++++++++++++ .../transport/IngestDisabledActionFilter.java | 58 +++++++++++++++++++ .../elasticsearch/ingest/IngestClientIT.java | 17 ++++++ .../reload/ReloadPipelinesActionTests.java | 2 +- qa/ingest-disabled/build.gradle | 30 ++++++++++ .../smoketest/IngestDisabledIT.java | 41 +++++++++++++ .../ingest_mustache/10_ingest_disabled.yaml | 58 +++++++++++++++++++ qa/ingest-with-mustache/build.gradle | 1 + settings.gradle | 1 + 13 files changed, 341 insertions(+), 30 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java create mode 100644 qa/ingest-disabled/build.gradle create mode 100644 qa/ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java create mode 
100644 qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 1d75daf387d..74f1d66b847 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -1,6 +1,33 @@ [[ingest]] == Ingest Plugin +The ingest plugin can be used to pre-process documents before the actual indexing takes place. +This pre-processing happens by the ingest plugin that intercepts bulk and index requests, applies the +transformations and then passes the documents back to the index or bulk APIs. + +The ingest plugin is disabled by default. In order to enable the ingest plugin the following +setting should be configured in the elasticsearch.yml file: + +[source,yaml] +-------------------------------------------------- +node.ingest: true +-------------------------------------------------- + +The ingest plugin can be installed and enabled on any node. It is possible to run ingest +on an master and or data node or have dedicated client nodes that run with ingest. + +In order to pre-process document before indexing the `pipeline` parameter should be used +on an index or bulk request to tell the ingest plugin what pipeline is going to be used. + +[source,js] +-------------------------------------------------- +PUT /my-index/my-type/my-id?ingest=my_pipeline_id +{ + ... 
+} +-------------------------------------------------- +// AUTOSENSE + === Processors ==== Set processor diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 2e5aede319f..dfdf421e608 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -66,3 +66,9 @@ bundlePlugin { //geoip WebServiceClient needs Google http client, but we're not using WebServiceClient and // joni has AsmCompilerSupport, but that isn't being used: thirdPartyAudit.missingClasses = true + +integTest { + cluster { + systemProperty 'es.node.ingest', 'true' + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index c471163c0fa..bc083938ec7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -42,31 +42,41 @@ import java.util.Map; public class IngestModule extends AbstractModule { + private final boolean ingestEnabled; private final Map processorFactoryProviders = new HashMap<>(); + public IngestModule(boolean ingestEnabled) { + this.ingestEnabled = ingestEnabled; + } + @Override protected void configure() { + // Even if ingest isn't enable we still need to make sure that rest requests with pipeline + // param copy the pipeline into the context, so that in IngestDisabledActionFilter + // index/bulk requests can be failed binder().bind(IngestRestFilter.class).asEagerSingleton(); - binder().bind(IngestBootstrapper.class).asEagerSingleton(); + if (ingestEnabled) { + binder().bind(IngestBootstrapper.class).asEagerSingleton(); - addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); - addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); - addProcessor(DateProcessor.TYPE, 
(environment, templateService) -> new DateProcessor.Factory()); - addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); - addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); + addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new 
RemoveProcessor.Factory(templateService)); + addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); + addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); - for (Map.Entry entry : processorFactoryProviders.entrySet()) { - mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue()); + MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); + for (Map.Entry entry : processorFactoryProviders.entrySet()) { + mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue()); + } } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index af85765c416..f8eb044d881 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -30,7 +30,9 @@ import org.elasticsearch.plugin.ingest.rest.RestDeletePipelineAction; import org.elasticsearch.plugin.ingest.rest.RestGetPipelineAction; import org.elasticsearch.plugin.ingest.rest.RestPutPipelineAction; import org.elasticsearch.plugin.ingest.rest.RestSimulatePipelineAction; +import org.elasticsearch.plugin.ingest.rest.RestIngestDisabledAction; 
import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; +import org.elasticsearch.plugin.ingest.transport.IngestDisabledActionFilter; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineTransportAction; import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; @@ -56,11 +58,13 @@ public class IngestPlugin extends Plugin { public static final String NODE_INGEST_SETTING = "node.ingest"; private final Settings nodeSettings; + private final boolean ingestEnabled; private final boolean transportClient; public IngestPlugin(Settings nodeSettings) { this.nodeSettings = nodeSettings; - transportClient = TransportClient.CLIENT_TYPE.equals(nodeSettings.get(Client.CLIENT_TYPE_SETTING)); + this.ingestEnabled = nodeSettings.getAsBoolean(NODE_INGEST_SETTING, false); + this.transportClient = TransportClient.CLIENT_TYPE.equals(nodeSettings.get(Client.CLIENT_TYPE_SETTING)); } @Override @@ -78,13 +82,13 @@ public class IngestPlugin extends Plugin { if (transportClient) { return Collections.emptyList(); } else { - return Collections.singletonList(new IngestModule()); + return Collections.singletonList(new IngestModule(ingestEnabled)); } } @Override public Collection> nodeServices() { - if (transportClient) { + if (transportClient|| ingestEnabled == false) { return Collections.emptyList(); } else { return Collections.singletonList(IngestBootstrapper.class); @@ -95,27 +99,37 @@ public class IngestPlugin extends Plugin { public Settings additionalSettings() { return settingsBuilder() .put(PipelineExecutionService.additionalSettings(nodeSettings)) - // TODO: in a followup issue this should be made configurable - .put(NODE_INGEST_SETTING, true) .build(); } public void onModule(ActionModule module) { if (transportClient == false) { - module.registerFilter(IngestActionFilter.class); + if (ingestEnabled) { + module.registerFilter(IngestActionFilter.class); + } else { + 
module.registerFilter(IngestDisabledActionFilter.class); + } + } + if (ingestEnabled) { + module.registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); + module.registerAction(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); + module.registerAction(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class); + module.registerAction(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class); } - module.registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); - module.registerAction(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); - module.registerAction(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class); - module.registerAction(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class); } public void onModule(NetworkModule networkModule) { - if (transportClient == false) { + if (transportClient) { + return; + } + + if (ingestEnabled) { networkModule.registerRestHandler(RestPutPipelineAction.class); networkModule.registerRestHandler(RestGetPipelineAction.class); networkModule.registerRestHandler(RestDeletePipelineAction.class); networkModule.registerRestHandler(RestSimulatePipelineAction.class); + } else { + networkModule.registerRestHandler(RestIngestDisabledAction.class); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java new file mode 100644 index 00000000000..80d0784956d --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.plugin.ingest.rest; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.BytesRestResponse; + +public class RestIngestDisabledAction extends BaseRestHandler { + + @Inject + public RestIngestDisabledAction(Settings settings, RestController controller, Client client) { + super(settings, controller, client); + controller.registerHandler(RestRequest.Method.DELETE, "/_ingest/pipeline/{id}", this); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this); + controller.registerHandler(RestRequest.Method.PUT, "/_ingest/pipeline/{id}", this); + controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this); + controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate", this); + } + + @Override + protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception 
{ + channel.sendResponse(new BytesRestResponse(channel, new IllegalArgumentException("ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used"))); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java new file mode 100644 index 00000000000..06db1ab099c --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.plugin.ingest.transport; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.plugin.ingest.IngestPlugin; + +public final class IngestDisabledActionFilter implements ActionFilter { + + @Override + public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + String pipelineId = request.getFromContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY); + if (pipelineId != null) { + failRequest(pipelineId); + } + pipelineId = request.getHeader(IngestPlugin.PIPELINE_ID_PARAM); + if (pipelineId != null) { + failRequest(pipelineId); + } + + chain.proceed(action, request, listener); + } + + @Override + public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + chain.proceed(action, response, listener); + } + + @Override + public int order() { + return Integer.MAX_VALUE; + } + + private static void failRequest(String pipelineId) { + throw new IllegalArgumentException("ingest plugin is disabled, cannot execute pipeline with id [" + pipelineId + "]"); + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 545f8a0b6bf..13934f5a83d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugin.ingest.IngestPlugin; @@ -68,6 +69,22 @@ public class IngestClientIT extends ESIntegTestCase { return nodePlugins(); } + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(IngestPlugin.NODE_INGEST_SETTING, true) + .build(); + } + + @Override + protected Settings externalClusterClientSettings() { + return Settings.builder() + .put(super.transportClientSettings()) + .put(IngestPlugin.NODE_INGEST_SETTING, true) + .build(); + } + public void testSimulate() throws Exception { new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) .setId("_id") diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java index d6f2d3f3d9f..87a2554ede1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java @@ -153,7 +153,7 @@ public class ReloadPipelinesActionTests extends ESTestCase { if (ingestNode) { attributes = Collections.singletonMap("ingest", "true"); } else { - attributes = Collections.emptyMap(); + attributes = randomBoolean() ? 
Collections.emptyMap() : Collections.singletonMap("ingest", "false"); } String id = String.valueOf(index); return new DiscoveryNode(id, id, new LocalTransportAddress(id), attributes, Version.CURRENT); diff --git a/qa/ingest-disabled/build.gradle b/qa/ingest-disabled/build.gradle new file mode 100644 index 00000000000..c74535e3789 --- /dev/null +++ b/qa/ingest-disabled/build.gradle @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +apply plugin: 'elasticsearch.rest-test' + +dependencies { + testCompile project(path: ':plugins:ingest', configuration: 'runtime') +} + +integTest { + cluster { + plugin 'ingest', project(':plugins:ingest') + } +} diff --git a/qa/ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java b/qa/ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java new file mode 100644 index 00000000000..e162807baca --- /dev/null +++ b/qa/ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.smoketest; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; + +public class IngestDisabledIT extends ESRestTestCase { + + public IngestDisabledIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } + +} diff --git a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml new file mode 100644 index 00000000000..f470b3152bd --- /dev/null +++ b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml @@ -0,0 +1,58 @@ +--- +"Test ingest APIS fail when is disabled": + - do: + catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field", + 
"value": "valie" + } + } + ] + } + + - do: + catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ + ingest.delete_pipeline: + id: "my_pipeline" + + - do: + catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ + ingest.get_pipeline: + id: "my_pipeline" + + - do: + catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ + ingest.simulate: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field", + "value": "valie" + } + } + ] + } + + - do: + catch: /ingest plugin is disabled, cannot execute pipeline with id \[my_pipeline_1\]/ + ingest.index: + index: test + type: test + id: 1 + pipeline: "my_pipeline_1" + body: { + field1: "1", + field2: "2", + field3: "3" + } diff --git a/qa/ingest-with-mustache/build.gradle b/qa/ingest-with-mustache/build.gradle index 32ed5f8956f..8c0adbefaef 100644 --- a/qa/ingest-with-mustache/build.gradle +++ b/qa/ingest-with-mustache/build.gradle @@ -27,5 +27,6 @@ dependencies { integTest { cluster { plugin 'ingest', project(':plugins:ingest') + systemProperty 'es.node.ingest', 'true' } } diff --git a/settings.gradle b/settings.gradle index d42952095d3..7070abcac89 100644 --- a/settings.gradle +++ b/settings.gradle @@ -41,6 +41,7 @@ List projects = [ 'qa:smoke-test-multinode', 'qa:smoke-test-plugins', 'qa:ingest-with-mustache', + 'qa:ingest-disabled', 'qa:vagrant', ] From 8b671d7d9311f9f4643294280b3a410658c6b01c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 22 Dec 2015 22:52:23 +0100 Subject: [PATCH 140/347] added note --- .../src/main/java/org/elasticsearch/ingest/TemplateService.java | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java index c0505365e0a..c5bd3e97320 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java @@ -23,6 +23,7 @@ import java.util.Map; /** * Abstraction for the template engine. */ +// NOTE: this abstraction is added because the 'org.elasticsearch.ingest' has the requirement to be ES agnostic public interface TemplateService { Template compile(String template); From 1936d6118df79fa63e18014675facfc905877d5b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 23 Dec 2015 15:09:57 +0100 Subject: [PATCH 141/347] Added index template for the '.ingest' index and added logic that ensure the index template is installed. An index template for the '.ingest' index is required because: * We don't want arbitrary fields in pipeline documents, because that can turn into upgrade problems if we add more properties to the pipeline dsl. * We know what are the usages are of the '.ingest' index, so we can optimize for that and prevent that this index is used for different purposes. 
Closes #15001 --- .../plugin/ingest/IngestBootstrapper.java | 71 +++++++++-- plugins/ingest/src/main/resources/ingest.json | 31 +++++ .../ingest/IngestBootstrapperTests.java | 20 +++- .../plugin/ingest/IngestTemplateTests.java | 110 ++++++++++++++++++ 4 files changed, 221 insertions(+), 11 deletions(-) create mode 100644 plugins/ingest/src/main/resources/ingest.json create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java index 2a836d1ef2e..d07b3a8528a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java @@ -19,14 +19,19 @@ package org.elasticsearch.plugin.ingest; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; @@ -36,6 +41,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import 
java.io.InputStream; import java.util.Map; /** @@ -44,6 +50,9 @@ import java.util.Map; */ public class IngestBootstrapper extends AbstractLifecycleComponent implements ClusterStateListener { + static final String INGEST_INDEX_TEMPLATE_NAME = "ingest-template"; + + private Client client; private final ThreadPool threadPool; private final Environment environment; private final PipelineStore pipelineStore; @@ -86,6 +95,7 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl @Inject public void setClient(Client client) { + this.client = client; pipelineStore.setClient(client); } @@ -96,17 +106,25 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl @Override public void clusterChanged(ClusterChangedEvent event) { - if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + ClusterState state = event.state(); + if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { return; } if (pipelineStore.isStarted()) { - if (validClusterState(event.state()) == false) { - stopPipelineStore("cluster state invalid [" + event.state() + "]"); + if (validClusterState(state) == false) { + stopPipelineStore("cluster state invalid [" + state + "]"); + } + // We always check if the index template still exist, + // because it may have been removed via an api call and + // this allows us to add it back immediately: + // (this method gets invoked on each cluster state update) + if (isIngestTemplateInstallationRequired(state.metaData())) { + forkAndInstallIngestIndexTemplate(); } } else { - if (validClusterState(event.state())) { - startPipelineStore(); + if (validClusterState(state)) { + startPipelineStore(state.metaData()); } } } @@ -126,6 +144,39 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl } } + boolean isIngestTemplateInstallationRequired(MetaData metaData) { + if (metaData.getTemplates().containsKey(INGEST_INDEX_TEMPLATE_NAME)) { + 
logger.trace("not installing ingest index template, because it already is installed"); + return false; + } + return true; + } + + void forkAndInstallIngestIndexTemplate() { + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { + try { + installIngestIndexTemplate(); + } catch (IOException e) { + logger.debug("Failed to install .ingest index template", e); + } + }); + } + + void installIngestIndexTemplate() throws IOException { + logger.debug("installing .ingest index template..."); + try (InputStream is = IngestBootstrapper.class.getResourceAsStream("/ingest.json")) { + final byte[] template; + try (BytesStreamOutput out = new BytesStreamOutput()) { + Streams.copy(is, out); + template = out.bytes().toBytes(); + } + PutIndexTemplateRequest request = new PutIndexTemplateRequest(INGEST_INDEX_TEMPLATE_NAME); + request.source(template); + client.execute(PutIndexTemplateAction.INSTANCE, request).actionGet(); + logger.debug(".ingest index template has been installed"); + } + } + @Override protected void doStart() { } @@ -143,13 +194,19 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl } } - void startPipelineStore() { + void startPipelineStore(MetaData metaData) { threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { try { + // Before we start the pipeline store we check if the index template exists, + // if it doesn't we add it. 
If for some reason this fails we will try again later, + // but the pipeline store won't start before that happened + if (isIngestTemplateInstallationRequired(metaData)) { + installIngestIndexTemplate(); + } pipelineStore.start(); } catch (Exception e) { logger.warn("pipeline store failed to start, retrying...", e); - startPipelineStore(); + startPipelineStore(metaData); } }); } diff --git a/plugins/ingest/src/main/resources/ingest.json b/plugins/ingest/src/main/resources/ingest.json new file mode 100644 index 00000000000..46e1e184e14 --- /dev/null +++ b/plugins/ingest/src/main/resources/ingest.json @@ -0,0 +1,31 @@ +{ + "template": ".ingest", + "order": 2147483647, + "settings": { + "index.number_of_shards": 1, + "index.mapper.dynamic" : false + }, + "mappings": { + "pipeline": { + "dynamic" : "strict", + "_all" : { + "enabled" : false + }, + "properties": { + "processors": { + "type": "object", + "enabled" : false, + "dynamic" : true + }, + "on_failure": { + "type": "object", + "enabled" : false, + "dynamic" : true + }, + "description": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java index be0251726d0..c8eff4dcd30 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import 
org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -107,7 +108,10 @@ public class IngestBootstrapperTests extends ESTestCase { assertThat(e.getMessage(), equalTo("pipeline store isn't ready yet")); } - bootstrapper.startPipelineStore(); + MetaData metadata = MetaData.builder() + .put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)) + .build(); + bootstrapper.startPipelineStore(metadata); assertBusy(() -> { assertThat(store.isStarted(), is(true)); assertThat(store.get("1"), notNullValue()); @@ -123,7 +127,7 @@ public class IngestBootstrapperTests extends ESTestCase { hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); - bootstrapper.startPipelineStore(); + bootstrapper.startPipelineStore(metadata); assertBusy(() -> { assertThat(store.isStarted(), is(true)); assertThat(store.get("1"), notNullValue()); @@ -149,7 +153,10 @@ public class IngestBootstrapperTests extends ESTestCase { ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); csBuilder.blocks(ClusterBlocks.builder() .addGlobalBlock(randomBoolean() ? 
DiscoverySettings.NO_MASTER_BLOCK_WRITES : DiscoverySettings.NO_MASTER_BLOCK_ALL)); - ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); + ClusterState cs = csBuilder.metaData( + MetaData.builder() + .put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)) + ).build(); // We're not started and there is a no master block, doing nothing: bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); @@ -165,7 +172,9 @@ public class IngestBootstrapperTests extends ESTestCase { public void testPipelineStoreBootstrappingNoIngestIndex() throws Exception { ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); + ClusterState cs = csBuilder.metaData(MetaData.builder() + .put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME))) + .build(); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, times(1)).start(); } @@ -174,6 +183,7 @@ public class IngestBootstrapperTests extends ESTestCase { // .ingest index, but not all primary shards started: ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); MetaData.Builder metaDateBuilder = MetaData.builder(); + metaDateBuilder.put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); Settings settings = settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) @@ -206,6 +216,7 @@ public class IngestBootstrapperTests extends ESTestCase { // .ingest index, but not all primary shards started: ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); MetaData.Builder metaDateBuilder = MetaData.builder(); + metaDateBuilder.put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); 
Settings settings = settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) @@ -238,6 +249,7 @@ public class IngestBootstrapperTests extends ESTestCase { // .ingest index, but not all primary shards started: ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); MetaData.Builder metaDateBuilder = MetaData.builder(); + metaDateBuilder.put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); Settings settings = settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java new file mode 100644 index 00000000000..d7456cd9152 --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugin.ingest; + +import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.mockito.Mockito; + +import java.util.Collections; + +public class IngestTemplateTests extends ESSingleNodeTestCase { + + private IngestBootstrapper bootstrapper; + + @Override + protected boolean resetNodeAfterTest() { + return true; + } + + @Before + public void init() { + ThreadPool threadPool = Mockito.mock(ThreadPool.class); + Mockito.when(threadPool.executor(Mockito.anyString())).thenReturn(Runnable::run); + Environment environment = Mockito.mock(Environment.class); + ClusterService clusterService = Mockito.mock(ClusterService.class); + TransportService transportService = Mockito.mock(TransportService.class); + bootstrapper = new IngestBootstrapper( + Settings.EMPTY, threadPool, environment, clusterService, transportService, Collections.emptyMap() + ); + bootstrapper.setClient(client()); + } + + public void testInstallIndexTemplate() throws Exception { + verifyNoIndexTemplates(); + ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", clusterState, clusterState)); + verifyIngestIndexTemplateExist(); + } + + public void testInstallTemplateAfterItHasBeenRemoved() throws Exception { + verifyNoIndexTemplates(); + ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", 
clusterState, clusterState)); + verifyIngestIndexTemplateExist(); + + client().admin().indices().prepareDeleteTemplate(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); + verifyNoIndexTemplates(); + + clusterState = client().admin().cluster().prepareState().get().getState(); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", clusterState, clusterState)); + verifyIngestIndexTemplateExist(); + } + + public void testDoNotInstallTemplateBecauseIngestIndexTemplateAlreadyExists() throws Exception { + // add an empty template and check that it doesn't get overwritten: + client().admin().indices().preparePutTemplate(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).setTemplate(".ingest").get(); + GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); + assertThat(response.getIndexTemplates().size(), Matchers.equalTo(1)); + assertThat(response.getIndexTemplates().get(0).getOrder(), Matchers.equalTo(0)); + + ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + bootstrapper.clusterChanged(new ClusterChangedEvent("test", clusterState, clusterState)); + + response = client().admin().indices().prepareGetTemplates(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); + assertThat(response.getIndexTemplates().size(), Matchers.equalTo(1)); + assertThat("The empty index template shouldn't get overwritten", response.getIndexTemplates().get(0).getOrder(), Matchers.equalTo(0)); + assertThat("The empty index template shouldn't get overwritten", response.getIndexTemplates().get(0).getMappings().size(), Matchers.equalTo(0)); + } + + private static void verifyIngestIndexTemplateExist() { + GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); + assertThat(response.getIndexTemplates().size(), Matchers.equalTo(1)); + assertThat(response.getIndexTemplates().get(0).getName(), 
Matchers.equalTo(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); + assertThat(response.getIndexTemplates().get(0).getOrder(), Matchers.equalTo(Integer.MAX_VALUE)); + assertThat(response.getIndexTemplates().get(0).getMappings().size(), Matchers.equalTo(1)); + assertThat(response.getIndexTemplates().get(0).getMappings().get(PipelineStore.TYPE), Matchers.notNullValue()); + } + + private static void verifyNoIndexTemplates() { + GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get(); + assertThat(response.getIndexTemplates().size(), Matchers.equalTo(0)); + } + +} From 9fa3f469e95540a53651448fc3d4f1857de9f963 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 29 Dec 2015 13:00:27 +0100 Subject: [PATCH 142/347] fix test after merging in master branch --- .../elasticsearch/plugin/ingest/IngestBootstrapperTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java index c8eff4dcd30..3f966d09f39 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java @@ -192,7 +192,7 @@ public class IngestBootstrapperTests extends ESTestCase { metaDateBuilder.put(IndexMetaData.builder(PipelineStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(PipelineStore.INDEX); indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(PipelineStore.INDEX, 0)) - .addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.UNASSIGNED, 1, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) + 
.addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.INITIALIZING, 1, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) .build()); indexRoutingTableBuilder.addReplica(); routingTableBuilder.add(indexRoutingTableBuilder.build()); From f2fce0edca733b89edccf5a2dcf7615d8236d503 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 29 Dec 2015 13:07:23 +0100 Subject: [PATCH 143/347] Removed ingest runner main class, `gradle run --debug-jvm` should be used instead. --- .../ingest/src/test/java/IngestRunner.java | 54 ------------------- 1 file changed, 54 deletions(-) delete mode 100644 plugins/ingest/src/test/java/IngestRunner.java diff --git a/plugins/ingest/src/test/java/IngestRunner.java b/plugins/ingest/src/test/java/IngestRunner.java deleted file mode 100644 index 10f7662b4b0..00000000000 --- a/plugins/ingest/src/test/java/IngestRunner.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import org.elasticsearch.Version; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.DiscoveryService; -import org.elasticsearch.node.MockNode; -import org.elasticsearch.node.Node; -import org.elasticsearch.plugin.ingest.IngestPlugin; - -import java.util.Collections; -import java.util.concurrent.CountDownLatch; - -public class IngestRunner { - - public static void main(String[] args) throws Exception { - Settings.Builder settings = Settings.builder(); - settings.put("http.cors.enabled", "true"); - settings.put("http.cors.allow-origin", "*"); - settings.put("script.inline", "on"); - settings.put("cluster.name", IngestRunner.class.getSimpleName()); - settings.put(DiscoveryService.SETTING_DISCOVERY_SEED, 0L); - - final CountDownLatch latch = new CountDownLatch(1); - final Node node = new MockNode(settings.build(), Version.CURRENT, Collections.singleton(IngestPlugin.class)); - Runtime.getRuntime().addShutdownHook(new Thread() { - - @Override - public void run() { - node.close(); - latch.countDown(); - } - }); - node.start(); - latch.await(); - } - -} From 8de31b3c64a4145dab60fd5243a3c7032b0ddb26 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 4 Jan 2016 13:11:44 +0100 Subject: [PATCH 144/347] make index type and id optional in simulate api Default values are _index, _type and _id. 
Closes #15711 --- .../simulate/SimulatePipelineRequest.java | 6 ++--- .../test/ingest/70_simulate.yaml | 27 +++++++++++++++++++ 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java index 8e48a5e057a..2d81fa71f63 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -147,9 +147,9 @@ public class SimulatePipelineRequest extends ActionRequest { List ingestDocumentList = new ArrayList<>(); for (Map dataMap : docs) { Map document = ConfigurationUtils.readMap(dataMap, Fields.SOURCE); - IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(dataMap, MetaData.INDEX.getFieldName()), - ConfigurationUtils.readStringProperty(dataMap, MetaData.TYPE.getFieldName()), - ConfigurationUtils.readStringProperty(dataMap, MetaData.ID.getFieldName()), + IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(dataMap, MetaData.INDEX.getFieldName(), "_index"), + ConfigurationUtils.readStringProperty(dataMap, MetaData.TYPE.getFieldName(), "_type"), + ConfigurationUtils.readStringProperty(dataMap, MetaData.ID.getFieldName(), "_id"), ConfigurationUtils.readOptionalStringProperty(dataMap, MetaData.ROUTING.getFieldName()), ConfigurationUtils.readOptionalStringProperty(dataMap, MetaData.PARENT.getFieldName()), ConfigurationUtils.readOptionalStringProperty(dataMap, MetaData.TIMESTAMP.getFieldName()), diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml index 256587f6549..932f67353cf 100644 --- 
a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml @@ -69,6 +69,33 @@ } - length: { docs: 1 } +--- +"Test simulate without index type and id": + - do: + ingest.simulate: + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field2", + "value" : "_value" + } + } + ] + }, + "docs": [ + { + "_source": { + "foo": "bar" + } + } + ] + } + - length: { docs: 1 } + --- "Test simulate with provided pipeline definition with on_failure block": - do: From f34ce9ddf4dcede79f0dfad51ad8f3c3a83bc069 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 23 Dec 2015 15:16:24 -0800 Subject: [PATCH 145/347] add on_failure context to ingest metadata during executeOnFailure --- .../ingest/processor/CompoundProcessor.java | 35 +++++++---- .../processor/CompoundProcessorTests.java | 58 +++++++++++++++++-- .../10_pipeline_with_mustache_templates.yaml | 53 ++++++++++++++++- 3 files changed, 126 insertions(+), 20 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java index dbaad2e2313..42bb567da46 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java @@ -25,6 +25,7 @@ import org.elasticsearch.ingest.IngestDocument; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; /** @@ -32,6 +33,8 @@ import java.util.stream.Collectors; * "onFailureProcessors" when any of the processors throw an {@link Exception}. 
*/ public class CompoundProcessor implements Processor { + static final String ON_FAILURE_MESSAGE_FIELD = "on_failure_message"; + static final String ON_FAILURE_PROCESSOR_FIELD = "on_failure_processor"; private final List processors; private final List onFailureProcessors; @@ -59,23 +62,31 @@ public class CompoundProcessor implements Processor { @Override public void execute(IngestDocument ingestDocument) throws Exception { - try { - for (Processor processor : processors) { + for (Processor processor : processors) { + try { processor.execute(ingestDocument); - } - } catch (Exception e) { - if (onFailureProcessors.isEmpty()) { - throw e; - } else { - executeOnFailure(ingestDocument); + } catch (Exception e) { + if (onFailureProcessors.isEmpty()) { + throw e; + } else { + executeOnFailure(ingestDocument, e, processor.getType()); + } + return; } } - } - void executeOnFailure(IngestDocument ingestDocument) throws Exception { - for (Processor processor : onFailureProcessors) { - processor.execute(ingestDocument); + void executeOnFailure(IngestDocument ingestDocument, Exception cause, String failedProcessorType) throws Exception { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + try { + ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getMessage()); + ingestMetadata.put(ON_FAILURE_PROCESSOR_FIELD, failedProcessorType); + for (Processor processor : onFailureProcessors) { + processor.execute(ingestDocument); + } + } finally { + ingestMetadata.remove(ON_FAILURE_MESSAGE_FIELD); + ingestMetadata.remove(ON_FAILURE_PROCESSOR_FIELD); } } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java index efb49e4c6dd..85bbee1e6d3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java 
@@ -22,17 +22,21 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import org.mockito.stubbing.Answer; import java.util.Arrays; import java.util.HashMap; +import java.util.Map; import static org.elasticsearch.mock.orig.Mockito.verify; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; +import static org.mockito.Mockito.when; public class CompoundProcessorTests extends ESTestCase { private IngestDocument ingestDocument; @@ -61,6 +65,7 @@ public class CompoundProcessorTests extends ESTestCase { public void testSingleProcessorWithException() throws Exception { Processor processor = mock(Processor.class); + when(processor.getType()).thenReturn("failed_processor"); doThrow(new RuntimeException("error")).doNothing().when(processor).execute(ingestDocument); CompoundProcessor compoundProcessor = new CompoundProcessor(processor); assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); @@ -72,29 +77,70 @@ public class CompoundProcessorTests extends ESTestCase { } catch (Exception e) { assertThat(e.getMessage(), equalTo("error")); } + verify(processor, times(1)).execute(ingestDocument); } public void testSingleProcessorWithOnFailureProcessor() throws Exception { + Exception error = new RuntimeException("error"); + Processor processor = mock(Processor.class); - doThrow(new RuntimeException("error")).doNothing().when(processor).execute(ingestDocument); + when(processor.getType()).thenReturn("first"); + doThrow(error).doNothing().when(processor).execute(ingestDocument); + Processor processorNext = mock(Processor.class); - CompoundProcessor compoundProcessor = new CompoundProcessor(Arrays.asList(processor), 
Arrays.asList(processorNext)); + Answer checkMetadataAnswer = invocationOnMock -> { + @SuppressWarnings("unchecked") + IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.size(), equalTo(2)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("first")); + return null; + }; + doAnswer(checkMetadataAnswer).when(processorNext).execute(ingestDocument); + + CompoundProcessor compoundProcessor = spy(new CompoundProcessor(Arrays.asList(processor), Arrays.asList(processorNext))); assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); assertThat(compoundProcessor.getOnFailureProcessors().size(), equalTo(1)); assertThat(compoundProcessor.getOnFailureProcessors().get(0), equalTo(processorNext)); compoundProcessor.execute(ingestDocument); + verify(compoundProcessor).executeOnFailure(ingestDocument, error, "first"); verify(processor, times(1)).execute(ingestDocument); verify(processorNext, times(1)).execute(ingestDocument); + } public void testSingleProcessorWithNestedFailures() throws Exception { + Exception error = new RuntimeException("error"); Processor processor = mock(Processor.class); - doThrow(new RuntimeException("error")).doNothing().when(processor).execute(ingestDocument); + when(processor.getType()).thenReturn("first"); + doThrow(error).doNothing().when(processor).execute(ingestDocument); Processor processorToFail = mock(Processor.class); - doThrow(new RuntimeException("error")).doNothing().when(processorToFail).execute(ingestDocument); + Answer checkMetadataAnswer = invocationOnMock -> { + @SuppressWarnings("unchecked") + IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; + Map ingestMetadata = 
ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.size(), equalTo(2)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("first")); + return null; + }; + doAnswer(checkMetadataAnswer).when(processorToFail).execute(ingestDocument); + when(processorToFail.getType()).thenReturn("second"); + doThrow(error).doNothing().when(processorToFail).execute(ingestDocument); Processor lastProcessor = mock(Processor.class); + Answer checkLastMetadataAnswer = invocationOnMock -> { + @SuppressWarnings("unchecked") + IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.size(), equalTo(2)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("second")); + return null; + }; + doAnswer(checkLastMetadataAnswer).when(lastProcessor).execute(ingestDocument); CompoundProcessor innerCompoundOnFailProcessor = new CompoundProcessor(Arrays.asList(processorToFail), Arrays.asList(lastProcessor)); CompoundProcessor compoundOnFailProcessor = spy(innerCompoundOnFailProcessor); @@ -108,10 +154,10 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(compoundProcessor.getOnFailureProcessors().get(0), equalTo(compoundOnFailProcessor)); compoundProcessor.execute(ingestDocument); verify(processor, times(1)).execute(ingestDocument); - verify(compoundProcessor, times(1)).executeOnFailure(ingestDocument); + verify(compoundProcessor, times(1)).executeOnFailure(ingestDocument, error, "first"); verify(compoundOnFailProcessor, times(1)).execute(ingestDocument); verify(processorToFail, times(1)).execute(ingestDocument); - verify(compoundOnFailProcessor, times(1)).executeOnFailure(ingestDocument); + 
verify(compoundOnFailProcessor, times(1)).executeOnFailure(ingestDocument, error, "second"); verify(lastProcessor, times(1)).execute(ingestDocument); } } diff --git a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml index 9c44d86feaf..3421fc7fd00 100644 --- a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml +++ b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml @@ -1,5 +1,5 @@ --- -"Test metadata templateing": +"Test metadata templating": - do: cluster.health: wait_for_status: green @@ -45,7 +45,7 @@ - match: { _source.metadata: ["test", "test", "1"] } --- -"Test templateing": +"Test templating": - do: cluster.health: wait_for_status: green @@ -169,3 +169,52 @@ id: 1 - length: { _source: 1 } - match: { _source.field_to_remove: "field2" } + +--- +"Test on_failure metadata context templating": + - do: + cluster.health: + wait_for_status: green + + - do: + ingest.put_pipeline: + id: "my_handled_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove" : { + "field" : "field_to_remove", + "on_failure" : [ + { + "set" : { + "field" : "error", + "value" : "processor [{{ _ingest.on_failure_processor }}]: {{ _ingest.on_failure_message }}" + } + } + ] + } + } + ] + } + - match: { _id: "my_handled_pipeline" } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline: "my_handled_pipeline" + body: { + do_nothing: "foo", + } + + - do: + get: + index: test + type: test + id: 1 + - length: { _source: 2 } + - match: { _source.do_nothing: "foo" } + - match: { _source.error: "processor [remove]: field [field_to_remove] not present as part of path [field_to_remove]" } From 9dd54f2b4fd759bc73ff5884ad3d658f7689db69 
Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 23 Dec 2015 16:20:11 -0800 Subject: [PATCH 146/347] Introduce a fail processor --- docs/plugins/ingest.asciidoc | 14 ++++ .../elasticsearch/ingest/IngestDocument.java | 7 +- .../ingest/processor/fail/FailProcessor.java | 72 +++++++++++++++++++ .../fail/FailProcessorException.java | 31 ++++++++ .../plugin/ingest/IngestModule.java | 2 + .../fail/FailProcessorFactoryTests.java | 58 +++++++++++++++ .../processor/fail/FailProcessorTests.java | 43 +++++++++++ .../rest-api-spec/test/ingest/90_fail.yaml | 68 ++++++++++++++++++ 8 files changed, 293 insertions(+), 2 deletions(-) create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessor.java create mode 100644 plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessorException.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorFactoryTests.java create mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorTests.java create mode 100644 plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 74f1d66b847..ca841b0d2b2 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -445,6 +445,20 @@ An example that adds the parsed date to the `timestamp` field based on the `init } -------------------------------------------------- +==== Fail processor +The Fail Processor is used to raise an exception. This is useful for when +a user expects a pipeline to fail and wishes to relay a specific message +to the requester. 
+ +[source,js] +-------------------------------------------------- +{ + "fail": { + "message": "an error message" + } +} +-------------------------------------------------- + === Accessing data in pipelines Processors in pipelines have read and write access to documents that pass through the pipeline. diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 993f6e2fa91..c6356867bd9 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -165,8 +165,7 @@ public final class IngestDocument { * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. */ public void removeField(TemplateService.Template fieldPathTemplate) { - Map model = createTemplateModel(); - removeField(fieldPathTemplate.execute(model)); + removeField(renderTemplate(fieldPathTemplate)); } /** @@ -422,6 +421,10 @@ public final class IngestDocument { throw new IllegalArgumentException("field [" + path + "] of type [" + object.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); } + public String renderTemplate(TemplateService.Template template) { + return template.execute(createTemplateModel()); + } + private Map createTemplateModel() { Map model = new HashMap<>(sourceAndMetadata); model.put(SOURCE_KEY, sourceAndMetadata); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessor.java new file mode 100644 index 00000000000..e4b8e23fd8f --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessor.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.fail; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.processor.Processor; + +import java.util.Map; + +/** + * Processor that raises a runtime exception with a provided + * error message. 
+ */ +public class FailProcessor implements Processor { + + public static final String TYPE = "fail"; + + private final TemplateService.Template message; + + FailProcessor(TemplateService.Template message) { + this.message = message; + } + + public TemplateService.Template getMessage() { + return message; + } + + @Override + public void execute(IngestDocument document) { + throw new FailProcessorException(document.renderTemplate(message)); + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + + private final TemplateService templateService; + + public Factory(TemplateService templateService) { + this.templateService = templateService; + } + + @Override + public FailProcessor create(Map config) throws Exception { + String message = ConfigurationUtils.readStringProperty(config, "message"); + return new FailProcessor(templateService.compile(message)); + } + } +} + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessorException.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessorException.java new file mode 100644 index 00000000000..8c451c63864 --- /dev/null +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessorException.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.fail; + +/** + * Exception class thrown by {@link FailProcessor}. + */ +public class FailProcessorException extends RuntimeException { + + public FailProcessorException(String message) { + super(message); + } +} + diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index bc083938ec7..bb987e5fac1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.ingest.processor.append.AppendProcessor; import org.elasticsearch.ingest.processor.convert.ConvertProcessor; import org.elasticsearch.ingest.processor.date.DateProcessor; +import org.elasticsearch.ingest.processor.fail.FailProcessor; import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; import org.elasticsearch.ingest.processor.grok.GrokProcessor; import org.elasticsearch.ingest.processor.gsub.GsubProcessor; @@ -72,6 +73,7 @@ public class IngestModule extends AbstractModule { addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + 
addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); for (Map.Entry entry : processorFactoryProviders.entrySet()) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorFactoryTests.java new file mode 100644 index 00000000000..346409c72fb --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorFactoryTests.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.processor.fail; + +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class FailProcessorFactoryTests extends ESTestCase { + + private FailProcessor.Factory factory; + + @Before + public void init() { + factory = new FailProcessor.Factory(TestTemplateService.instance()); + } + + public void testCreate() throws Exception { + Map config = new HashMap<>(); + config.put("message", "error"); + FailProcessor failProcessor = factory.create(config); + assertThat(failProcessor.getMessage().execute(Collections.emptyMap()), equalTo("error")); + } + + public void testCreateMissingMessageField() throws Exception { + Map config = new HashMap<>(); + try { + factory.create(config); + fail("factory create should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("required property [message] is missing")); + } + } + +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorTests.java new file mode 100644 index 00000000000..51e33034a4b --- /dev/null +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor.fail; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class FailProcessorTests extends ESTestCase { + + public void test() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String message = randomAsciiOfLength(10); + Processor processor = new FailProcessor(new TestTemplateService.MockTemplate(message)); + try { + processor.execute(ingestDocument); + fail("fail processor should throw an exception"); + } catch (FailProcessorException e) { + assertThat(e.getMessage(), equalTo(message)); + } + } +} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml new file mode 100644 index 00000000000..d416e84100a --- /dev/null +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml @@ -0,0 +1,68 @@ +--- +"Test Fail Processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "fail" : { + "message" : "error_message" + } + } + ] + } + - match: { _id: "my_pipeline" } + + - do: + catch: request + ingest.index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {} + +--- +"Test 
fail with on_failure": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "fail" : { + "message" : "error", + "on_failure" : [ + { + "set" : { + "field" : "error_message", + "value" : "fail_processor_ran" + } + } + ] + } + } + ] + } + - match: { _id: "my_pipeline" } + + - do: + ingest.index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.error_message: "fail_processor_ran" } + From 183386173cf3d6278261db06437b6e64a9234982 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 4 Jan 2016 16:10:42 -0800 Subject: [PATCH 147/347] cleanup simulate test and add docs --- docs/plugins/ingest.asciidoc | 275 ++++++++++++++++++ .../test/ingest/70_simulate.yaml | 2 +- 2 files changed, 276 insertions(+), 1 deletion(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index ca841b0d2b2..ed9660ac3af 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -631,3 +631,278 @@ The delete pipeline api deletes pipelines by id. DELETE _ingest/pipeline/my-pipeline-id -------------------------------------------------- // AUTOSENSE + +==== Simulate pipeline API + +The simulate pipeline api executes a specific pipeline against +the set of documents provided in the body of the request. + +A simulate request may call upon an existing pipeline to be executed +against the provided documents, or supply a pipeline definition in +the body of the request. + +Here is the structure of a simulate request with a provided pipeline: + +[source,js] +-------------------------------------------------- +POST _ingest/pipeline/_simulate +{ + "pipeline" : { + // pipeline definition here + }, + "docs" : [ + { /** first document **/ }, + { /** second document **/ }, + // ... 
+ ] +} +-------------------------------------------------- + +Here is the structure of a simulate request against a pre-existing pipeline: + +[source,js] +-------------------------------------------------- +POST _ingest/pipeline/my-pipeline-id/_simulate +{ + "docs" : [ + { /** first document **/ }, + { /** second document **/ }, + // ... + ] +} +-------------------------------------------------- + + +Here is an example simulate request with a provided pipeline and its response: + +[source,js] +-------------------------------------------------- +POST _ingest/pipeline/_simulate +{ + "pipeline" : + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field2", + "value" : "_value" + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + }, + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "rab" + } + } + ] +} +-------------------------------------------------- +// AUTOSENSE + +response: + +[source,js] +-------------------------------------------------- +{ + "docs": [ + { + "doc": { + "_id": "id", + "_ttl": null, + "_parent": null, + "_index": "index", + "_routing": null, + "_type": "type", + "_timestamp": null, + "_source": { + "field2": "_value", + "foo": "bar" + }, + "_ingest": { + "timestamp": "2016-01-04T23:53:27.186+0000" + } + } + }, + { + "doc": { + "_id": "id", + "_ttl": null, + "_parent": null, + "_index": "index", + "_routing": null, + "_type": "type", + "_timestamp": null, + "_source": { + "field2": "_value", + "foo": "rab" + }, + "_ingest": { + "timestamp": "2016-01-04T23:53:27.186+0000" + } + } + } + ] +} +-------------------------------------------------- + +It is often useful to see how each processor affects the ingest document +as it is passed through the pipeline. 
To see the intermediate results of +each processor in the simulat request, a `verbose` parameter may be added +to the request + +Here is an example verbose request and its response: + + +[source,js] +-------------------------------------------------- +POST _ingest/pipeline/_simulate?verbose +{ + "pipeline" : + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field2", + "value" : "_value2" + } + }, + { + "set" : { + "field" : "field3", + "value" : "_value3" + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + }, + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "rab" + } + } + ] +} +-------------------------------------------------- +// AUTOSENSE + +response: + +[source,js] +-------------------------------------------------- +{ + "docs": [ + { + "processor_results": [ + { + "processor_id": "processor[set]-0", + "doc": { + "_id": "id", + "_ttl": null, + "_parent": null, + "_index": "index", + "_routing": null, + "_type": "type", + "_timestamp": null, + "_source": { + "field2": "_value2", + "foo": "bar" + }, + "_ingest": { + "timestamp": "2016-01-05T00:02:51.383+0000" + } + } + }, + { + "processor_id": "processor[set]-1", + "doc": { + "_id": "id", + "_ttl": null, + "_parent": null, + "_index": "index", + "_routing": null, + "_type": "type", + "_timestamp": null, + "_source": { + "field3": "_value3", + "field2": "_value2", + "foo": "bar" + }, + "_ingest": { + "timestamp": "2016-01-05T00:02:51.383+0000" + } + } + } + ] + }, + { + "processor_results": [ + { + "processor_id": "processor[set]-0", + "doc": { + "_id": "id", + "_ttl": null, + "_parent": null, + "_index": "index", + "_routing": null, + "_type": "type", + "_timestamp": null, + "_source": { + "field2": "_value2", + "foo": "rab" + }, + "_ingest": { + "timestamp": "2016-01-05T00:02:51.384+0000" + } + } + }, + { + "processor_id": "processor[set]-1", + "doc": { + "_id": "id", + 
"_ttl": null, + "_parent": null, + "_index": "index", + "_routing": null, + "_type": "type", + "_timestamp": null, + "_source": { + "field3": "_value3", + "field2": "_value2", + "foo": "rab" + }, + "_ingest": { + "timestamp": "2016-01-05T00:02:51.384+0000" + } + } + } + ] + } + ] +} +-------------------------------------------------- diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml index 932f67353cf..9947129788b 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml @@ -207,7 +207,7 @@ - length: { docs.0.processor_results.1.doc._source: 3 } - match: { docs.0.processor_results.1.doc._source.foo: "bar" } - match: { docs.0.processor_results.1.doc._source.field2: "_value" } - - match: { docs.0.processor_results.1.doc._source..field3: "third_val" } + - match: { docs.0.processor_results.1.doc._source.field3: "third_val" } - length: { docs.0.processor_results.1.doc._ingest: 1 } - is_true: docs.0.processor_results.1.doc._ingest.timestamp From 15ecf76d54b7956a2c67aefdd341481fffc52035 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 5 Jan 2016 09:54:13 -0800 Subject: [PATCH 148/347] [doc] update convert processor example format --- docs/plugins/ingest.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index ed9660ac3af..953683e900f 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -97,7 +97,8 @@ false if its string value is equal to `false` (ignore case) and it will throw ex -------------------------------------------------- { "convert": { - "foo": "integer" + "field" : "foo" + "type": "integer" } } -------------------------------------------------- From 11d4417251760d5cd362c20413ef3bf241a31378 Mon Sep 17 00:00:00 2001 From: 
Tal Levy Date: Tue, 5 Jan 2016 11:45:52 -0800 Subject: [PATCH 149/347] [doc] update set and append processor doc examples --- docs/plugins/ingest.asciidoc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 953683e900f..e41f9bbd431 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -38,7 +38,8 @@ its value will be replaced with the provided one. -------------------------------------------------- { "set": { - "field1": 582.1 + "field": "field1", + "value": 582.1 } } -------------------------------------------------- @@ -53,7 +54,8 @@ Accepts a single value or an array of values. -------------------------------------------------- { "append": { - "field1": ["item2", "item3", "item4"] + "field": "field1" + "value": ["item2", "item3", "item4"] } } -------------------------------------------------- From 1e8995d984b782d3439ae4f18e082a622ae866e4 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 12:36:27 +0100 Subject: [PATCH 150/347] move all the processors under the same package org.elasticsearch.ingest.processor --- .../{append => }/AppendProcessor.java | 4 +-- .../{convert => }/ConvertProcessor.java | 4 +-- .../processor/{date => }/DateFormat.java | 2 +- .../processor/{date => }/DateProcessor.java | 4 +-- .../processor/{fail => }/FailProcessor.java | 4 +-- .../{fail => }/FailProcessorException.java | 2 +- .../processor/{geoip => }/GeoIpProcessor.java | 3 +- .../ingest/processor/{grok => }/Grok.java | 2 +- .../processor/{grok => }/GrokMatchGroup.java | 2 +- .../processor/{grok => }/GrokProcessor.java | 4 +-- .../processor/{gsub => }/GsubProcessor.java | 4 +-- .../processor/{join => }/JoinProcessor.java | 4 +-- .../{lowercase => }/LowercaseProcessor.java | 4 +-- .../{remove => }/RemoveProcessor.java | 4 +-- .../{rename => }/RenameProcessor.java | 4 +-- .../processor/{set => }/SetProcessor.java | 4 +-- .../processor/{split => }/SplitProcessor.java | 
4 +-- .../processor/{trim => }/TrimProcessor.java | 4 +-- .../{uppercase => }/UppercaseProcessor.java | 4 +-- .../plugin/ingest/IngestModule.java | 30 +++++++++---------- .../AppendProcessorFactoryTests.java | 2 +- .../{append => }/AppendProcessorTests.java | 3 +- .../ConvertProcessorFactoryTests.java | 3 +- .../{convert => }/ConvertProcessorTests.java | 5 ++-- .../processor/{date => }/DateFormatTests.java | 3 +- .../{date => }/DateProcessorFactoryTests.java | 3 +- .../{date => }/DateProcessorTests.java | 2 +- .../{fail => }/FailProcessorFactoryTests.java | 3 +- .../{fail => }/FailProcessorTests.java | 4 ++- .../GeoIpProcessorFactoryTests.java | 3 +- .../{geoip => }/GeoIpProcessorTests.java | 3 +- .../{grok => }/GrokProcessorFactoryTests.java | 3 +- .../{grok => }/GrokProcessorTests.java | 4 ++- .../processor/{grok => }/GrokTests.java | 4 ++- .../{gsub => }/GsubProcessorFactoryTests.java | 3 +- .../{gsub => }/GsubProcessorTests.java | 3 +- .../{join => }/JoinProcessorFactoryTests.java | 2 +- .../{join => }/JoinProcessorTests.java | 3 +- .../LowercaseProcessorFactoryTests.java | 2 +- .../LowercaseProcessorTests.java | 5 +--- .../RemoveProcessorFactoryTests.java | 2 +- .../{remove => }/RemoveProcessorTests.java | 3 +- .../RenameProcessorFactoryTests.java | 2 +- .../{rename => }/RenameProcessorTests.java | 3 +- .../{set => }/SetProcessorFactoryTests.java | 2 +- .../{set => }/SetProcessorTests.java | 3 +- .../SplitProcessorFactoryTests.java | 2 +- .../{split => }/SplitProcessorTests.java | 3 +- .../{trim => }/TrimProcessorFactoryTests.java | 2 +- .../{trim => }/TrimProcessorTests.java | 5 +--- .../UppercaseProcessorFactoryTests.java | 2 +- .../UppercaseProcessorTests.java | 5 +--- .../ingest/PipelineExecutionServiceTests.java | 2 +- .../IngestMustacheRemoveProcessorIT.java | 2 +- .../ingest/IngestMustacheSetProcessorIT.java | 2 +- 55 files changed, 87 insertions(+), 113 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{append => 
}/AppendProcessor.java (93%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{convert => }/ConvertProcessor.java (96%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{date => }/DateFormat.java (98%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{date => }/DateProcessor.java (96%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{fail => }/FailProcessor.java (92%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{fail => }/FailProcessorException.java (95%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{geoip => }/GeoIpProcessor.java (99%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{grok => }/Grok.java (99%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{grok => }/GrokMatchGroup.java (97%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{grok => }/GrokProcessor.java (96%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{gsub => }/GsubProcessor.java (94%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{join => }/JoinProcessor.java (93%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{lowercase => }/LowercaseProcessor.java (92%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{remove => }/RemoveProcessor.java (92%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{rename => }/RenameProcessor.java (94%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{set => }/SetProcessor.java (93%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{split => }/SplitProcessor.java (93%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{trim => }/TrimProcessor.java (92%) rename plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/{uppercase => }/UppercaseProcessor.java (92%) rename 
plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{append => }/AppendProcessorFactoryTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{append => }/AppendProcessorTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{convert => }/ConvertProcessorFactoryTests.java (96%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{convert => }/ConvertProcessorTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{date => }/DateFormatTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{date => }/DateProcessorFactoryTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{date => }/DateProcessorTests.java (99%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{fail => }/FailProcessorFactoryTests.java (95%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{fail => }/FailProcessorTests.java (90%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{geoip => }/GeoIpProcessorFactoryTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{geoip => }/GeoIpProcessorTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{grok => }/GrokProcessorFactoryTests.java (96%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{grok => }/GrokProcessorTests.java (95%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{grok => }/GrokTests.java (99%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{gsub => }/GsubProcessorFactoryTests.java (96%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{gsub => }/GsubProcessorTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{join => }/JoinProcessorFactoryTests.java (98%) rename 
plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{join => }/JoinProcessorTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{lowercase => }/LowercaseProcessorFactoryTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{lowercase => }/LowercaseProcessorTests.java (85%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{remove => }/RemoveProcessorFactoryTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{remove => }/RemoveProcessorTests.java (95%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{rename => }/RenameProcessorFactoryTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{rename => }/RenameProcessorTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{set => }/SetProcessorFactoryTests.java (98%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{set => }/SetProcessorTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{split => }/SplitProcessorFactoryTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{split => }/SplitProcessorTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{trim => }/TrimProcessorFactoryTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{trim => }/TrimProcessorTests.java (89%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{uppercase => }/UppercaseProcessorFactoryTests.java (97%) rename plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/{uppercase => }/UppercaseProcessorTests.java (85%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/append/AppendProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java similarity index 93% rename 
from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/append/AppendProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java index 3b1c3a7a68b..f8d53f07af3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/append/AppendProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java @@ -17,13 +17,11 @@ * under the License. */ -package org.elasticsearch.ingest.processor.append; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java index 4ef2663101e..b3c287dc9ba 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/convert/ConvertProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java @@ -17,11 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.convert; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.ArrayList; import java.util.List; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateFormat.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java similarity index 98% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateFormat.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java index 224d43f238f..5748d3788ab 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateFormat.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.date; +package org.elasticsearch.ingest.processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 0f2be638710..6fce95436f9 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/date/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -17,11 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.date; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java index e4b8e23fd8f..727736fd283 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java @@ -17,12 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.fail; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessorException.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java similarity index 95% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessorException.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java index 8c451c63864..846ba40c136 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/fail/FailProcessorException.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.fail; +package org.elasticsearch.ingest.processor; /** * Exception class thrown by {@link FailProcessor}. diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java similarity index 99% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java index 30ca9541a4d..bfeaad4e15e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.geoip; +package org.elasticsearch.ingest.processor; import com.maxmind.geoip2.DatabaseReader; import com.maxmind.geoip2.exception.AddressNotFoundException; @@ -32,7 +32,6 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.Processor; import java.io.Closeable; import java.io.IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Grok.java similarity index 99% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Grok.java index bfad4e7e319..bb88aaa3436 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/Grok.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Grok.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.grok; +package org.elasticsearch.ingest.processor; import org.jcodings.specific.UTF8Encoding; import org.joni.Matcher; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokMatchGroup.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokMatchGroup.java index f44d758d7b8..3371a79c7e5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokMatchGroup.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokMatchGroup.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.grok; +package org.elasticsearch.ingest.processor; final class GrokMatchGroup { private static final String DEFAULT_TYPE = "string"; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java index 562a86a402a..60c7cee7ead 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/grok/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java @@ -17,11 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest.processor.grok; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.io.BufferedReader; import java.io.IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java index e15f99d9fed..897592a4f15 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/gsub/GsubProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java @@ -17,11 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.gsub; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.Map; import java.util.regex.Matcher; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java similarity index 93% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java index 542c536786b..fdc9aade1f8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/join/JoinProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java @@ -17,11 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest.processor.join; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.List; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java index e1d6cf222ad..e3e49f77c59 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java @@ -17,9 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.lowercase; - -import org.elasticsearch.ingest.processor.AbstractStringProcessor; +package org.elasticsearch.ingest.processor; import java.util.Locale; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java index 9bdde91f38c..bbcf3e3f3d5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/remove/RemoveProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java @@ -17,12 +17,10 @@ * under the License. */ -package org.elasticsearch.ingest.processor.remove; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java index bc340db8275..9aa76ab2d69 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/rename/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java @@ -17,11 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.rename; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java similarity index 93% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java index c872c479f2b..2bf69b73772 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/set/SetProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java @@ -17,13 +17,11 @@ * under the License. */ -package org.elasticsearch.ingest.processor.set; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java similarity index 93% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java index a95c70cb66f..8838a384530 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/split/SplitProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java @@ -17,11 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.split; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; import java.util.Arrays; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java index 8ed3afefe88..d1325510cd9 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/trim/TrimProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java @@ -17,9 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.trim; - -import org.elasticsearch.ingest.processor.AbstractStringProcessor; +package org.elasticsearch.ingest.processor; /** * Processor that trims the content of string fields. diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java index a599f6cf32a..0f1757eaeec 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java @@ -17,9 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.uppercase; - -import org.elasticsearch.ingest.processor.AbstractStringProcessor; +package org.elasticsearch.ingest.processor; import java.util.Locale; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index bb987e5fac1..a63ebdf1ed5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -21,21 +21,21 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; -import org.elasticsearch.ingest.processor.append.AppendProcessor; -import org.elasticsearch.ingest.processor.convert.ConvertProcessor; -import org.elasticsearch.ingest.processor.date.DateProcessor; -import org.elasticsearch.ingest.processor.fail.FailProcessor; -import org.elasticsearch.ingest.processor.geoip.GeoIpProcessor; -import org.elasticsearch.ingest.processor.grok.GrokProcessor; -import org.elasticsearch.ingest.processor.gsub.GsubProcessor; -import org.elasticsearch.ingest.processor.join.JoinProcessor; -import org.elasticsearch.ingest.processor.lowercase.LowercaseProcessor; -import org.elasticsearch.ingest.processor.remove.RemoveProcessor; -import org.elasticsearch.ingest.processor.rename.RenameProcessor; -import org.elasticsearch.ingest.processor.set.SetProcessor; -import org.elasticsearch.ingest.processor.split.SplitProcessor; -import org.elasticsearch.ingest.processor.trim.TrimProcessor; -import org.elasticsearch.ingest.processor.uppercase.UppercaseProcessor; +import org.elasticsearch.ingest.processor.AppendProcessor; +import org.elasticsearch.ingest.processor.ConvertProcessor; +import org.elasticsearch.ingest.processor.DateProcessor; +import org.elasticsearch.ingest.processor.FailProcessor; +import 
org.elasticsearch.ingest.processor.GeoIpProcessor; +import org.elasticsearch.ingest.processor.GrokProcessor; +import org.elasticsearch.ingest.processor.GsubProcessor; +import org.elasticsearch.ingest.processor.JoinProcessor; +import org.elasticsearch.ingest.processor.LowercaseProcessor; +import org.elasticsearch.ingest.processor.RemoveProcessor; +import org.elasticsearch.ingest.processor.RenameProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; +import org.elasticsearch.ingest.processor.SplitProcessor; +import org.elasticsearch.ingest.processor.TrimProcessor; +import org.elasticsearch.ingest.processor.UppercaseProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; import java.util.HashMap; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java index 7ebb424e2d4..09ce465c002 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.append; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java index 787a698e76f..df9cc4074c4 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/append/AppendProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java @@ -17,13 +17,14 @@ * under the License. */ -package org.elasticsearch.ingest.processor.append; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.processor.AppendProcessor; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java similarity index 96% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java index f1b6fce6ced..4feb20d41c4 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java @@ -17,8 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest.processor.convert; +package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.processor.ConvertProcessor; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java index 062f72f64bc..77ecd5056c3 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/convert/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.convert; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.ConvertProcessor; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; @@ -31,7 +32,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.ingest.processor.convert.ConvertProcessor.Type; +import static org.elasticsearch.ingest.processor.ConvertProcessor.Type; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateFormatTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateFormatTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java index a3f5c337fca..c53b420b244 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateFormatTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java @@ -17,8 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.date; +package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.processor.DateFormat; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java index 08b546dead4..4f62461ce46 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java @@ -17,8 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest.processor.date; +package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.processor.DateProcessor; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java similarity index 99% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java index 43724480004..46a81ca48ba 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/date/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.date; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java similarity index 95% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java index 346409c72fb..4793cb62c25 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java @@ -17,9 +17,10 @@ * under the License. */ -package org.elasticsearch.ingest.processor.fail; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.processor.FailProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java similarity index 90% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java index 51e33034a4b..c79cb7b73e5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/fail/FailProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java @@ -17,11 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.fail; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.processor.FailProcessor; +import org.elasticsearch.ingest.processor.FailProcessorException; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorFactoryTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorFactoryTests.java index 9716bcbd3c0..b19168ec920 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorFactoryTests.java @@ -17,8 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.geoip; +package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.processor.GeoIpProcessor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.junit.Before; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java index 479541b0f32..4f830fe5559 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java @@ -17,11 +17,12 @@ * under the License. */ -package org.elasticsearch.ingest.processor.geoip; +package org.elasticsearch.ingest.processor; import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.GeoIpProcessor; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorFactoryTests.java similarity index 96% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorFactoryTests.java index e8d44d392d7..833228d18d7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorFactoryTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorFactoryTests.java @@ -17,8 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest.processor.grok; +package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.processor.GrokProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java similarity index 95% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java index 14164523726..c554b31aaf1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java @@ -17,10 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.grok; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.Grok; +import org.elasticsearch.ingest.processor.GrokProcessor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokTests.java similarity index 99% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokTests.java index c253f7dfd84..7565352885b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/grok/GrokTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokTests.java @@ -17,8 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.grok; +package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.processor.Grok; +import org.elasticsearch.ingest.processor.GrokProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java similarity index 96% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java index bcf00244809..7a7377b9045 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java @@ -17,8 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest.processor.gsub; +package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.processor.GsubProcessor; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java index f64d271539f..76e00b218e1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/gsub/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.gsub; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.processor.GsubProcessor; import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java index deebe50b9c5..2d7bee118f5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.join; +package org.elasticsearch.ingest.processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java index 6a8acc745ff..3afc8132999 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/join/JoinProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java @@ -17,11 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.join; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java index 4ca0f1b8f04..478bb70b6fe 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.lowercase; +package org.elasticsearch.ingest.processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java similarity index 85% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java index 8608bb769a0..898e431faff 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/lowercase/LowercaseProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java @@ -17,10 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.lowercase; - -import org.elasticsearch.ingest.processor.AbstractStringProcessor; -import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; +package org.elasticsearch.ingest.processor; import java.util.Locale; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java index 3a370223813..6c0e899f6c8 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.remove; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java similarity index 95% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java index 9800c48702a..39d31d3a0c2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/remove/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java @@ -17,12 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.remove; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java index 62c7fb7c9bd..e0298a927d6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.rename; +package org.elasticsearch.ingest.processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java index 8dc40360390..75a5ec8e36b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/rename/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java @@ -17,11 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.rename; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java index ddbec7c8546..22ece637739 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.set; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java index f4947d3ae3a..bf91da0dab5 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/set/SetProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java @@ -17,14 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.set; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java index 5f144593ee3..d9cf2a0fdd8 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.split; +package org.elasticsearch.ingest.processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java index 945faaf9fb4..7c1f1a13047 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/split/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java @@ -17,11 +17,10 @@ * under the License. */ -package org.elasticsearch.ingest.processor.split; +package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java index d955cec0d0f..f7bcf832973 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.trim; +package org.elasticsearch.ingest.processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java similarity index 89% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java index eea867e57bd..265040ead19 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/trim/TrimProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java @@ -17,10 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.trim; - -import org.elasticsearch.ingest.processor.AbstractStringProcessor; -import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; +package org.elasticsearch.ingest.processor; public class TrimProcessorTests extends AbstractStringProcessorTestCase { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java index 4dd50e2c1e6..c683e8ebbc2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorFactoryTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor.uppercase; +package org.elasticsearch.ingest.processor; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java similarity index 85% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java rename to plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java index 8327dc9f831..343fff1b588 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/uppercase/UppercaseProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java @@ -17,10 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor.uppercase; - -import org.elasticsearch.ingest.processor.AbstractStringProcessor; -import org.elasticsearch.ingest.processor.AbstractStringProcessorTestCase; +package org.elasticsearch.ingest.processor; import java.util.Locale; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 7140bf53a8d..9a31b4f9006 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.ingest.processor.CompoundProcessor; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.set.SetProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java index 9931b15f231..dccabb28a57 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.ingest.processor.remove.RemoveProcessor; +import org.elasticsearch.ingest.processor.RemoveProcessor; import org.hamcrest.CoreMatchers; import java.util.Collections; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java index b979eea5098..b496d189f18 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java @@ -23,7 +23,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.ValueSource; import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.set.SetProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; import org.hamcrest.Matchers; import java.util.Collections; From 1eb5ae1dceae1ca6b94b2bbb75a7abe41bbe4872 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 6 Jan 2016 17:28:35 +0100 Subject: [PATCH 151/347] fix compile errors due to changes in master --- .../ingest/transport/IngestActionFilter.java | 21 ++++++------- .../transport/IngestDisabledActionFilter.java | 5 
++-- .../transport/IngestActionFilterTests.java | 30 ++++++++++++------- 3 files changed, 33 insertions(+), 23 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java index 99117697358..bce3b5cdaa2 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; +import org.elasticsearch.tasks.Task; import java.util.ArrayList; import java.util.HashSet; @@ -52,25 +53,25 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } @Override - public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { String pipelineId = request.getFromContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY); if (pipelineId == null) { pipelineId = request.getHeader(IngestPlugin.PIPELINE_ID_PARAM); if (pipelineId == null) { - chain.proceed(action, request, listener); + chain.proceed(task, action, request, listener); return; } } if (request instanceof IndexRequest) { - processIndexRequest(action, listener, chain, (IndexRequest) request, pipelineId); + processIndexRequest(task, action, listener, chain, (IndexRequest) request, pipelineId); } else if (request instanceof BulkRequest) { BulkRequest bulkRequest = (BulkRequest) request; @SuppressWarnings("unchecked") ActionListener actionListener = (ActionListener) listener; - processBulkIndexRequest(bulkRequest, 
pipelineId, action, chain, actionListener); + processBulkIndexRequest(task, bulkRequest, pipelineId, action, chain, actionListener); } else { - chain.proceed(action, request, listener); + chain.proceed(task, action, request, listener); } } @@ -79,12 +80,12 @@ public final class IngestActionFilter extends AbstractComponent implements Actio chain.proceed(action, response, listener); } - void processIndexRequest(String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest, String pipelineId) { + void processIndexRequest(Task task, String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest, String pipelineId) { // The IndexRequest has the same type on the node that receives the request and the node that // processes the primary action. This could lead to a pipeline being executed twice for the same // index request, hence this check if (indexRequest.hasHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED)) { - chain.proceed(action, indexRequest, listener); + chain.proceed(task, action, indexRequest, listener); return; } executionService.execute(indexRequest, pipelineId, t -> { @@ -92,11 +93,11 @@ public final class IngestActionFilter extends AbstractComponent implements Actio listener.onFailure(t); }, success -> { indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); - chain.proceed(action, indexRequest, listener); + chain.proceed(task, action, indexRequest, listener); }); } - void processBulkIndexRequest(BulkRequest original, String pipelineId, String action, ActionFilterChain chain, ActionListener listener) { + void processBulkIndexRequest(Task task, BulkRequest original, String pipelineId, String action, ActionFilterChain chain, ActionListener listener) { BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); executionService.execute(() -> bulkRequestModifier, pipelineId, e -> { logger.debug("failed to execute pipeline [{}]", e, pipelineId); @@ -110,7 +111,7 @@ public 
final class IngestActionFilter extends AbstractComponent implements Actio // (this will happen if all preprocessing all items in the bulk failed) actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0)); } else { - chain.proceed(action, bulkRequest, actionListener); + chain.proceed(task, action, bulkRequest, actionListener); } }); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java index 06db1ab099c..63ff584988d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java @@ -24,11 +24,12 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.plugin.ingest.IngestPlugin; +import org.elasticsearch.tasks.Task; public final class IngestDisabledActionFilter implements ActionFilter { @Override - public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { String pipelineId = request.getFromContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY); if (pipelineId != null) { failRequest(pipelineId); @@ -38,7 +39,7 @@ public final class IngestDisabledActionFilter implements ActionFilter { failRequest(pipelineId); } - chain.proceed(action, request, listener); + chain.proceed(task, action, request, listener); } @Override diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 
2bb44b80868..336c02f7b0d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; @@ -77,42 +78,46 @@ public class IngestActionFilterTests extends ESTestCase { public void testApplyNoIngestId() throws Exception { IndexRequest indexRequest = new IndexRequest(); + Task task = mock(Task.class); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply("_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); - verify(actionFilterChain).proceed("_action", indexRequest, actionListener); + verify(actionFilterChain).proceed(task, "_action", indexRequest, actionListener); verifyZeroInteractions(executionService, actionFilterChain); } public void testApplyIngestIdViaRequestParam() throws Exception { + Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply("_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); verify(executionService).execute(any(IndexRequest.class), eq("_id"), 
any(Consumer.class), any(Consumer.class)); verifyZeroInteractions(actionFilterChain); } public void testApplyIngestIdViaContext() throws Exception { + Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); indexRequest.putInContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply("_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); verifyZeroInteractions(actionFilterChain); } public void testApplyAlreadyProcessed() throws Exception { + Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); @@ -120,13 +125,14 @@ public class IngestActionFilterTests extends ESTestCase { ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply("_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); - verify(actionFilterChain).proceed("_action", indexRequest, actionListener); + verify(actionFilterChain).proceed(task, "_action", indexRequest, actionListener); verifyZeroInteractions(executionService, actionListener); } public void testApplyExecuted() throws Exception { + Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); @@ -140,14 +146,15 @@ public class IngestActionFilterTests extends 
ESTestCase { return null; }; doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); - filter.apply("_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); - verify(actionFilterChain).proceed("_action", indexRequest, actionListener); + verify(actionFilterChain).proceed(task, "_action", indexRequest, actionListener); verifyZeroInteractions(actionListener); } public void testApplyFailed() throws Exception { + Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); @@ -164,7 +171,7 @@ public class IngestActionFilterTests extends ESTestCase { } }; doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); - filter.apply("_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); verify(actionListener).onFailure(exception); @@ -172,6 +179,7 @@ public class IngestActionFilterTests extends ESTestCase { } public void testApplyWithBulkRequest() throws Exception { + Task task = mock(Task.class); ThreadPool threadPool = mock(ThreadPool.class); when(threadPool.executor(any())).thenReturn(Runnable::run); PipelineStore store = mock(PipelineStore.class); @@ -215,12 +223,12 @@ public class IngestActionFilterTests extends ESTestCase { ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply("_action", bulkRequest, 
actionListener, actionFilterChain); + filter.apply(task, "_action", bulkRequest, actionListener, actionFilterChain); assertBusy(new Runnable() { @Override public void run() { - verify(actionFilterChain).proceed("_action", bulkRequest, actionListener); + verify(actionFilterChain).proceed(task, "_action", bulkRequest, actionListener); verifyZeroInteractions(actionListener); int assertedRequests = 0; From 702f712204ef2387ca75eeda03b8a02b3641d627 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 6 Jan 2016 17:28:46 +0100 Subject: [PATCH 152/347] replaced thirdPartyAudit.missingClasses with specific excludes --- plugins/ingest/build.gradle | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index dfdf421e608..6b869e15582 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -35,7 +35,6 @@ dependencies { compile 'joda-time:joda-time:2.8.2' testCompile 'org.elasticsearch:geolite2-databases:20151029' - testCompile 'org.elasticsearch:securemock:1.2' } sourceSets { @@ -63,9 +62,22 @@ bundlePlugin { } } -//geoip WebServiceClient needs Google http client, but we're not using WebServiceClient and -// joni has AsmCompilerSupport, but that isn't being used: -thirdPartyAudit.missingClasses = true +thirdPartyAudit.excludes = [ + // geoip WebServiceClient needs Google http client, but we're not using WebServiceClient: + 'com.google.api.client.http.HttpTransport', + 'com.google.api.client.http.GenericUrl', + 'com.google.api.client.http.HttpResponse', + 'com.google.api.client.http.HttpRequestFactory', + 'com.google.api.client.http.HttpRequest', + 'com.google.api.client.http.HttpHeaders', + 'com.google.api.client.http.HttpResponseException', + 'com.google.api.client.http.javanet.NetHttpTransport', + 'com.google.api.client.http.javanet.NetHttpTransport', + // joni has AsmCompilerSupport, but that isn't being used: + 'org.objectweb.asm.ClassWriter', + 
'org.objectweb.asm.MethodVisitor', + 'org.objectweb.asm.Opcodes', +] integTest { cluster { From 2478aafa46e70aacaeb8d57ab281c3515afd0223 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 5 Jan 2016 12:37:15 +0100 Subject: [PATCH 153/347] move ingest api to core --- .../ingest}/CompoundProcessor.java | 7 +- .../ingest}/ConfigurationUtils.java | 2 +- .../elasticsearch/ingest/IngestDocument.java | 34 ++-- .../org/elasticsearch/ingest/Pipeline.java | 11 +- .../org/elasticsearch/ingest}/Processor.java | 4 +- .../ingest/ProcessorFactoryProvider.java | 9 +- .../ingest/ProcessorsModule.java | 51 ++++++ .../elasticsearch/ingest/TemplateService.java | 1 + .../org/elasticsearch/ingest/ValueSource.java | 0 .../ingest/CompoundProcessorTests.java | 110 ++++++++++++ .../ingest}/ConfigurationUtilsTests.java | 4 +- .../ingest/IngestDocumentTests.java | 0 .../ingest/PipelineFactoryTests.java | 40 +---- .../ingest/RandomDocumentPicks.java | 0 .../elasticsearch/ingest/TestProcessor.java | 66 +++++++ .../ingest/TestTemplateService.java | 2 - .../ingest/ValueSourceTests.java | 1 - .../processor/AbstractStringProcessor.java | 2 + .../ingest/processor/AppendProcessor.java | 2 + .../ingest/processor/ConvertProcessor.java | 2 + .../ingest/processor/DateProcessor.java | 2 + .../ingest/processor/FailProcessor.java | 2 + .../ingest/processor/GeoIpProcessor.java | 5 +- .../ingest/processor/GrokProcessor.java | 2 + .../ingest/processor/GsubProcessor.java | 2 + .../ingest/processor/JoinProcessor.java | 2 + .../ingest/processor/RemoveProcessor.java | 2 + .../ingest/processor/RenameProcessor.java | 2 + .../ingest/processor/SetProcessor.java | 2 + .../ingest/processor/SplitProcessor.java | 2 + .../plugin/ingest/IngestBootstrapper.java | 1 + .../plugin/ingest/IngestModule.java | 51 +----- .../plugin/ingest/IngestPlugin.java | 39 ++++- .../plugin/ingest/PipelineStore.java | 3 +- .../plugin/ingest/rest/IngestRestFilter.java | 8 +- .../simulate/SimulateExecutionService.java | 2 +- 
.../simulate/SimulatePipelineRequest.java | 2 +- .../AbstractStringProcessorTestCase.java | 1 + .../processor/AppendProcessorTests.java | 3 +- .../processor/CompoundProcessorTests.java | 163 ------------------ .../processor/ConvertProcessorTests.java | 3 +- .../ingest/processor/FailProcessorTests.java | 4 +- .../ingest/processor/GsubProcessorTests.java | 3 +- .../ingest/processor/JoinProcessorTests.java | 1 + .../processor/RemoveProcessorTests.java | 1 + .../processor/RenameProcessorTests.java | 1 + .../ingest/processor/SetProcessorTests.java | 1 + .../ingest/processor/SplitProcessorTests.java | 1 + .../ingest/PipelineExecutionServiceTests.java | 4 +- .../transport/IngestActionFilterTests.java | 7 +- .../SimulateExecutionServiceTests.java | 4 +- .../SimulatePipelineRequestParsingTests.java | 4 +- .../ingest/IngestMustacheSetProcessorIT.java | 2 +- 53 files changed, 358 insertions(+), 322 deletions(-) rename {plugins/ingest/src/main/java/org/elasticsearch/ingest/processor => core/src/main/java/org/elasticsearch/ingest}/CompoundProcessor.java (93%) rename {plugins/ingest/src/main/java/org/elasticsearch/ingest/processor => core/src/main/java/org/elasticsearch/ingest}/ConfigurationUtils.java (99%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/IngestDocument.java (95%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/Pipeline.java (91%) rename {plugins/ingest/src/main/java/org/elasticsearch/ingest/processor => core/src/main/java/org/elasticsearch/ingest}/Processor.java (94%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin => core/src/main/java/org/elasticsearch}/ingest/ProcessorFactoryProvider.java (88%) create mode 100644 core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/TemplateService.java (93%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/ValueSource.java (100%) create mode 100644 
core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java rename {plugins/ingest/src/test/java/org/elasticsearch/ingest/processor => core/src/test/java/org/elasticsearch/ingest}/ConfigurationUtilsTests.java (95%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java (70%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java (100%) create mode 100644 core/src/test/java/org/elasticsearch/ingest/TestProcessor.java rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/TestTemplateService.java (99%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java (99%) delete mode 100644 plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java similarity index 93% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index 42bb567da46..7c71324300e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -18,9 +18,7 @@ */ -package org.elasticsearch.ingest.processor; - -import org.elasticsearch.ingest.IngestDocument; +package org.elasticsearch.ingest; import java.util.Arrays; import java.util.Collections; @@ -42,6 +40,7 @@ public class CompoundProcessor implements Processor { public CompoundProcessor(Processor... 
processor) { this(Arrays.asList(processor), Collections.emptyList()); } + public CompoundProcessor(List processors, List onFailureProcessors) { this.processors = processors; this.onFailureProcessors = onFailureProcessors; @@ -57,7 +56,7 @@ public class CompoundProcessor implements Processor { @Override public String getType() { - return "compound[" + processors.stream().map(p -> p.getType()).collect(Collectors.joining(",")) + "]"; + return "compound[" + processors.stream().map(Processor::getType).collect(Collectors.joining(",")) + "]"; } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java b/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java similarity index 99% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java rename to core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java index 7ba737eb56e..c51714615a8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConfigurationUtils.java +++ b/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest; import java.util.List; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java similarity index 95% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java rename to core/src/main/java/org/elasticsearch/ingest/IngestDocument.java index c6356867bd9..565ee9242ff 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -20,6 +20,14 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; +import org.elasticsearch.index.mapper.internal.IdFieldMapper; +import org.elasticsearch.index.mapper.internal.IndexFieldMapper; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; +import org.elasticsearch.index.mapper.internal.TTLFieldMapper; +import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import java.text.DateFormat; import java.text.SimpleDateFormat; @@ -38,7 +46,6 @@ import java.util.TimeZone; public final class IngestDocument { public final static String INGEST_KEY = "_ingest"; - public final static String SOURCE_KEY = "_source"; static final String TIMESTAMP = "timestamp"; @@ -348,7 +355,7 @@ public final class IngestDocument { if (append) { if (map.containsKey(leafKey)) { Object object = map.get(leafKey); - List list = appendValues(path, object, value); + List list = appendValues(object, value); if (list != object) { map.put(leafKey, list); } @@ -374,7 +381,7 @@ public final class IngestDocument { } if (append) { Object object = list.get(index); - List newList = appendValues(path, object, 
value); + List newList = appendValues(object, value); if (newList != object) { list.set(index, newList); } @@ -387,7 +394,7 @@ public final class IngestDocument { } @SuppressWarnings("unchecked") - private static List appendValues(String path, Object maybeList, Object value) { + private static List appendValues(Object maybeList, Object value) { List list; if (maybeList instanceof List) { //maybeList is already a list, we append the provided values to it @@ -427,7 +434,7 @@ public final class IngestDocument { private Map createTemplateModel() { Map model = new HashMap<>(sourceAndMetadata); - model.put(SOURCE_KEY, sourceAndMetadata); + model.put(SourceFieldMapper.NAME, sourceAndMetadata); // If there is a field in the source with the name '_ingest' it gets overwritten here, // if access to that field is required then it get accessed via '_source._ingest' model.put(INGEST_KEY, ingestMetadata); @@ -489,13 +496,13 @@ public final class IngestDocument { } public enum MetaData { - INDEX("_index"), - TYPE("_type"), - ID("_id"), - ROUTING("_routing"), - PARENT("_parent"), - TIMESTAMP("_timestamp"), - TTL("_ttl"); + INDEX(IndexFieldMapper.NAME), + TYPE(TypeFieldMapper.NAME), + ID(IdFieldMapper.NAME), + ROUTING(RoutingFieldMapper.NAME), + PARENT(ParentFieldMapper.NAME), + TIMESTAMP(TimestampFieldMapper.NAME), + TTL(TTLFieldMapper.NAME); private final String fieldName; @@ -506,7 +513,6 @@ public final class IngestDocument { public String getFieldName() { return fieldName; } - } private class FieldPath { @@ -523,7 +529,7 @@ public final class IngestDocument { newPath = path.substring(8, path.length()); } else { initialContext = sourceAndMetadata; - if (path.startsWith(SOURCE_KEY + ".")) { + if (path.startsWith(SourceFieldMapper.NAME + ".")) { newPath = path.substring(8, path.length()); } else { newPath = path; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/Pipeline.java similarity index 91% rename 
from plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java rename to core/src/main/java/org/elasticsearch/ingest/Pipeline.java index b0e0a2a66a8..56b49a5b063 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/core/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -20,10 +20,6 @@ package org.elasticsearch.ingest; -import org.elasticsearch.ingest.processor.ConfigurationUtils; -import org.elasticsearch.ingest.processor.Processor; -import org.elasticsearch.ingest.processor.CompoundProcessor; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -92,12 +88,10 @@ public final class Pipeline { } if (onFailureProcessors.isEmpty()) { return processor; - } else { - return new CompoundProcessor(Arrays.asList(processor), onFailureProcessors); } - } else { - throw new IllegalArgumentException("No processor type exist with name [" + type + "]"); + return new CompoundProcessor(Collections.singletonList(processor), onFailureProcessors); } + throw new IllegalArgumentException("No processor type exist with name [" + type + "]"); } private List readProcessors(String fieldName, Map processorRegistry, Map config) throws Exception { @@ -121,6 +115,5 @@ public final class Pipeline { CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.unmodifiableList(processors), Collections.unmodifiableList(onFailureProcessors)); return new Pipeline(id, description, compoundProcessor); } - } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java b/core/src/main/java/org/elasticsearch/ingest/Processor.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java rename to core/src/main/java/org/elasticsearch/ingest/Processor.java index 36bcf9689a6..5b97a485d9c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Processor.java +++ 
b/core/src/main/java/org/elasticsearch/ingest/Processor.java @@ -18,9 +18,7 @@ */ -package org.elasticsearch.ingest.processor; - -import org.elasticsearch.ingest.IngestDocument; +package org.elasticsearch.ingest; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/ProcessorFactoryProvider.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java similarity index 88% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/ProcessorFactoryProvider.java rename to core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java index e99261e6408..e1126b305b1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/ProcessorFactoryProvider.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java @@ -17,11 +17,9 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.env.Environment; -import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.processor.Processor; /** * The ingest framework (pipeline, processor and processor factory) can't rely on ES specific code. However some @@ -29,9 +27,8 @@ import org.elasticsearch.ingest.processor.Processor; * so we need some code that provides the physical location of the configuration directory to the processor factories * that need this and this is what this processor factory provider does. */ +//TODO this abstraction could be removed once ingest-core is part of es core? 
@FunctionalInterface -interface ProcessorFactoryProvider { - +public interface ProcessorFactoryProvider { Processor.Factory get(Environment environment, TemplateService templateService); - } diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java new file mode 100644 index 00000000000..c8dd515c81f --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.multibindings.MapBinder; + +import java.util.HashMap; +import java.util.Map; + +/** + * Registry for processor factories + * @see org.elasticsearch.ingest.Processor.Factory + * @see ProcessorFactoryProvider + */ +public class ProcessorsModule extends AbstractModule { + + private final Map processorFactoryProviders = new HashMap<>(); + + @Override + protected void configure() { + MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); + for (Map.Entry entry : processorFactoryProviders.entrySet()) { + mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue()); + } + } + + /** + * Adds a processor factory under a specific type name. + */ + public void addProcessor(String type, ProcessorFactoryProvider processorFactoryProvider) { + processorFactoryProviders.put(type, processorFactoryProvider); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java b/core/src/main/java/org/elasticsearch/ingest/TemplateService.java similarity index 93% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java rename to core/src/main/java/org/elasticsearch/ingest/TemplateService.java index c5bd3e97320..afc89dedfce 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/TemplateService.java +++ b/core/src/main/java/org/elasticsearch/ingest/TemplateService.java @@ -24,6 +24,7 @@ import java.util.Map; * Abstraction for the template engine. */ // NOTE: this abstraction is added because the 'org.elasticsearch.ingest' has the requirement to be ES agnostic +//TODO this abstraction could be removed once ingest-core is part of es core? 
public interface TemplateService { Template compile(String template); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java b/core/src/main/java/org/elasticsearch/ingest/ValueSource.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/ValueSource.java rename to core/src/main/java/org/elasticsearch/ingest/ValueSource.java diff --git a/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java new file mode 100644 index 00000000000..b6a508eb9a1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class CompoundProcessorTests extends ESTestCase { + private IngestDocument ingestDocument; + + @Before + public void init() { + ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); + } + + public void testEmpty() throws Exception { + CompoundProcessor processor = new CompoundProcessor(); + assertThat(processor.getProcessors().isEmpty(), is(true)); + assertThat(processor.getOnFailureProcessors().isEmpty(), is(true)); + processor.execute(ingestDocument); + } + + public void testSingleProcessor() throws Exception { + TestProcessor processor = new TestProcessor(ingestDocument -> {}); + CompoundProcessor compoundProcessor = new CompoundProcessor(processor); + assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); + assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); + compoundProcessor.execute(ingestDocument); + assertThat(processor.getInvokedCounter(), equalTo(1)); + } + + public void testSingleProcessorWithException() throws Exception { + TestProcessor processor = new TestProcessor(ingestDocument -> {throw new RuntimeException("error");}); + CompoundProcessor compoundProcessor = new CompoundProcessor(processor); + assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); + assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); + assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); + try { + compoundProcessor.execute(ingestDocument); + fail("should throw exception"); + } catch (Exception e) { + assertThat(e.getMessage(), equalTo("error")); + } + assertThat(processor.getInvokedCounter(), equalTo(1)); 
+ } + + public void testSingleProcessorWithOnFailureProcessor() throws Exception { + TestProcessor processor1 = new TestProcessor("first", ingestDocument -> {throw new RuntimeException("error");}); + TestProcessor processor2 = new TestProcessor(ingestDocument -> { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.size(), equalTo(2)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("first")); + }); + + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor1), Collections.singletonList(processor2)); + compoundProcessor.execute(ingestDocument); + + assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertThat(processor2.getInvokedCounter(), equalTo(1)); + } + + public void testSingleProcessorWithNestedFailures() throws Exception { + TestProcessor processor = new TestProcessor("first", ingestDocument -> {throw new RuntimeException("error");}); + TestProcessor processorToFail = new TestProcessor("second", ingestDocument -> { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.size(), equalTo(2)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("first")); + throw new RuntimeException("error"); + }); + TestProcessor lastProcessor = new TestProcessor(ingestDocument -> { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.size(), equalTo(2)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("second")); + }); + CompoundProcessor compoundOnFailProcessor = new CompoundProcessor(Collections.singletonList(processorToFail), 
Collections.singletonList(lastProcessor)); + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(compoundOnFailProcessor)); + compoundProcessor.execute(ingestDocument); + + assertThat(processorToFail.getInvokedCounter(), equalTo(1)); + assertThat(lastProcessor.getInvokedCounter(), equalTo(1)); + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java b/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java similarity index 95% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java rename to core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java index b661a598edf..a1d736d1c1e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConfigurationUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -53,7 +53,7 @@ public class ConfigurationUtilsTests extends ESTestCase { assertThat(val, equalTo("bar")); } - public void testReadStringProperty_InvalidType() { + public void testReadStringPropertyInvalidType() { try { ConfigurationUtils.readStringProperty(config, "arr"); } catch (IllegalArgumentException e) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java rename to core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java similarity index 70% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java index 5d61f11ac24..f3bb8f38451 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -27,8 +26,6 @@ import java.util.HashMap; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class PipelineFactoryTests extends ESTestCase { @@ -38,13 +35,7 @@ public class PipelineFactoryTests extends ESTestCase { pipelineConfig.put("description", "_description"); pipelineConfig.put("processors", 
Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); - Map processorRegistry = new HashMap<>(); - Processor processor = mock(Processor.class); - when(processor.getType()).thenReturn("test-processor"); - Processor.Factory processorFactory = mock(Processor.Factory.class); - when(processorFactory.create(processorConfig)).thenReturn(processor); - processorRegistry.put("test", processorFactory); - + Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); @@ -56,16 +47,10 @@ public class PipelineFactoryTests extends ESTestCase { Map processorConfig = new HashMap<>(); Map pipelineConfig = new HashMap<>(); pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test-processor", processorConfig))); - pipelineConfig.put("on_failure", Collections.singletonList(Collections.singletonMap("test-processor", processorConfig))); + pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + pipelineConfig.put("on_failure", Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); - Map processorRegistry = new HashMap<>(); - Processor processor = mock(Processor.class); - when(processor.getType()).thenReturn("test-processor"); - Processor.Factory processorFactory = mock(Processor.Factory.class); - when(processorFactory.create(processorConfig)).thenReturn(processor); - processorRegistry.put("test-processor", processorFactory); - + Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); 
assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); @@ -82,12 +67,7 @@ public class PipelineFactoryTests extends ESTestCase { pipelineConfig.put("description", "_description"); pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); - Map processorRegistry = new HashMap<>(); - Processor processor = mock(Processor.class); - when(processor.getType()).thenReturn("test-processor"); - Processor.Factory processorFactory = mock(Processor.Factory.class); - when(processorFactory.create(processorConfig)).thenReturn(processor); - processorRegistry.put("test", processorFactory); + Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); try { factory.create("_id", pipelineConfig, processorRegistry); } catch (IllegalArgumentException e) { @@ -103,14 +83,8 @@ public class PipelineFactoryTests extends ESTestCase { pipelineConfig.put("description", "_description"); pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); - Map processorFactoryStore = new HashMap<>(); - Processor processor = mock(Processor.class); - when(processor.getType()).thenReturn("test-processor"); - Processor.Factory processorFactory = mock(Processor.Factory.class); - when(processorFactory.create(processorConfig)).thenReturn(processor); - processorFactoryStore.put("test", processorFactory); - - Pipeline pipeline = factory.create("_id", pipelineConfig, processorFactoryStore); + Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); + Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); assertThat(pipeline.getProcessors().size(), equalTo(1)); diff 
--git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/core/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java rename to core/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java diff --git a/core/src/test/java/org/elasticsearch/ingest/TestProcessor.java b/core/src/test/java/org/elasticsearch/ingest/TestProcessor.java new file mode 100644 index 00000000000..67a2406da75 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/TestProcessor.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; + +/** + * Processor used for testing, keeps track of how many times it is invoked and + * accepts a {@link Consumer} of {@link IngestDocument} to be called when executed. 
+ */ +public class TestProcessor implements Processor { + + private final String type; + private final Consumer ingestDocumentConsumer; + private final AtomicInteger invokedCounter = new AtomicInteger(); + + public TestProcessor(Consumer ingestDocumentConsumer) { + this("test-processor", ingestDocumentConsumer); + } + + public TestProcessor(String type, Consumer ingestDocumentConsumer) { + this.ingestDocumentConsumer = ingestDocumentConsumer; + this.type = type; + } + + @Override + public void execute(IngestDocument ingestDocument) throws Exception { + invokedCounter.incrementAndGet(); + ingestDocumentConsumer.accept(ingestDocument); + } + + @Override + public String getType() { + return type; + } + + public int getInvokedCounter() { + return invokedCounter.get(); + } + + public static final class Factory implements Processor.Factory { + @Override + public TestProcessor create(Map config) throws Exception { + return new TestProcessor(ingestDocument -> {}); + } + } +} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/TestTemplateService.java b/core/src/test/java/org/elasticsearch/ingest/TestTemplateService.java similarity index 99% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/TestTemplateService.java rename to core/src/test/java/org/elasticsearch/ingest/TestTemplateService.java index 5ef2c8e4bdd..d44764fa8ac 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/TestTemplateService.java +++ b/core/src/test/java/org/elasticsearch/ingest/TestTemplateService.java @@ -52,7 +52,5 @@ public class TestTemplateService implements TemplateService { public String getKey() { return expected; } - } - } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java b/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java similarity index 99% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java rename to 
core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java index f21f1f2ad44..63eae63a400 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java @@ -67,5 +67,4 @@ public class ValueSourceTests extends ESTestCase { assertThat(myPreciousList.size(), equalTo(1)); assertThat(myPreciousList.get(0), equalTo("value")); } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index 2769a1dd419..cc704c19984 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -19,7 +19,9 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java index f8d53f07af3..46bba1b9edb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java @@ -22,6 +22,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java index b3c287dc9ba..68386b18a33 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.ArrayList; import java.util.List; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 6fce95436f9..e919f4f49c3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java index 727736fd283..6d2bdf8b7d7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java @@ -19,7 +19,9 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import java.util.Map; diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java index bfeaad4e15e..adb76799d4b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java @@ -32,6 +32,7 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.io.Closeable; import java.io.IOException; @@ -55,8 +56,8 @@ import java.util.Map; import java.util.Set; import java.util.stream.Stream; -import static org.elasticsearch.ingest.processor.ConfigurationUtils.readOptionalList; -import static org.elasticsearch.ingest.processor.ConfigurationUtils.readStringProperty; +import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; +import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; public final class GeoIpProcessor implements Processor { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java index 60c7cee7ead..d85cde13d05 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.io.BufferedReader; import java.io.IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java index 897592a4f15..b831ec511e1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.Map; import java.util.regex.Matcher; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java index fdc9aade1f8..3582dce278a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.List; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java index bbcf3e3f3d5..efad3707398 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java @@ -21,6 +21,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java index 9aa76ab2d69..5b134069070 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java index 2bf69b73772..0f2ac29be49 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java @@ -22,6 +22,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java index 8838a384530..b1e50b20cd6 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; import java.util.Arrays; import java.util.Map; diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java index d07b3a8528a..4f114fc61fe 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.ingest.ProcessorFactoryProvider; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java index a63ebdf1ed5..e168173cca0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java @@ -20,31 +20,11 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.multibindings.MapBinder; -import org.elasticsearch.ingest.processor.AppendProcessor; -import org.elasticsearch.ingest.processor.ConvertProcessor; -import org.elasticsearch.ingest.processor.DateProcessor; -import org.elasticsearch.ingest.processor.FailProcessor; -import org.elasticsearch.ingest.processor.GeoIpProcessor; -import org.elasticsearch.ingest.processor.GrokProcessor; -import org.elasticsearch.ingest.processor.GsubProcessor; -import org.elasticsearch.ingest.processor.JoinProcessor; -import org.elasticsearch.ingest.processor.LowercaseProcessor; -import org.elasticsearch.ingest.processor.RemoveProcessor; -import 
org.elasticsearch.ingest.processor.RenameProcessor; -import org.elasticsearch.ingest.processor.SetProcessor; -import org.elasticsearch.ingest.processor.SplitProcessor; -import org.elasticsearch.ingest.processor.TrimProcessor; -import org.elasticsearch.ingest.processor.UppercaseProcessor; import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; -import java.util.HashMap; -import java.util.Map; - public class IngestModule extends AbstractModule { private final boolean ingestEnabled; - private final Map processorFactoryProviders = new HashMap<>(); public IngestModule(boolean ingestEnabled) { this.ingestEnabled = ingestEnabled; @@ -52,41 +32,12 @@ public class IngestModule extends AbstractModule { @Override protected void configure() { - // Even if ingest isn't enable we still need to make sure that rest requests with pipeline + // Even if ingest isn't enabled we still need to make sure that rest requests with pipeline // param copy the pipeline into the context, so that in IngestDisabledActionFilter // index/bulk requests can be failed binder().bind(IngestRestFilter.class).asEagerSingleton(); if (ingestEnabled) { binder().bind(IngestBootstrapper.class).asEagerSingleton(); - - addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); - addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); - addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); - addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - 
addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); - addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); - - MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); - for (Map.Entry entry : processorFactoryProviders.entrySet()) { - mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue()); - } } } - - /** - * Adds a processor factory under a specific type name. 
- */ - public void addProcessor(String type, ProcessorFactoryProvider processorFactoryProvider) { - processorFactoryProviders.put(type, processorFactoryProvider); - } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index f8eb044d881..a5f4b78cd45 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -26,11 +26,27 @@ import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.ProcessorsModule; +import org.elasticsearch.ingest.processor.AppendProcessor; +import org.elasticsearch.ingest.processor.ConvertProcessor; +import org.elasticsearch.ingest.processor.DateProcessor; +import org.elasticsearch.ingest.processor.FailProcessor; +import org.elasticsearch.ingest.processor.GeoIpProcessor; +import org.elasticsearch.ingest.processor.GrokProcessor; +import org.elasticsearch.ingest.processor.GsubProcessor; +import org.elasticsearch.ingest.processor.JoinProcessor; +import org.elasticsearch.ingest.processor.LowercaseProcessor; +import org.elasticsearch.ingest.processor.RemoveProcessor; +import org.elasticsearch.ingest.processor.RenameProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; +import org.elasticsearch.ingest.processor.SplitProcessor; +import org.elasticsearch.ingest.processor.TrimProcessor; +import org.elasticsearch.ingest.processor.UppercaseProcessor; import org.elasticsearch.plugin.ingest.rest.RestDeletePipelineAction; import org.elasticsearch.plugin.ingest.rest.RestGetPipelineAction; +import org.elasticsearch.plugin.ingest.rest.RestIngestDisabledAction; import 
org.elasticsearch.plugin.ingest.rest.RestPutPipelineAction; import org.elasticsearch.plugin.ingest.rest.RestSimulatePipelineAction; -import org.elasticsearch.plugin.ingest.rest.RestIngestDisabledAction; import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; import org.elasticsearch.plugin.ingest.transport.IngestDisabledActionFilter; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; @@ -44,6 +60,7 @@ import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineTransp import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptModule; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -82,7 +99,25 @@ public class IngestPlugin extends Plugin { if (transportClient) { return Collections.emptyList(); } else { - return Collections.singletonList(new IngestModule(ingestEnabled)); + ProcessorsModule processorsModule = new ProcessorsModule(); + if (ingestEnabled) { + processorsModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + processorsModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); + processorsModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + processorsModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + processorsModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + processorsModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + processorsModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + processorsModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new 
SplitProcessor.Factory()); + processorsModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + processorsModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + processorsModule.addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); + processorsModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + processorsModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + processorsModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + processorsModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); + } + return Arrays.asList(new IngestModule(ingestEnabled), processorsModule); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index ba59d8af314..8c575a5b4a8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -41,8 +41,9 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.ProcessorFactoryProvider; import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; import org.elasticsearch.plugin.ingest.transport.reload.ReloadPipelinesAction; 
diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java index 0c548888ab2..751ff0fcc68 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java @@ -20,15 +20,13 @@ package org.elasticsearch.plugin.ingest.rest; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestFilter; import org.elasticsearch.rest.RestFilterChain; import org.elasticsearch.rest.RestRequest; -import static org.elasticsearch.plugin.ingest.IngestPlugin.PIPELINE_ID_PARAM; -import static org.elasticsearch.plugin.ingest.IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY; - public class IngestRestFilter extends RestFilter { @Inject @@ -38,8 +36,8 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - if (request.hasParam(PIPELINE_ID_PARAM)) { - request.putInContext(PIPELINE_ID_PARAM_CONTEXT_KEY, request.param(PIPELINE_ID_PARAM)); + if (request.hasParam(IngestPlugin.PIPELINE_ID_PARAM)) { + request.putInContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY, request.param(IngestPlugin.PIPELINE_ID_PARAM)); } filterChain.continueProcessing(request, channel); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java index 430d6fd7234..1d2fc5d7793 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java +++ 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java @@ -22,7 +22,7 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.action.ActionListener; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java index 2d81fa71f63..a4601cb746b 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.processor.ConfigurationUtils; +import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.plugin.ingest.PipelineStore; import java.io.IOException; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java index 0d5f21ff712..a8840b42d7a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; 
+import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java index df9cc4074c4..8c27d881252 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java @@ -24,8 +24,7 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.processor.AppendProcessor; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java deleted file mode 100644 index 85bbee1e6d3..00000000000 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/CompoundProcessorTests.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest.processor; - -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; -import org.mockito.stubbing.Answer; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.mock.orig.Mockito.verify; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.when; - -public class CompoundProcessorTests extends ESTestCase { - private IngestDocument ingestDocument; - - @Before - public void init() { - ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); - } - - public void testEmpty() throws Exception { - CompoundProcessor processor = new CompoundProcessor(); - assertThat(processor.getProcessors().isEmpty(), is(true)); - assertThat(processor.getOnFailureProcessors().isEmpty(), is(true)); - processor.execute(ingestDocument); - } - - public void testSingleProcessor() throws Exception { - Processor processor = mock(Processor.class); - CompoundProcessor compoundProcessor = new CompoundProcessor(processor); - assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); - assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); - assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), 
is(true)); - compoundProcessor.execute(ingestDocument); - verify(processor, times(1)).execute(ingestDocument); - } - - public void testSingleProcessorWithException() throws Exception { - Processor processor = mock(Processor.class); - when(processor.getType()).thenReturn("failed_processor"); - doThrow(new RuntimeException("error")).doNothing().when(processor).execute(ingestDocument); - CompoundProcessor compoundProcessor = new CompoundProcessor(processor); - assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); - assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); - assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); - try { - compoundProcessor.execute(ingestDocument); - fail("should throw exception"); - } catch (Exception e) { - assertThat(e.getMessage(), equalTo("error")); - } - - verify(processor, times(1)).execute(ingestDocument); - } - - public void testSingleProcessorWithOnFailureProcessor() throws Exception { - Exception error = new RuntimeException("error"); - - Processor processor = mock(Processor.class); - when(processor.getType()).thenReturn("first"); - doThrow(error).doNothing().when(processor).execute(ingestDocument); - - Processor processorNext = mock(Processor.class); - Answer checkMetadataAnswer = invocationOnMock -> { - @SuppressWarnings("unchecked") - IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; - Map ingestMetadata = ingestDocument.getIngestMetadata(); - assertThat(ingestMetadata.size(), equalTo(2)); - assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); - assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("first")); - return null; - }; - doAnswer(checkMetadataAnswer).when(processorNext).execute(ingestDocument); - - CompoundProcessor compoundProcessor = spy(new CompoundProcessor(Arrays.asList(processor), Arrays.asList(processorNext))); - 
assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); - assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); - assertThat(compoundProcessor.getOnFailureProcessors().size(), equalTo(1)); - assertThat(compoundProcessor.getOnFailureProcessors().get(0), equalTo(processorNext)); - compoundProcessor.execute(ingestDocument); - verify(compoundProcessor).executeOnFailure(ingestDocument, error, "first"); - verify(processor, times(1)).execute(ingestDocument); - verify(processorNext, times(1)).execute(ingestDocument); - - } - - public void testSingleProcessorWithNestedFailures() throws Exception { - Exception error = new RuntimeException("error"); - Processor processor = mock(Processor.class); - when(processor.getType()).thenReturn("first"); - doThrow(error).doNothing().when(processor).execute(ingestDocument); - Processor processorToFail = mock(Processor.class); - Answer checkMetadataAnswer = invocationOnMock -> { - @SuppressWarnings("unchecked") - IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; - Map ingestMetadata = ingestDocument.getIngestMetadata(); - assertThat(ingestMetadata.size(), equalTo(2)); - assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); - assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("first")); - return null; - }; - doAnswer(checkMetadataAnswer).when(processorToFail).execute(ingestDocument); - when(processorToFail.getType()).thenReturn("second"); - doThrow(error).doNothing().when(processorToFail).execute(ingestDocument); - Processor lastProcessor = mock(Processor.class); - Answer checkLastMetadataAnswer = invocationOnMock -> { - @SuppressWarnings("unchecked") - IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; - Map ingestMetadata = ingestDocument.getIngestMetadata(); - assertThat(ingestMetadata.size(), equalTo(2)); - 
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); - assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_FIELD), equalTo("second")); - return null; - }; - doAnswer(checkLastMetadataAnswer).when(lastProcessor).execute(ingestDocument); - - CompoundProcessor innerCompoundOnFailProcessor = new CompoundProcessor(Arrays.asList(processorToFail), Arrays.asList(lastProcessor)); - CompoundProcessor compoundOnFailProcessor = spy(innerCompoundOnFailProcessor); - - CompoundProcessor innerCompoundProcessor = new CompoundProcessor(Arrays.asList(processor), Arrays.asList(compoundOnFailProcessor)); - CompoundProcessor compoundProcessor = spy(innerCompoundProcessor); - - assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); - assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor)); - assertThat(compoundProcessor.getOnFailureProcessors().size(), equalTo(1)); - assertThat(compoundProcessor.getOnFailureProcessors().get(0), equalTo(compoundOnFailProcessor)); - compoundProcessor.execute(ingestDocument); - verify(processor, times(1)).execute(ingestDocument); - verify(compoundProcessor, times(1)).executeOnFailure(ingestDocument, error, "first"); - verify(compoundOnFailProcessor, times(1)).execute(ingestDocument); - verify(processorToFail, times(1)).execute(ingestDocument); - verify(compoundOnFailProcessor, times(1)).executeOnFailure(ingestDocument, error, "second"); - verify(lastProcessor, times(1)).execute(ingestDocument); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java index 77ecd5056c3..487dca4c232 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java @@ -21,8 +21,7 @@ package 
org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.ConvertProcessor; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java index c79cb7b73e5..b6d4df9103a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java @@ -20,11 +20,9 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.processor.FailProcessor; -import org.elasticsearch.ingest.processor.FailProcessorException; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java index 76e00b218e1..c2873ebf569 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java @@ -21,8 +21,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.GsubProcessor; -import 
org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java index 3afc8132999..65552b41c02 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java index 39d31d3a0c2..952354cc800 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java index 75a5ec8e36b..c9e2e8be51c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java index bf91da0dab5..7de742c908f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java index 7c1f1a13047..b4487a6a09b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java index 9a31b4f9006..41b989088db 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java @@ -29,9 +29,9 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.processor.CompoundProcessor; -import org.elasticsearch.ingest.processor.Processor; import org.elasticsearch.ingest.processor.SetProcessor; +import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java index 336c02f7b0d..c841f8cce69 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.plugin.ingest.transport; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -32,8 +31,8 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; -import 
org.elasticsearch.ingest.processor.CompoundProcessor; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugin.ingest.IngestBootstrapper; import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugin.ingest.PipelineExecutionService; @@ -45,14 +44,12 @@ import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.function.Consumer; import static org.elasticsearch.plugin.ingest.transport.IngestActionFilter.BulkRequestModifier; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java index c4d12b23a47..c913db72ca1 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java @@ -23,14 +23,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.CompoundProcessor; +import org.elasticsearch.ingest.CompoundProcessor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; -import java.util.Arrays; - import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java index bee6ddd141c..985e7ef1424 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java @@ -21,8 +21,8 @@ package org.elasticsearch.plugin.ingest.transport.simulate; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.processor.CompoundProcessor; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugin.ingest.PipelineStore; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java index b496d189f18..47fef1a2228 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java @@ -22,7 +22,7 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.processor.Processor; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.processor.SetProcessor; import org.hamcrest.Matchers; From 
456351de2e6a8092d96e14f4e774c94c3afa595f Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 5 Jan 2016 14:24:41 +0100 Subject: [PATCH 154/347] move common test classes to test module --- .../main}/java/org/elasticsearch/ingest/RandomDocumentPicks.java | 0 .../src/main}/java/org/elasticsearch/ingest/TestProcessor.java | 0 .../main}/java/org/elasticsearch/ingest/TestTemplateService.java | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename {core/src/test => test/framework/src/main}/java/org/elasticsearch/ingest/RandomDocumentPicks.java (100%) rename {core/src/test => test/framework/src/main}/java/org/elasticsearch/ingest/TestProcessor.java (100%) rename {core/src/test => test/framework/src/main}/java/org/elasticsearch/ingest/TestTemplateService.java (100%) diff --git a/core/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java similarity index 100% rename from core/src/test/java/org/elasticsearch/ingest/RandomDocumentPicks.java rename to test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java diff --git a/core/src/test/java/org/elasticsearch/ingest/TestProcessor.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java similarity index 100% rename from core/src/test/java/org/elasticsearch/ingest/TestProcessor.java rename to test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java diff --git a/core/src/test/java/org/elasticsearch/ingest/TestTemplateService.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java similarity index 100% rename from core/src/test/java/org/elasticsearch/ingest/TestTemplateService.java rename to test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java From fa4dbdaea17e9c0333a8388e90d1e05d10e6f3d3 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 5 Jan 2016 14:25:33 +0100 Subject: [PATCH 155/347] create ProcessorsModule during Node 
creation rather than as part of IngestPlugin initialization If we createProcessorsModule as part of the plugin, other plugins will not be able to register their own processors. --- .../java/org/elasticsearch/node/Node.java | 3 +- .../plugin/ingest/IngestPlugin.java | 41 ++++++++++--------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index a9651eace33..e1de2802e83 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -70,9 +70,9 @@ import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.memory.IndexingMemoryController; -import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.ingest.ProcessorsModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -197,6 +197,7 @@ public class Node implements Releasable { modules.add(new RepositoriesModule()); modules.add(new TribeModule()); modules.add(new AnalysisModule(environment)); + modules.add(new ProcessorsModule()); pluginsService.processModules(modules); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index a5f4b78cd45..6b8e44f34de 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -60,7 +60,6 @@ import 
org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineTransp import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptModule; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -99,25 +98,7 @@ public class IngestPlugin extends Plugin { if (transportClient) { return Collections.emptyList(); } else { - ProcessorsModule processorsModule = new ProcessorsModule(); - if (ingestEnabled) { - processorsModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); - processorsModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); - processorsModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); - processorsModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - processorsModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - processorsModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - processorsModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - processorsModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - processorsModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - processorsModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - processorsModule.addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); - processorsModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - 
processorsModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - processorsModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - processorsModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); - } - return Arrays.asList(new IngestModule(ingestEnabled), processorsModule); + return Collections.singletonList(new IngestModule(ingestEnabled)); } } @@ -137,6 +118,26 @@ public class IngestPlugin extends Plugin { .build(); } + public void onModule(ProcessorsModule processorsModule) { + if (ingestEnabled) { + processorsModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + processorsModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); + processorsModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + processorsModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + processorsModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + processorsModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + processorsModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + processorsModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + processorsModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + processorsModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + 
processorsModule.addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); + processorsModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + processorsModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + processorsModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + processorsModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); + } + } + public void onModule(ActionModule module) { if (transportClient == false) { if (ingestEnabled) { From 635b9b5a46a08455a1da1ca7794a6843ae008a8e Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 5 Jan 2016 15:30:40 +0100 Subject: [PATCH 156/347] clarified TemplateService comments We will keep this abstractions as it's convenient, otherwise IngestDocument would depend on ScriptService directly, and would explicitly rely on mustache which is not even part of core. better to have the interface in core, and the impl as part of the ingest plugin, which relies on mustache, shipped with core by default. 
--- .../java/org/elasticsearch/ingest/TemplateService.java | 7 ++----- .../java/org/elasticsearch/plugin/ingest/IngestPlugin.java | 2 +- ...alTemplateService.java => MustacheTemplateService.java} | 4 ++-- .../org/elasticsearch/plugin/ingest/PipelineStore.java | 2 +- .../elasticsearch/plugin/ingest/AbstractMustacheTests.java | 4 ++-- 5 files changed, 8 insertions(+), 11 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{InternalTemplateService.java => MustacheTemplateService.java} (96%) diff --git a/core/src/main/java/org/elasticsearch/ingest/TemplateService.java b/core/src/main/java/org/elasticsearch/ingest/TemplateService.java index afc89dedfce..fccdb556a53 100644 --- a/core/src/main/java/org/elasticsearch/ingest/TemplateService.java +++ b/core/src/main/java/org/elasticsearch/ingest/TemplateService.java @@ -21,10 +21,9 @@ package org.elasticsearch.ingest; import java.util.Map; /** - * Abstraction for the template engine. + * Abstraction for the ingest template engine: allows to compile a template into a {@link Template} object. + * A compiled template can be executed by calling its {@link Template#execute(Map)} method. */ -// NOTE: this abstraction is added because the 'org.elasticsearch.ingest' has the requirement to be ES agnostic -//TODO this abstraction could be removed once ingest-core is part of es core? 
public interface TemplateService { Template compile(String template); @@ -34,7 +33,5 @@ public interface TemplateService { String execute(Map model); String getKey(); - } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 6b8e44f34de..e547af68bb2 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -170,6 +170,6 @@ public class IngestPlugin extends Plugin { } public void onModule(ScriptModule module) { - module.registerScriptContext(InternalTemplateService.INGEST_SCRIPT_CONTEXT); + module.registerScriptContext(MustacheTemplateService.INGEST_SCRIPT_CONTEXT); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/MustacheTemplateService.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/MustacheTemplateService.java index 58bcfc0a269..a74b999bb32 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/MustacheTemplateService.java @@ -30,13 +30,13 @@ import org.elasticsearch.script.ScriptService; import java.util.Collections; import java.util.Map; -class InternalTemplateService implements TemplateService { +class MustacheTemplateService implements TemplateService { public static final ScriptContext.Plugin INGEST_SCRIPT_CONTEXT = new ScriptContext.Plugin("elasticsearch-ingest", "ingest"); private final ScriptService scriptService; - InternalTemplateService(ScriptService scriptService) { + MustacheTemplateService(ScriptService scriptService) { 
this.scriptService = scriptService; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 8c575a5b4a8..5e5276044de 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -87,7 +87,7 @@ public class PipelineStore extends AbstractComponent implements Closeable { public void buildProcessorFactoryRegistry(Map processorFactoryProviders, Environment environment, ScriptService scriptService) { Map processorFactories = new HashMap<>(); - TemplateService templateService = new InternalTemplateService(scriptService); + TemplateService templateService = new MustacheTemplateService(scriptService); for (Map.Entry entry : processorFactoryProviders.entrySet()) { Processor.Factory processorFactory = entry.getValue().get(environment, templateService); processorFactories.put(entry.getKey(), processorFactory); diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java index bdd37c86d58..bec71f2ffce 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java @@ -42,12 +42,12 @@ public abstract class AbstractMustacheTests extends ESTestCase { .build(); MustacheScriptEngineService mustache = new MustacheScriptEngineService(settings); ScriptContextRegistry registry = new ScriptContextRegistry( - Collections.singletonList(InternalTemplateService.INGEST_SCRIPT_CONTEXT) + Collections.singletonList(MustacheTemplateService.INGEST_SCRIPT_CONTEXT) ); ScriptService scriptService = new ScriptService( settings, new Environment(settings), 
Collections.singleton(mustache), null, registry ); - templateService = new InternalTemplateService(scriptService); + templateService = new MustacheTemplateService(scriptService); } } From 8251a506676b9dd27e035a0867e128aaedde7cb3 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 5 Jan 2016 17:12:53 +0100 Subject: [PATCH 157/347] make ProcessorFactoryProvider extend BiFunction --- .../ingest/ProcessorFactoryProvider.java | 14 +++++++------- .../elasticsearch/plugin/ingest/PipelineStore.java | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java index e1126b305b1..efecf11d0b8 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java @@ -21,14 +21,14 @@ package org.elasticsearch.ingest; import org.elasticsearch.env.Environment; +import java.util.function.BiFunction; + /** - * The ingest framework (pipeline, processor and processor factory) can't rely on ES specific code. However some - * processors rely on reading files from the config directory. We can't add Environment as a constructor parameter, - * so we need some code that provides the physical location of the configuration directory to the processor factories - * that need this and this is what this processor factory provider does. + * Functional interface that allows to create a {@link org.elasticsearch.ingest.Processor.Factory} once all the needed + * components are available at a later stage, more specifically the {@link Environment} and the {@link TemplateService} + * which some processors need. */ -//TODO this abstraction could be removed once ingest-core is part of es core? 
@FunctionalInterface -public interface ProcessorFactoryProvider { - Processor.Factory get(Environment environment, TemplateService templateService); +public interface ProcessorFactoryProvider extends BiFunction { + } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 5e5276044de..32a1973c113 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -89,7 +89,7 @@ public class PipelineStore extends AbstractComponent implements Closeable { Map processorFactories = new HashMap<>(); TemplateService templateService = new MustacheTemplateService(scriptService); for (Map.Entry entry : processorFactoryProviders.entrySet()) { - Processor.Factory processorFactory = entry.getValue().get(environment, templateService); + Processor.Factory processorFactory = entry.getValue().apply(environment, templateService); processorFactories.put(entry.getKey(), processorFactory); } this.processorFactoryRegistry = Collections.unmodifiableMap(processorFactories); From 94469d75f95d85d5695dfb0b6f25348b70c37d30 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 11:08:23 +0100 Subject: [PATCH 158/347] revert rename InternalTemplateService -> MustacheTemplateService --- .../java/org/elasticsearch/plugin/ingest/IngestPlugin.java | 4 ++-- ...tacheTemplateService.java => InternalTemplateService.java} | 4 ++-- .../java/org/elasticsearch/plugin/ingest/PipelineStore.java | 2 +- .../elasticsearch/plugin/ingest/AbstractMustacheTests.java | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/{MustacheTemplateService.java => InternalTemplateService.java} (96%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index e547af68bb2..514fb44d1da 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -130,7 +130,7 @@ public class IngestPlugin extends Plugin { processorsModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); processorsModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); processorsModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - processorsModule.addProcessor(LowercaseProcessor.TYPE, (environment, mustacheFactory) -> new LowercaseProcessor.Factory()); + processorsModule.addProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); processorsModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); processorsModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); processorsModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); @@ -170,6 +170,6 @@ public class IngestPlugin extends Plugin { } public void onModule(ScriptModule module) { - module.registerScriptContext(MustacheTemplateService.INGEST_SCRIPT_CONTEXT); + module.registerScriptContext(InternalTemplateService.INGEST_SCRIPT_CONTEXT); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/MustacheTemplateService.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/MustacheTemplateService.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java 
index a74b999bb32..58bcfc0a269 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/MustacheTemplateService.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java @@ -30,13 +30,13 @@ import org.elasticsearch.script.ScriptService; import java.util.Collections; import java.util.Map; -class MustacheTemplateService implements TemplateService { +class InternalTemplateService implements TemplateService { public static final ScriptContext.Plugin INGEST_SCRIPT_CONTEXT = new ScriptContext.Plugin("elasticsearch-ingest", "ingest"); private final ScriptService scriptService; - MustacheTemplateService(ScriptService scriptService) { + InternalTemplateService(ScriptService scriptService) { this.scriptService = scriptService; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 32a1973c113..a0daf06deff 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -87,7 +87,7 @@ public class PipelineStore extends AbstractComponent implements Closeable { public void buildProcessorFactoryRegistry(Map processorFactoryProviders, Environment environment, ScriptService scriptService) { Map processorFactories = new HashMap<>(); - TemplateService templateService = new MustacheTemplateService(scriptService); + TemplateService templateService = new InternalTemplateService(scriptService); for (Map.Entry entry : processorFactoryProviders.entrySet()) { Processor.Factory processorFactory = entry.getValue().apply(environment, templateService); processorFactories.put(entry.getKey(), processorFactory); diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java 
b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java index bec71f2ffce..bdd37c86d58 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java @@ -42,12 +42,12 @@ public abstract class AbstractMustacheTests extends ESTestCase { .build(); MustacheScriptEngineService mustache = new MustacheScriptEngineService(settings); ScriptContextRegistry registry = new ScriptContextRegistry( - Collections.singletonList(MustacheTemplateService.INGEST_SCRIPT_CONTEXT) + Collections.singletonList(InternalTemplateService.INGEST_SCRIPT_CONTEXT) ); ScriptService scriptService = new ScriptService( settings, new Environment(settings), Collections.singleton(mustache), null, registry ); - templateService = new MustacheTemplateService(scriptService); + templateService = new InternalTemplateService(scriptService); } } From f651f5a531dbd681c118c92b264d78c079ebfcc0 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 11:28:29 +0100 Subject: [PATCH 159/347] remove MapBinder guice binding for processors, use ProcessorsRegistry instead --- .../ingest/ProcessorsModule.java | 17 +++---- .../ingest/ProcessorsRegistry.java | 44 +++++++++++++++++++ .../plugin/ingest/IngestBootstrapper.java | 11 ++--- .../plugin/ingest/PipelineStore.java | 5 ++- .../plugin/ingest/IngestTemplateTests.java | 5 +-- 5 files changed, 62 insertions(+), 20 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java index c8dd515c81f..22d5ba92bbc 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java @@ -20,10 +20,6 @@ package 
org.elasticsearch.ingest; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.multibindings.MapBinder; - -import java.util.HashMap; -import java.util.Map; /** * Registry for processor factories @@ -32,20 +28,21 @@ import java.util.Map; */ public class ProcessorsModule extends AbstractModule { - private final Map processorFactoryProviders = new HashMap<>(); + private final ProcessorsRegistry processorsRegistry; + + public ProcessorsModule() { + this.processorsRegistry = new ProcessorsRegistry(); + } @Override protected void configure() { - MapBinder mapBinder = MapBinder.newMapBinder(binder(), String.class, ProcessorFactoryProvider.class); - for (Map.Entry entry : processorFactoryProviders.entrySet()) { - mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue()); - } + bind(ProcessorsRegistry.class).toInstance(processorsRegistry); } /** * Adds a processor factory under a specific type name. */ public void addProcessor(String type, ProcessorFactoryProvider processorFactoryProvider) { - processorFactoryProviders.put(type, processorFactoryProvider); + processorsRegistry.addProcessor(type, processorFactoryProvider); } } diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java new file mode 100644 index 00000000000..f747414e891 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +public class ProcessorsRegistry { + + private final Map processorFactoryProviders = new HashMap<>(); + + /** + * Adds a processor factory under a specific type name. + */ + public void addProcessor(String type, ProcessorFactoryProvider processorFactoryProvider) { + processorFactoryProviders.put(type, processorFactoryProvider); + } + + public ProcessorFactoryProvider getProcessor(String type) { + return processorFactoryProviders.get(type); + } + + public Set> entrySet() { + return processorFactoryProviders.entrySet(); + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java index 4f114fc61fe..b13c95a3681 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java @@ -37,6 +37,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.ingest.ProcessorFactoryProvider; +import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -58,16 +59,16 @@ public class IngestBootstrapper extends AbstractLifecycleComponent 
implements Cl private final Environment environment; private final PipelineStore pipelineStore; private final PipelineExecutionService pipelineExecutionService; - private final Map processorFactoryProvider; + private final ProcessorsRegistry processorsRegistry; @Inject public IngestBootstrapper(Settings settings, ThreadPool threadPool, Environment environment, ClusterService clusterService, TransportService transportService, - Map processorFactoryProvider) { + ProcessorsRegistry processorsRegistry) { super(settings); this.threadPool = threadPool; this.environment = environment; - this.processorFactoryProvider = processorFactoryProvider; + this.processorsRegistry = processorsRegistry; this.pipelineStore = new PipelineStore(settings, clusterService, transportService); this.pipelineExecutionService = new PipelineExecutionService(pipelineStore, threadPool); @@ -83,7 +84,7 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl clusterService.add(this); this.pipelineStore = pipelineStore; this.pipelineExecutionService = pipelineExecutionService; - this.processorFactoryProvider = null; + this.processorsRegistry = null; } public PipelineStore getPipelineStore() { @@ -102,7 +103,7 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl @Inject public void setScriptService(ScriptService scriptService) { - pipelineStore.buildProcessorFactoryRegistry(processorFactoryProvider, environment, scriptService); + pipelineStore.buildProcessorFactoryRegistry(processorsRegistry, environment, scriptService); } @Override diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index a0daf06deff..1f52bd6b41e 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -42,6 +42,7 @@ import 
org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.ProcessorFactoryProvider; +import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; @@ -85,10 +86,10 @@ public class PipelineStore extends AbstractComponent implements Closeable { this.client = client; } - public void buildProcessorFactoryRegistry(Map processorFactoryProviders, Environment environment, ScriptService scriptService) { + public void buildProcessorFactoryRegistry(ProcessorsRegistry processorsRegistry, Environment environment, ScriptService scriptService) { Map processorFactories = new HashMap<>(); TemplateService templateService = new InternalTemplateService(scriptService); - for (Map.Entry entry : processorFactoryProviders.entrySet()) { + for (Map.Entry entry : processorsRegistry.entrySet()) { Processor.Factory processorFactory = entry.getValue().apply(environment, templateService); processorFactories.put(entry.getKey(), processorFactory); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java index d7456cd9152..49ef9d8a50d 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; @@ -32,8 +33,6 @@ import org.hamcrest.Matchers; import org.junit.Before; import org.mockito.Mockito; -import java.util.Collections; - public class IngestTemplateTests extends ESSingleNodeTestCase { private IngestBootstrapper bootstrapper; @@ -51,7 +50,7 @@ public class IngestTemplateTests extends ESSingleNodeTestCase { ClusterService clusterService = Mockito.mock(ClusterService.class); TransportService transportService = Mockito.mock(TransportService.class); bootstrapper = new IngestBootstrapper( - Settings.EMPTY, threadPool, environment, clusterService, transportService, Collections.emptyMap() + Settings.EMPTY, threadPool, environment, clusterService, transportService, new ProcessorsRegistry() ); bootstrapper.setClient(client()); } From 931d321d58c8887bb907c9033f96f9e2964a895e Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 11:34:02 +0100 Subject: [PATCH 160/347] prevent registering processor factories with same name as an existing one --- .../elasticsearch/ingest/ProcessorsRegistry.java | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java index f747414e891..a9936567592 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java @@ -28,14 +28,13 @@ public class ProcessorsRegistry { private final Map processorFactoryProviders = new HashMap<>(); /** - * Adds a processor factory under a specific type name. + * Adds a processor factory under a specific name. 
*/ - public void addProcessor(String type, ProcessorFactoryProvider processorFactoryProvider) { - processorFactoryProviders.put(type, processorFactoryProvider); - } - - public ProcessorFactoryProvider getProcessor(String type) { - return processorFactoryProviders.get(type); + public void addProcessor(String name, ProcessorFactoryProvider processorFactoryProvider) { + ProcessorFactoryProvider provider = processorFactoryProviders.putIfAbsent(name, processorFactoryProvider); + if (provider != null) { + throw new IllegalArgumentException("Processor factory already registered for name [" + name + "]"); + } } public Set> entrySet() { From e9235452692248b18ca1b2e7ad71873b9d1aa835 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 11:50:55 +0100 Subject: [PATCH 161/347] add test for ProcessorsRegistry --- .../ingest/ProcessorsRegistryTests.java | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java diff --git a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java new file mode 100644 index 00000000000..91847a07118 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class ProcessorsRegistryTests extends ESTestCase { + + public void testAddProcessor() { + ProcessorsRegistry processorsRegistry = new ProcessorsRegistry(); + TestProcessor.Factory factory1 = new TestProcessor.Factory(); + processorsRegistry.addProcessor("1", (environment, templateService) -> factory1); + TestProcessor.Factory factory2 = new TestProcessor.Factory(); + processorsRegistry.addProcessor("2", (environment, templateService) -> factory2); + TestProcessor.Factory factory3 = new TestProcessor.Factory(); + try { + processorsRegistry.addProcessor("1", (environment, templateService) -> factory3); + fail("addProcessor should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Processor factory already registered for name [1]")); + } + + Set> entrySet = processorsRegistry.entrySet(); + assertThat(entrySet.size(), equalTo(2)); + for (Map.Entry entry : entrySet) { + if (entry.getKey().equals("1")) { + assertThat(entry.getValue().apply(null, null), equalTo(factory1)); + } else if (entry.getKey().equals("2")) { + assertThat(entry.getValue().apply(null, null), equalTo(factory2)); + } else { + fail("unexpected processor id [" + entry.getKey() + "]"); + } + } + } +} From da3f460bd105b3022ce92d614d0eacf6c254e01b Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 12:20:10 +0100 Subject: [PATCH 162/347] 
remove ProcessorFactoryProvider --- .../ingest/ProcessorFactoryProvider.java | 34 ------------------- .../ingest/ProcessorsModule.java | 6 ++-- .../ingest/ProcessorsRegistry.java | 11 +++--- .../ingest/ProcessorsRegistryTests.java | 6 ++-- .../plugin/ingest/IngestBootstrapper.java | 2 -- .../plugin/ingest/PipelineStore.java | 6 ++-- 6 files changed, 18 insertions(+), 47 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java deleted file mode 100644 index efecf11d0b8..00000000000 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorFactoryProvider.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.env.Environment; - -import java.util.function.BiFunction; - -/** - * Functional interface that allows to create a {@link org.elasticsearch.ingest.Processor.Factory} once all the needed - * components are available at a later stage, more specifically the {@link Environment} and the {@link TemplateService} - * which some processors need. - */ -@FunctionalInterface -public interface ProcessorFactoryProvider extends BiFunction { - -} diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java index 22d5ba92bbc..c56f7c571c4 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java @@ -20,11 +20,13 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.env.Environment; + +import java.util.function.BiFunction; /** * Registry for processor factories * @see org.elasticsearch.ingest.Processor.Factory - * @see ProcessorFactoryProvider */ public class ProcessorsModule extends AbstractModule { @@ -42,7 +44,7 @@ public class ProcessorsModule extends AbstractModule { /** * Adds a processor factory under a specific type name. 
*/ - public void addProcessor(String type, ProcessorFactoryProvider processorFactoryProvider) { + public void addProcessor(String type, BiFunction> processorFactoryProvider) { processorsRegistry.addProcessor(type, processorFactoryProvider); } } diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java index a9936567592..465d5c5f047 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java @@ -19,25 +19,28 @@ package org.elasticsearch.ingest; +import org.elasticsearch.env.Environment; + import java.util.HashMap; import java.util.Map; import java.util.Set; +import java.util.function.BiFunction; public class ProcessorsRegistry { - private final Map processorFactoryProviders = new HashMap<>(); + private final Map>> processorFactoryProviders = new HashMap<>(); /** * Adds a processor factory under a specific name. 
*/ - public void addProcessor(String name, ProcessorFactoryProvider processorFactoryProvider) { - ProcessorFactoryProvider provider = processorFactoryProviders.putIfAbsent(name, processorFactoryProvider); + public void addProcessor(String name, BiFunction> processorFactoryProvider) { + BiFunction> provider = processorFactoryProviders.putIfAbsent(name, processorFactoryProvider); if (provider != null) { throw new IllegalArgumentException("Processor factory already registered for name [" + name + "]"); } } - public Set> entrySet() { + public Set>>> entrySet() { return processorFactoryProviders.entrySet(); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java index 91847a07118..d9a7086412a 100644 --- a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java @@ -19,10 +19,12 @@ package org.elasticsearch.ingest; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.util.Map; import java.util.Set; +import java.util.function.BiFunction; import static org.hamcrest.CoreMatchers.equalTo; @@ -42,9 +44,9 @@ public class ProcessorsRegistryTests extends ESTestCase { assertThat(e.getMessage(), equalTo("Processor factory already registered for name [1]")); } - Set> entrySet = processorsRegistry.entrySet(); + Set>>> entrySet = processorsRegistry.entrySet(); assertThat(entrySet.size(), equalTo(2)); - for (Map.Entry entry : entrySet) { + for (Map.Entry>> entry : entrySet) { if (entry.getKey().equals("1")) { assertThat(entry.getValue().apply(null, null), equalTo(factory1)); } else if (entry.getKey().equals("2")) { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java index b13c95a3681..2561638c107 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java @@ -36,7 +36,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.ingest.ProcessorFactoryProvider; import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; @@ -44,7 +43,6 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.io.InputStream; -import java.util.Map; /** * Instantiates and wires all the services that the ingest plugin will be needing. diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java index 1f52bd6b41e..633667aaa22 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java @@ -41,10 +41,9 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.ProcessorFactoryProvider; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; import org.elasticsearch.plugin.ingest.transport.reload.ReloadPipelinesAction; @@ -61,6 +60,7 @@ import java.util.Collections; import java.util.HashMap; import 
java.util.List; import java.util.Map; +import java.util.function.BiFunction; public class PipelineStore extends AbstractComponent implements Closeable { @@ -89,7 +89,7 @@ public class PipelineStore extends AbstractComponent implements Closeable { public void buildProcessorFactoryRegistry(ProcessorsRegistry processorsRegistry, Environment environment, ScriptService scriptService) { Map processorFactories = new HashMap<>(); TemplateService templateService = new InternalTemplateService(scriptService); - for (Map.Entry entry : processorsRegistry.entrySet()) { + for (Map.Entry>> entry : processorsRegistry.entrySet()) { Processor.Factory processorFactory = entry.getValue().apply(environment, templateService); processorFactories.put(entry.getKey(), processorFactory); } From 9079a7e891ff36c31867df5f37f64a8ea4a245c8 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 19:10:43 +0100 Subject: [PATCH 163/347] wip: move all the ingest infra to core --- .../elasticsearch/action/ActionModule.java | 25 +++- .../action/ingest}/IngestActionFilter.java | 16 +-- .../ingest}/IngestDisabledActionFilter.java | 8 +- .../ingest}/delete/DeletePipelineAction.java | 2 +- .../ingest}/delete/DeletePipelineRequest.java | 2 +- .../delete/DeletePipelineRequestBuilder.java | 2 +- .../delete/DeletePipelineTransportAction.java | 6 +- .../action/ingest}/get/GetPipelineAction.java | 2 +- .../ingest}/get/GetPipelineRequest.java | 2 +- .../get/GetPipelineRequestBuilder.java | 2 +- .../ingest}/get/GetPipelineResponse.java | 4 +- .../get/GetPipelineTransportAction.java | 8 +- .../action/ingest}/put/PutPipelineAction.java | 2 +- .../ingest}/put/PutPipelineRequest.java | 2 +- .../put/PutPipelineRequestBuilder.java | 2 +- .../put/PutPipelineTransportAction.java | 7 +- .../ingest}/reload/ReloadPipelinesAction.java | 4 +- .../simulate/SimulateDocumentResult.java | 2 +- .../SimulateDocumentSimpleResult.java | 4 +- .../SimulateDocumentVerboseResult.java | 2 +- .../simulate/SimulateExecutionService.java | 
8 +- .../simulate/SimulatePipelineAction.java | 2 +- .../simulate/SimulatePipelineRequest.java | 12 +- .../SimulatePipelineRequestBuilder.java | 2 +- .../simulate/SimulatePipelineResponse.java | 2 +- .../SimulatePipelineTransportAction.java | 6 +- .../simulate/SimulateProcessorResult.java | 4 +- .../simulate/WriteableIngestDocument.java | 4 +- .../client/transport/TransportClient.java | 2 +- .../common/network/NetworkModule.java | 12 +- .../ingest/IngestBootstrapper.java | 5 +- ...rocessorsModule.java => IngestModule.java} | 11 +- .../ingest/InternalTemplateService.java | 12 +- .../ingest/PipelineDefinition.java | 4 +- .../ingest/PipelineExecutionService.java | 30 +--- .../elasticsearch}/ingest/PipelineStore.java | 15 +- .../ingest/ProcessorsRegistry.java | 2 + .../ingest/{ => core}/CompoundProcessor.java | 2 +- .../ingest/{ => core}/ConfigurationUtils.java | 6 +- .../ingest/{ => core}/IngestDocument.java | 2 +- .../ingest/{ => core}/Pipeline.java | 2 +- .../ingest/{ => core}/Processor.java | 2 +- .../ingest/{ => core}/TemplateService.java | 2 +- .../ingest/{ => core}/ValueSource.java | 2 +- .../java/org/elasticsearch/node/Node.java | 6 +- .../rest/action/ingest}/IngestRestFilter.java | 8 +- .../ingest}/RestDeletePipelineAction.java | 6 +- .../action/ingest}/RestGetPipelineAction.java | 6 +- .../action/ingest}/RestPutPipelineAction.java | 6 +- .../ingest}/RestSimulatePipelineAction.java | 6 +- .../elasticsearch/script/ScriptContext.java | 2 +- .../elasticsearch/threadpool/ThreadPool.java | 3 + .../org/elasticsearch/ingest}/ingest.json | 0 .../ingest}/BulkRequestModifierTests.java | 2 +- .../ingest}/IngestActionFilterTests.java | 57 ++++---- .../reload/ReloadPipelinesActionTests.java | 11 +- .../SimulateDocumentSimpleResultTests.java | 4 +- .../SimulateExecutionServiceTests.java | 43 +++--- .../SimulatePipelineRequestParsingTests.java | 26 ++-- .../SimulatePipelineResponseTests.java | 4 +- .../SimulateProcessorResultTests.java | 4 +- 
.../WriteableIngestDocumentTests.java | 4 +- .../ingest/IngestBootstrapperTests.java | 31 ++-- .../elasticsearch/ingest/IngestClientIT.java | 93 ++++++------ .../ingest/IngestTemplateTests.java | 18 +-- .../ingest/PipelineExecutionServiceTests.java | 118 ++++++++-------- .../ingest/PipelineFactoryTests.java | 2 + .../ingest/PipelineStoreTests.java | 15 +- .../ingest/ProcessorsRegistryTests.java | 2 + .../{ => core}/CompoundProcessorTests.java | 3 +- .../{ => core}/ConfigurationUtilsTests.java | 2 +- .../{ => core}/IngestDocumentTests.java | 4 +- .../ingest/{ => core}/ValueSourceTests.java | 4 +- .../processor/AbstractStringProcessor.java | 6 +- .../ingest/processor/AppendProcessor.java | 10 +- .../ingest/processor/ConvertProcessor.java | 6 +- .../ingest/processor/DateProcessor.java | 6 +- .../ingest/processor/FailProcessor.java | 8 +- .../ingest/processor/GeoIpProcessor.java | 8 +- .../ingest/processor/GrokProcessor.java | 6 +- .../ingest/processor/GsubProcessor.java | 6 +- .../ingest/processor/JoinProcessor.java | 6 +- .../ingest/processor/RemoveProcessor.java | 8 +- .../ingest/processor/RenameProcessor.java | 6 +- .../ingest/processor/SetProcessor.java | 10 +- .../ingest/processor/SplitProcessor.java | 6 +- .../plugin/ingest/IngestModule.java | 43 ------ .../plugin/ingest/IngestPlugin.java | 133 +++--------------- .../ingest/rest/RestIngestDisabledAction.java | 48 ------- .../AbstractStringProcessorTestCase.java | 4 +- .../processor/AppendProcessorTests.java | 8 +- .../processor/ConvertProcessorTests.java | 4 +- .../ingest/processor/DateProcessorTests.java | 2 +- .../ingest/processor/FailProcessorTests.java | 4 +- .../ingest/processor/GeoIpProcessorTests.java | 3 +- .../ingest/processor/GrokProcessorTests.java | 4 +- .../ingest/processor/GsubProcessorTests.java | 4 +- .../ingest/processor/JoinProcessorTests.java | 4 +- .../processor/RemoveProcessorTests.java | 4 +- .../processor/RenameProcessorTests.java | 4 +- .../ingest/processor/SetProcessorTests.java | 8 +- 
.../ingest/processor/SplitProcessorTests.java | 4 +- .../plugin/ingest/AbstractMustacheTests.java | 7 +- .../ingest/IngestDocumentMustacheIT.java | 4 +- .../ingest/IngestMustacheSetProcessorIT.java | 6 +- .../plugin/ingest/TemplateServiceIT.java | 2 +- .../plugin/ingest/ValueSourceMustacheIT.java | 4 +- .../ingest/RandomDocumentPicks.java | 1 + .../elasticsearch/ingest/TestProcessor.java | 3 + .../ingest/TestTemplateService.java | 2 + 110 files changed, 490 insertions(+), 657 deletions(-) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/IngestActionFilter.java (94%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/IngestDisabledActionFilter.java (87%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/delete/DeletePipelineAction.java (96%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/delete/DeletePipelineRequest.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/delete/DeletePipelineRequestBuilder.java (95%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/delete/DeletePipelineTransportAction.java (92%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/get/GetPipelineAction.java (96%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/get/GetPipelineRequest.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => 
core/src/main/java/org/elasticsearch/action/ingest}/get/GetPipelineRequestBuilder.java (95%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/get/GetPipelineResponse.java (95%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/get/GetPipelineTransportAction.java (90%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/put/PutPipelineAction.java (96%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/put/PutPipelineRequest.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/put/PutPipelineRequestBuilder.java (96%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/put/PutPipelineTransportAction.java (92%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/reload/ReloadPipelinesAction.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulateDocumentResult.java (94%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulateDocumentSimpleResult.java (96%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulateDocumentVerboseResult.java (98%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => 
core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulateExecutionService.java (94%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulatePipelineAction.java (96%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulatePipelineRequest.java (94%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulatePipelineRequestBuilder.java (96%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulatePipelineResponse.java (98%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulatePipelineTransportAction.java (94%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/SimulateProcessorResult.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport => core/src/main/java/org/elasticsearch/action/ingest}/simulate/WriteableIngestDocument.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin => core/src/main/java/org/elasticsearch}/ingest/IngestBootstrapper.java (98%) rename core/src/main/java/org/elasticsearch/ingest/{ProcessorsModule.java => IngestModule.java} (79%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin => core/src/main/java/org/elasticsearch}/ingest/InternalTemplateService.java (88%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin => core/src/main/java/org/elasticsearch}/ingest/PipelineDefinition.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin => 
core/src/main/java/org/elasticsearch}/ingest/PipelineExecutionService.java (78%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin => core/src/main/java/org/elasticsearch}/ingest/PipelineStore.java (96%) rename core/src/main/java/org/elasticsearch/ingest/{ => core}/CompoundProcessor.java (98%) rename core/src/main/java/org/elasticsearch/ingest/{ => core}/ConfigurationUtils.java (96%) rename core/src/main/java/org/elasticsearch/ingest/{ => core}/IngestDocument.java (99%) rename core/src/main/java/org/elasticsearch/ingest/{ => core}/Pipeline.java (99%) rename core/src/main/java/org/elasticsearch/ingest/{ => core}/Processor.java (97%) rename core/src/main/java/org/elasticsearch/ingest/{ => core}/TemplateService.java (96%) rename core/src/main/java/org/elasticsearch/ingest/{ => core}/ValueSource.java (99%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest => core/src/main/java/org/elasticsearch/rest/action/ingest}/IngestRestFilter.java (82%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest => core/src/main/java/org/elasticsearch/rest/action/ingest}/RestDeletePipelineAction.java (90%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest => core/src/main/java/org/elasticsearch/rest/action/ingest}/RestGetPipelineAction.java (90%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest => core/src/main/java/org/elasticsearch/rest/action/ingest}/RestPutPipelineAction.java (91%) rename {plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest => core/src/main/java/org/elasticsearch/rest/action/ingest}/RestSimulatePipelineAction.java (92%) rename {plugins/ingest/src/main/resources => core/src/main/resources/org/elasticsearch/ingest}/ingest.json (100%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/BulkRequestModifierTests.java (98%) rename 
{plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/IngestActionFilterTests.java (86%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/reload/ReloadPipelinesActionTests.java (94%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/simulate/SimulateDocumentSimpleResultTests.java (95%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/simulate/SimulateExecutionServiceTests.java (81%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/simulate/SimulatePipelineRequestParsingTests.java (91%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/simulate/SimulatePipelineResponseTests.java (98%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/simulate/SimulateProcessorResultTests.java (96%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport => core/src/test/java/org/elasticsearch/action/ingest}/simulate/WriteableIngestDocumentTests.java (98%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin => core/src/test/java/org/elasticsearch}/ingest/IngestBootstrapperTests.java (92%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/IngestClientIT.java (77%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin => core/src/test/java/org/elasticsearch}/ingest/IngestTemplateTests.java (91%) rename {plugins/ingest/src/test/java/org/elasticsearch/plugin => core/src/test/java/org/elasticsearch}/ingest/PipelineExecutionServiceTests.java (79%) rename 
{plugins/ingest/src/test/java/org/elasticsearch/plugin => core/src/test/java/org/elasticsearch}/ingest/PipelineStoreTests.java (93%) rename core/src/test/java/org/elasticsearch/ingest/{ => core}/CompoundProcessorTests.java (98%) rename core/src/test/java/org/elasticsearch/ingest/{ => core}/ConfigurationUtilsTests.java (98%) rename core/src/test/java/org/elasticsearch/ingest/{ => core}/IngestDocumentTests.java (99%) rename core/src/test/java/org/elasticsearch/ingest/{ => core}/ValueSourceTests.java (95%) delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java delete mode 100644 plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 11cafb326a0..0e5e15071ea 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -155,6 +155,12 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.get.TransportGetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction; +import org.elasticsearch.action.ingest.delete.DeletePipelineAction; +import org.elasticsearch.action.ingest.delete.DeletePipelineTransportAction; +import org.elasticsearch.action.ingest.get.GetPipelineAction; +import org.elasticsearch.action.ingest.get.GetPipelineTransportAction; +import org.elasticsearch.action.ingest.put.PutPipelineAction; +import org.elasticsearch.action.ingest.put.PutPipelineTransportAction; import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.PercolateAction; import org.elasticsearch.action.percolate.TransportMultiPercolateAction; @@ -192,6 
+198,11 @@ import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.action.ingest.IngestActionFilter; +import org.elasticsearch.action.ingest.IngestDisabledActionFilter; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineTransportAction; import java.util.ArrayList; import java.util.HashMap; @@ -220,9 +231,11 @@ public class ActionModule extends AbstractModule { } + private final boolean ingestEnabled; private final boolean proxy; - public ActionModule(boolean proxy) { + public ActionModule(Settings settings, boolean proxy) { + this.ingestEnabled = settings.getAsBoolean("node.ingest", false); this.proxy = proxy; } @@ -349,6 +362,11 @@ public class ActionModule extends AbstractModule { registerAction(FieldStatsAction.INSTANCE, TransportFieldStatsTransportAction.class); + registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); + registerAction(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); + registerAction(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class); + registerAction(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class); + // register Name -> GenericAction Map that can be injected to instances. MapBinder actionsBinder = MapBinder.newMapBinder(binder(), String.class, GenericAction.class); @@ -359,6 +377,11 @@ public class ActionModule extends AbstractModule { // register GenericAction -> transportAction Map that can be injected to instances. 
// also register any supporting classes if (!proxy) { + if (ingestEnabled) { + registerFilter(IngestActionFilter.class); + } else { + registerFilter(IngestDisabledActionFilter.class); + } bind(TransportLivenessAction.class).asEagerSingleton(); MapBinder transportActionsBinder = MapBinder.newMapBinder(binder(), GenericAction.class, TransportAction.class); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java rename to core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index bce3b5cdaa2..41a8af45120 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -31,9 +31,9 @@ import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; -import org.elasticsearch.plugin.ingest.IngestPlugin; -import org.elasticsearch.plugin.ingest.PipelineExecutionService; +import org.elasticsearch.ingest.IngestBootstrapper; +import org.elasticsearch.ingest.PipelineExecutionService; +import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.tasks.Task; import java.util.ArrayList; @@ -54,9 +54,9 @@ public final class IngestActionFilter extends AbstractComponent implements Actio @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY); + String pipelineId = request.getFromContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY); if (pipelineId == null) { - pipelineId = request.getHeader(IngestPlugin.PIPELINE_ID_PARAM); + pipelineId = request.getHeader(ConfigurationUtils.PIPELINE_ID_PARAM); if (pipelineId == null) { chain.proceed(task, action, request, listener); return; @@ -84,7 +84,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio // The IndexRequest has the same type on the node that receives the request and the node that // processes the primary action. 
This could lead to a pipeline being executed twice for the same // index request, hence this check - if (indexRequest.hasHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED)) { + if (indexRequest.hasHeader(ConfigurationUtils.PIPELINE_ALREADY_PROCESSED)) { chain.proceed(task, action, indexRequest, listener); return; } @@ -92,7 +92,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio logger.error("failed to execute pipeline [{}]", t, pipelineId); listener.onFailure(t); }, success -> { - indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); + indexRequest.putHeader(ConfigurationUtils.PIPELINE_ALREADY_PROCESSED, true); chain.proceed(task, action, indexRequest, listener); }); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java similarity index 87% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java rename to core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java index 63ff584988d..bd62969ec16 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/IngestDisabledActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java @@ -16,25 +16,25 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; -import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.tasks.Task; +import org.elasticsearch.ingest.core.ConfigurationUtils; public final class IngestDisabledActionFilter implements ActionFilter { @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY); + String pipelineId = request.getFromContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY); if (pipelineId != null) { failRequest(pipelineId); } - pipelineId = request.getHeader(IngestPlugin.PIPELINE_ID_PARAM); + pipelineId = request.getHeader(ConfigurationUtils.PIPELINE_ID_PARAM); if (pipelineId != null) { failRequest(pipelineId); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineAction.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineAction.java index c1fba7fc89f..a27340427e5 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.delete; +package org.elasticsearch.action.ingest.delete; import org.elasticsearch.action.Action; import org.elasticsearch.action.delete.DeleteResponse; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequest.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequest.java index 1b31d5f44b2..9d1877aa404 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequest.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.delete; +package org.elasticsearch.action.ingest.delete; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequestBuilder.java similarity index 95% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequestBuilder.java index ee8089ab54e..8f878dd4957 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.delete; +package org.elasticsearch.action.ingest.delete; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.delete.DeleteResponse; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineTransportAction.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineTransportAction.java index 8a472d9a527..1aba6e6decd 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/delete/DeletePipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.delete; +package org.elasticsearch.action.ingest.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.delete.DeleteResponse; @@ -26,8 +26,8 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.IngestBootstrapper; +import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineAction.java similarity index 96% rename from 
plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineAction.java index 0904a8a3f9f..8025eaebfcb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.get; +package org.elasticsearch.action.ingest.get; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequest.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequest.java index 0ff673a7bdb..69c15112536 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequest.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.get; +package org.elasticsearch.action.ingest.get; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequestBuilder.java similarity index 95% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequestBuilder.java index 4269b6ceccd..07099562695 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.get; +package org.elasticsearch.action.ingest.get; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineResponse.java similarity index 95% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java rename to core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineResponse.java index 3508f6c0c55..ff2eafc8172 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineResponse.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineResponse.java @@ -17,14 +17,14 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.get; +package org.elasticsearch.action.ingest.get; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.plugin.ingest.PipelineDefinition; +import org.elasticsearch.ingest.PipelineDefinition; import org.elasticsearch.rest.RestStatus; import java.io.IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineTransportAction.java similarity index 90% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineTransportAction.java index e3b00697b16..01b401d1fee 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/get/GetPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.get; +package org.elasticsearch.action.ingest.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -25,9 +25,9 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; -import org.elasticsearch.plugin.ingest.PipelineDefinition; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.IngestBootstrapper; +import org.elasticsearch.ingest.PipelineDefinition; +import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineAction.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineAction.java index 1356503b673..ed36d067187 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.put; +package org.elasticsearch.action.ingest.put; import org.elasticsearch.action.Action; import org.elasticsearch.action.index.IndexResponse; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequest.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequest.java index b9ef9c17e45..9084632dd0a 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequest.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.put; +package org.elasticsearch.action.ingest.put; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequestBuilder.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequestBuilder.java index cb6a74de6b1..848e20c38a9 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.put; +package org.elasticsearch.action.ingest.put; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.index.IndexResponse; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineTransportAction.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineTransportAction.java index 9de4107c7db..325f4b235d1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/put/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.put; +package org.elasticsearch.action.ingest.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexResponse; @@ -26,11 +26,12 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.IngestBootstrapper; +import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; + public class PutPipelineTransportAction extends HandledTransportAction { private final PipelineStore pipelineStore; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java 
b/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java index 8a9a15c221c..c3770a94e5d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java @@ -17,13 +17,13 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.reload; +package org.elasticsearch.action.ingest.reload; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentResult.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentResult.java index ff9ad829aad..f817fe392e8 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentResult.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResult.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResult.java index eb6170e1fd1..115a68eeac9 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResult.java @@ -16,13 +16,13 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import java.io.IOException; import java.util.Collections; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentVerboseResult.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentVerboseResult.java similarity index 98% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentVerboseResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentVerboseResult.java index eac308d9f35..08984ed0f03 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentVerboseResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentVerboseResult.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionService.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionService.java index 1d2fc5d7793..8d3e96694d1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionService.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionService.java @@ -17,12 +17,12 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineAction.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineAction.java index 7c671a442f6..7f92f0ea35a 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequest.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequest.java index a4601cb746b..e0ba5574e4f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequest.java @@ -17,17 +17,17 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.PipelineStore; import java.io.IOException; import java.util.ArrayList; @@ -36,7 +36,7 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.ingest.IngestDocument.MetaData; +import static org.elasticsearch.ingest.core.IngestDocument.MetaData; public class SimulatePipelineRequest extends ActionRequest { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestBuilder.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestBuilder.java index 07998291922..e77ed3fecc1 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponse.java similarity index 98% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponse.java index 097595f3a32..9a5706dd17d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponse.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponse.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineTransportAction.java similarity index 94% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineTransportAction.java index dcff1e0e7f7..e12ac6f6a1d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -26,8 +26,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.IngestBootstrapper; +import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResult.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResult.java index 78eafd50655..d7612b0c3af 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResult.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; @@ -25,7 +25,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import java.io.IOException; import java.util.Collections; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java b/core/src/main/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocument.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java rename to core/src/main/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocument.java index 2b9ac56b341..532d2ff3e60 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocument.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocument.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -25,7 +25,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import java.io.IOException; import java.util.Collections; diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 3b8be668f43..daed566b787 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -147,7 +147,7 @@ public class TransportClient extends AbstractClient { // noop } }); - modules.add(new ActionModule(true)); + modules.add(new ActionModule(this.settings, true)); modules.add(new CircuitBreakerModule(this.settings)); pluginsService.processModules(modules); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index 12e22a7693b..13ea25b51f1 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -116,6 +116,10 @@ import org.elasticsearch.rest.action.get.RestGetSourceAction; import org.elasticsearch.rest.action.get.RestHeadAction; import org.elasticsearch.rest.action.get.RestMultiGetAction; import org.elasticsearch.rest.action.index.RestIndexAction; +import org.elasticsearch.rest.action.ingest.RestDeletePipelineAction; +import 
org.elasticsearch.rest.action.ingest.RestGetPipelineAction; +import org.elasticsearch.rest.action.ingest.RestPutPipelineAction; +import org.elasticsearch.rest.action.ingest.RestSimulatePipelineAction; import org.elasticsearch.rest.action.main.RestMainAction; import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction; import org.elasticsearch.rest.action.percolate.RestPercolateAction; @@ -263,7 +267,13 @@ public class NetworkModule extends AbstractModule { RestCatAction.class, // Tasks API - RestListTasksAction.class + RestListTasksAction.class, + + // Ingest API + RestPutPipelineAction.class, + RestGetPipelineAction.class, + RestDeletePipelineAction.class, + RestSimulatePipelineAction.class ); private static final List> builtinCatHandlers = Arrays.asList( diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java similarity index 98% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java rename to core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index 2561638c107..ec1e7a243d4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; @@ -36,7 +36,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -164,7 +163,7 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl void installIngestIndexTemplate() throws IOException { logger.debug("installing .ingest index template..."); - try (InputStream is = IngestBootstrapper.class.getResourceAsStream("/ingest.json")) { + try (InputStream is = IngestBootstrapper.class.getResourceAsStream("ingest.json")) { final byte[] template; try (BytesStreamOutput out = new BytesStreamOutput()) { Streams.copy(is, out); diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java similarity index 79% rename from core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java rename to core/src/main/java/org/elasticsearch/ingest/IngestModule.java index c56f7c571c4..ad1f0afb123 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java @@ -21,24 +21,29 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.env.Environment; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.TemplateService; +import org.elasticsearch.rest.action.ingest.IngestRestFilter; import java.util.function.BiFunction; 
/** * Registry for processor factories - * @see org.elasticsearch.ingest.Processor.Factory + * @see Processor.Factory */ -public class ProcessorsModule extends AbstractModule { +public class IngestModule extends AbstractModule { private final ProcessorsRegistry processorsRegistry; - public ProcessorsModule() { + public IngestModule() { this.processorsRegistry = new ProcessorsRegistry(); } @Override protected void configure() { + binder().bind(IngestRestFilter.class).asEagerSingleton(); bind(ProcessorsRegistry.class).toInstance(processorsRegistry); + binder().bind(IngestBootstrapper.class).asEagerSingleton(); } /** diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java similarity index 88% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java rename to core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java index 58bcfc0a269..531ffccf888 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/InternalTemplateService.java +++ b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java @@ -17,10 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; @@ -30,13 +30,11 @@ import org.elasticsearch.script.ScriptService; import java.util.Collections; import java.util.Map; -class InternalTemplateService implements TemplateService { - - public static final ScriptContext.Plugin INGEST_SCRIPT_CONTEXT = new ScriptContext.Plugin("elasticsearch-ingest", "ingest"); +public class InternalTemplateService implements TemplateService { private final ScriptService scriptService; - InternalTemplateService(ScriptService scriptService) { + public InternalTemplateService(ScriptService scriptService) { this.scriptService = scriptService; } @@ -48,7 +46,7 @@ class InternalTemplateService implements TemplateService { Script script = new Script(template, ScriptService.ScriptType.INLINE, "mustache", Collections.emptyMap()); CompiledScript compiledScript = scriptService.compile( script, - INGEST_SCRIPT_CONTEXT, + ScriptContext.Standard.INGEST, null /* we can supply null here, because ingest doesn't use indexed scripts */, Collections.emptyMap() ); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java b/core/src/main/java/org/elasticsearch/ingest/PipelineDefinition.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java rename to core/src/main/java/org/elasticsearch/ingest/PipelineDefinition.java index f8e94463327..94c584ad121 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineDefinition.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineDefinition.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.core.Pipeline; import java.io.IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java similarity index 78% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java rename to core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index c55faf3b09e..d0d0896e7e4 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -17,14 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.threadpool.ThreadPool; import java.util.Map; @@ -32,8 +30,6 @@ import java.util.function.Consumer; public class PipelineExecutionService { - static final String THREAD_POOL_NAME = IngestPlugin.NAME; - private final PipelineStore store; private final ThreadPool threadPool; @@ -44,7 +40,7 @@ public class PipelineExecutionService { public void execute(IndexRequest request, String pipelineId, Consumer failureHandler, Consumer completionHandler) { Pipeline pipeline = getPipeline(pipelineId); - threadPool.executor(THREAD_POOL_NAME).execute(() -> { + threadPool.executor(ThreadPool.Names.INGEST).execute(() -> { try { innerExecute(request, pipeline); completionHandler.accept(true); @@ -57,7 +53,7 @@ public class PipelineExecutionService { public void execute(Iterable actionRequests, String pipelineId, Consumer itemFailureHandler, Consumer completionHandler) { Pipeline pipeline = getPipeline(pipelineId); - threadPool.executor(THREAD_POOL_NAME).execute(() -> { + threadPool.executor(ThreadPool.Names.INGEST).execute(() -> { for (ActionRequest actionRequest : actionRequests) { if ((actionRequest instanceof IndexRequest) == false) { continue; @@ -108,20 +104,4 @@ public class PipelineExecutionService { } return pipeline; } - - public static Settings additionalSettings(Settings nodeSettings) { - Settings settings = nodeSettings.getAsSettings("threadpool." 
+ THREAD_POOL_NAME); - if (!settings.names().isEmpty()) { - // the TP is already configured in the node settings - // no need for additional settings - return Settings.EMPTY; - } - int availableProcessors = EsExecutors.boundedNumberOfProcessors(nodeSettings); - return Settings.builder() - .put("threadpool." + THREAD_POOL_NAME + ".type", "fixed") - .put("threadpool." + THREAD_POOL_NAME + ".size", availableProcessors) - .put("threadpool." + THREAD_POOL_NAME + ".queue_size", 200) - .build(); - } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java similarity index 96% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java rename to core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 633667aaa22..217ef42e4a2 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.ActionListener; @@ -40,13 +40,12 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.ProcessorsRegistry; -import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; -import org.elasticsearch.plugin.ingest.transport.reload.ReloadPipelinesAction; +import org.elasticsearch.action.ingest.delete.DeletePipelineRequest; +import org.elasticsearch.action.ingest.put.PutPipelineRequest; +import org.elasticsearch.action.ingest.reload.ReloadPipelinesAction; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java index 465d5c5f047..2aa9a1238b1 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest; import org.elasticsearch.env.Environment; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.TemplateService; import java.util.HashMap; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java 
b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java similarity index 98% rename from core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java index 7c71324300e..28cfc957b1f 100644 --- a/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java @@ -18,7 +18,7 @@ */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import java.util.Arrays; import java.util.Collections; diff --git a/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java b/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java similarity index 96% rename from core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java rename to core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java index c51714615a8..5845a1299d3 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java @@ -17,13 +17,17 @@ * under the License. 
*/ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import java.util.List; import java.util.Map; public final class ConfigurationUtils { + public static final String PIPELINE_ID_PARAM_CONTEXT_KEY = "__pipeline_id__"; + public static final String PIPELINE_ID_PARAM = "pipeline"; + public static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; + private ConfigurationUtils() { } diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java similarity index 99% rename from core/src/main/java/org/elasticsearch/ingest/IngestDocument.java rename to core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java index 565ee9242ff..b5c40e172af 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.internal.IdFieldMapper; diff --git a/core/src/main/java/org/elasticsearch/ingest/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java similarity index 99% rename from core/src/main/java/org/elasticsearch/ingest/Pipeline.java rename to core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java index 56b49a5b063..14c33e7b148 100644 --- a/core/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java @@ -18,7 +18,7 @@ */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import java.util.ArrayList; import java.util.Arrays; diff --git a/core/src/main/java/org/elasticsearch/ingest/Processor.java b/core/src/main/java/org/elasticsearch/ingest/core/Processor.java similarity index 97% rename from 
core/src/main/java/org/elasticsearch/ingest/Processor.java rename to core/src/main/java/org/elasticsearch/ingest/core/Processor.java index 5b97a485d9c..9c29894fa5f 100644 --- a/core/src/main/java/org/elasticsearch/ingest/Processor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Processor.java @@ -18,7 +18,7 @@ */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/ingest/TemplateService.java b/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java similarity index 96% rename from core/src/main/java/org/elasticsearch/ingest/TemplateService.java rename to core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java index fccdb556a53..df77453881c 100644 --- a/core/src/main/java/org/elasticsearch/ingest/TemplateService.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/ingest/ValueSource.java b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java similarity index 99% rename from core/src/main/java/org/elasticsearch/ingest/ValueSource.java rename to core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java index 45f03d01130..987002f0354 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ValueSource.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import java.util.ArrayList; import java.util.HashMap; diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index e1de2802e83..11726a59d3b 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -72,7 +72,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.memory.IndexingMemoryController; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; -import org.elasticsearch.ingest.ProcessorsModule; +import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -189,7 +189,7 @@ public class Node implements Releasable { modules.add(new ClusterModule(this.settings)); modules.add(new IndicesModule()); modules.add(new SearchModule()); - modules.add(new ActionModule(false)); + modules.add(new ActionModule(this.settings, false)); modules.add(new GatewayModule(settings)); modules.add(new NodeClientModule()); modules.add(new PercolatorModule()); @@ -197,7 +197,7 @@ public class Node implements Releasable { modules.add(new RepositoriesModule()); modules.add(new TribeModule()); modules.add(new AnalysisModule(environment)); - modules.add(new ProcessorsModule()); + modules.add(new IngestModule()); pluginsService.processModules(modules); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java similarity index 82% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java rename to core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java 
index 751ff0fcc68..1c44ec323dc 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/IngestRestFilter.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java @@ -17,10 +17,10 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.rest; +package org.elasticsearch.rest.action.ingest; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.plugin.ingest.IngestPlugin; +import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestFilter; @@ -36,8 +36,8 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - if (request.hasParam(IngestPlugin.PIPELINE_ID_PARAM)) { - request.putInContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY, request.param(IngestPlugin.PIPELINE_ID_PARAM)); + if (request.hasParam(ConfigurationUtils.PIPELINE_ID_PARAM)) { + request.putInContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY, request.param(ConfigurationUtils.PIPELINE_ID_PARAM)); } filterChain.continueProcessing(request, channel); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java similarity index 90% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java rename to core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index bf8645377f9..d5c258ff47d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestDeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -17,13 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.rest; +package org.elasticsearch.rest.action.ingest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequest; +import org.elasticsearch.action.ingest.delete.DeletePipelineAction; +import org.elasticsearch.action.ingest.delete.DeletePipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java similarity index 90% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java rename to core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index 6d444739900..fed1f8fc448 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -17,14 +17,14 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.rest; +package org.elasticsearch.rest.action.ingest; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequest; +import org.elasticsearch.action.ingest.get.GetPipelineAction; +import org.elasticsearch.action.ingest.get.GetPipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java similarity index 91% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java rename to core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 2fc5508e15e..2a36773bc79 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -17,13 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.rest; +package org.elasticsearch.rest.action.ingest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequest; +import org.elasticsearch.action.ingest.put.PutPipelineAction; +import org.elasticsearch.action.ingest.put.PutPipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java similarity index 92% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java rename to core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index 0b86e35b522..80f0d013b72 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestSimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -17,13 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.rest; +package org.elasticsearch.rest.action.ingest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptContext.java b/core/src/main/java/org/elasticsearch/script/ScriptContext.java index 4b1b6de63f2..3ab2bb52c9b 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptContext.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptContext.java @@ -37,7 +37,7 @@ public interface ScriptContext { */ enum Standard implements ScriptContext { - AGGS("aggs"), SEARCH("search"), UPDATE("update"); + AGGS("aggs"), SEARCH("search"), UPDATE("update"), INGEST("ingest"); private final String key; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 5d0c814a285..1176f3fe49f 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -87,6 +87,7 @@ public class ThreadPool extends AbstractComponent { public static final String FORCE_MERGE = "force_merge"; public static final String FETCH_SHARD_STARTED = "fetch_shard_started"; public static final String FETCH_SHARD_STORE = "fetch_shard_store"; + public static final String INGEST = "ingest"; } public enum ThreadPoolType { @@ -145,6 +146,7 @@ public class ThreadPool extends AbstractComponent { 
map.put(Names.FORCE_MERGE, ThreadPoolType.FIXED); map.put(Names.FETCH_SHARD_STARTED, ThreadPoolType.SCALING); map.put(Names.FETCH_SHARD_STORE, ThreadPoolType.SCALING); + map.put(Names.INGEST, ThreadPoolType.FIXED); THREAD_POOL_TYPES = Collections.unmodifiableMap(map); } @@ -234,6 +236,7 @@ public class ThreadPool extends AbstractComponent { add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FORCE_MERGE).size(1)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STARTED).size(availableProcessors * 2).keepAlive("5m")); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STORE).size(availableProcessors * 2).keepAlive("5m")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INGEST).size(availableProcessors).queueSize(200)); this.defaultExecutorTypeSettings = unmodifiableMap(defaultExecutorTypeSettings); diff --git a/plugins/ingest/src/main/resources/ingest.json b/core/src/main/resources/org/elasticsearch/ingest/ingest.json similarity index 100% rename from plugins/ingest/src/main/resources/ingest.json rename to core/src/main/resources/org/elasticsearch/ingest/ingest.json diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/BulkRequestModifierTests.java b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/BulkRequestModifierTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java index 6c4871a140a..a799b66678e 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/BulkRequestModifierTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java @@ -1,4 +1,4 @@ -package org.elasticsearch.plugin.ingest.transport; +package org.elasticsearch.action.ingest; /* * Licensed to Elasticsearch under one or more 
contributor diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java similarity index 86% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index c841f8cce69..3e91e8b2d1a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -29,32 +29,32 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.plugin.ingest.IngestBootstrapper; -import org.elasticsearch.plugin.ingest.IngestPlugin; -import org.elasticsearch.plugin.ingest.PipelineExecutionService; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.IngestBootstrapper; +import org.elasticsearch.ingest.PipelineExecutionService; +import org.elasticsearch.ingest.PipelineStore; +import org.elasticsearch.ingest.core.CompoundProcessor; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.Processor; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; -import org.mockito.invocation.InvocationOnMock; +import org.mockito.Matchers; import org.mockito.stubbing.Answer; import java.util.HashSet; import java.util.Set; import java.util.function.Consumer; -import static org.elasticsearch.plugin.ingest.transport.IngestActionFilter.BulkRequestModifier; +import static org.elasticsearch.action.ingest.IngestActionFilter.BulkRequestModifier; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; @@ -89,13 +89,13 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); + verify(executionService).execute(Matchers.any(IndexRequest.class), Matchers.eq("_id"), Matchers.any(Consumer.class), Matchers.any(Consumer.class)); verifyZeroInteractions(actionFilterChain); } @@ -103,13 +103,13 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest 
indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putInContext(IngestPlugin.PIPELINE_ID_PARAM_CONTEXT_KEY, "_id"); + indexRequest.putInContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); + verify(executionService).execute(Matchers.any(IndexRequest.class), Matchers.eq("_id"), Matchers.any(Consumer.class), Matchers.any(Consumer.class)); verifyZeroInteractions(actionFilterChain); } @@ -117,8 +117,8 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); - indexRequest.putHeader(IngestPlugin.PIPELINE_ALREADY_PROCESSED, true); + indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(ConfigurationUtils.PIPELINE_ALREADY_PROCESSED, true); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -132,7 +132,7 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -154,23 +154,20 @@ public class IngestActionFilterTests extends ESTestCase { Task task = 
mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); RuntimeException exception = new RuntimeException(); - Answer answer = new Answer() { - @Override - public Object answer(InvocationOnMock invocationOnMock) throws Throwable { - Consumer handler = (Consumer) invocationOnMock.getArguments()[2]; - handler.accept(exception); - return null; - } + Answer answer = invocationOnMock -> { + Consumer handler = (Consumer) invocationOnMock.getArguments()[2]; + handler.accept(exception); + return null; }; doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); + verify(executionService).execute(Matchers.any(IndexRequest.class), Matchers.eq("_id"), Matchers.any(Consumer.class), Matchers.any(Consumer.class)); verify(actionListener).onFailure(exception); verifyZeroInteractions(actionFilterChain); } @@ -199,7 +196,7 @@ public class IngestActionFilterTests extends ESTestCase { filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + bulkRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); int numRequest = scaledRandomIntBetween(8, 64); for (int i = 0; i < numRequest; i++) { if (rarely()) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java 
b/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java similarity index 94% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java index 87a2554ede1..abd66fc08d7 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/reload/ReloadPipelinesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.reload; +package org.elasticsearch.action.ingest.reload; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; @@ -27,20 +27,19 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.junit.Before; +import org.mockito.Matchers; import java.util.Collections; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -83,7 +82,7 @@ public class ReloadPipelinesActionTests extends ESTestCase { handler.handleResponse(new ReloadPipelinesAction.ReloadPipelinesResponse()); } return mock; - }).when(transportService).sendRequest(any(), 
eq(ReloadPipelinesAction.ACTION_NAME), any(), any()); + }).when(transportService).sendRequest(Matchers.any(), Matchers.eq(ReloadPipelinesAction.ACTION_NAME), Matchers.any(), Matchers.any()); reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> assertThat(result, is(true))); } @@ -115,7 +114,7 @@ public class ReloadPipelinesActionTests extends ESTestCase { } } return mock; - }).when(transportService).sendRequest(any(), eq(ReloadPipelinesAction.ACTION_NAME), any(), any()); + }).when(transportService).sendRequest(Matchers.any(), Matchers.eq(ReloadPipelinesAction.ACTION_NAME), Matchers.any(), Matchers.any()); reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> assertThat(result, is(false))); } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResultTests.java similarity index 95% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResultTests.java index 38c1e88bdb3..685db51c7cd 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateDocumentSimpleResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResultTests.java @@ -17,11 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionServiceTests.java similarity index 81% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionServiceTests.java index c913db72ca1..2189faa8749 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionServiceTests.java @@ -17,13 +17,14 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.TestProcessor; +import org.elasticsearch.ingest.core.CompoundProcessor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -34,18 +35,11 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; public class SimulateExecutionServiceTests extends ESTestCase { private ThreadPool threadPool; private SimulateExecutionService executionService; - private Pipeline pipeline; - private CompoundProcessor processor; private IngestDocument ingestDocument; @Before @@ -56,9 +50,6 @@ public class SimulateExecutionServiceTests extends ESTestCase { .build() ); executionService = new SimulateExecutionService(threadPool); - processor = mock(CompoundProcessor.class); - when(processor.getType()).thenReturn("mock"); - pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); } @@ -68,8 +59,10 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItem() throws Exception { + TestProcessor processor = new TestProcessor("mock", ingestDocument -> {}); + 
Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); - verify(processor, times(2)).execute(ingestDocument); + assertThat(processor.getInvokedCounter(), equalTo(2)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); @@ -89,8 +82,10 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteItem() throws Exception { + TestProcessor processor = new TestProcessor("mock", ingestDocument -> {}); + Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); - verify(processor, times(2)).execute(ingestDocument); + assertThat(processor.getInvokedCounter(), equalTo(2)); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; assertThat(simulateDocumentSimpleResult.getIngestDocument(), equalTo(ingestDocument)); @@ -98,10 +93,12 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItemWithFailure() throws Exception { - Exception e = new RuntimeException("processor failed"); - doThrow(e).doNothing().when(processor).execute(ingestDocument); + TestProcessor processor1 = new TestProcessor("mock", ingestDocument -> { throw new RuntimeException("processor failed"); }); + TestProcessor processor2 = new TestProcessor("mock", ingestDocument -> {}); + Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor1, 
processor2)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); - verify(processor, times(2)).execute(ingestDocument); + assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertThat(processor2.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); @@ -114,15 +111,13 @@ public class SimulateExecutionServiceTests extends ESTestCase { assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); - runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); - assertThat(runtimeException.getMessage(), equalTo("processor failed")); } public void testExecuteItemWithFailure() throws Exception { - Exception e = new RuntimeException("processor failed"); - doThrow(e).when(processor).execute(ingestDocument); + TestProcessor processor = new TestProcessor(ingestDocument -> { throw new RuntimeException("processor failed"); }); + Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); - verify(processor, times(1)).execute(ingestDocument); + assertThat(processor.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; 
assertThat(simulateDocumentSimpleResult.getIngestDocument(), nullValue()); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestParsingTests.java similarity index 91% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestParsingTests.java index 985e7ef1424..f84cb550ed9 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineRequestParsingTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestParsingTests.java @@ -17,29 +17,29 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.plugin.ingest.PipelineStore; +import org.elasticsearch.ingest.PipelineStore; +import org.elasticsearch.ingest.TestProcessor; +import org.elasticsearch.ingest.core.CompoundProcessor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; -import static org.elasticsearch.ingest.IngestDocument.MetaData.INDEX; -import static 
org.elasticsearch.ingest.IngestDocument.MetaData.TYPE; -import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields; +import static org.elasticsearch.action.ingest.simulate.SimulatePipelineRequest.Fields; +import static org.elasticsearch.ingest.core.IngestDocument.MetaData.ID; +import static org.elasticsearch.ingest.core.IngestDocument.MetaData.INDEX; +import static org.elasticsearch.ingest.core.IngestDocument.MetaData.TYPE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; @@ -51,8 +51,8 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { @Before public void init() throws IOException { - CompoundProcessor pipelineCompoundProcessor = mock(CompoundProcessor.class); - when(pipelineCompoundProcessor.getProcessors()).thenReturn(Arrays.asList(mock(Processor.class))); + TestProcessor processor = new TestProcessor(ingestDocument -> {}); + CompoundProcessor pipelineCompoundProcessor = new CompoundProcessor(processor); Pipeline pipeline = new Pipeline(SimulatePipelineRequest.SIMULATED_PIPELINE_ID, null, pipelineCompoundProcessor); Map processorRegistry = new HashMap<>(); processorRegistry.put("mock_processor", mock(Processor.Factory.class)); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponseTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponseTests.java index 47dd12dc75c..65c9996d533 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulatePipelineResponseTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponseTests.java @@ -17,11 +17,11 @@ * under the License. */ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResultTests.java similarity index 96% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResultTests.java index a2af6056fa2..312d6ec3a96 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/SimulateProcessorResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResultTests.java @@ -17,11 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java b/core/src/test/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocumentTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocumentTests.java index b153cced84c..17a6e15bdf6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/transport/simulate/WriteableIngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocumentTests.java @@ -17,11 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest.transport.simulate; +package org.elasticsearch.action.ingest.simulate; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java similarity index 92% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java rename to core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java index 3f966d09f39..8f414ea1e67 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestBootstrapperTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.Version; import org.elasticsearch.client.Client; @@ -46,6 +46,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.junit.Before; +import org.mockito.Matchers; import java.util.ArrayList; import java.util.Collections; @@ -54,8 +55,6 @@ import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.Is.is; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -71,7 +70,7 @@ public class IngestBootstrapperTests extends ESTestCase { @Before public void init() { ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.executor(any())).thenReturn(Runnable::run); + when(threadPool.executor(Matchers.any())).thenReturn(Runnable::run); ClusterService clusterService = mock(ClusterService.class); store = mock(PipelineStore.class); when(store.isStarted()).thenReturn(false); @@ -85,8 +84,8 @@ public class IngestBootstrapperTests extends ESTestCase { TransportService transportService = mock(TransportService.class); ClusterService clusterService = mock(ClusterService.class); - when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); - when(client.searchScroll(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); + when(client.search(Matchers.any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); + when(client.searchScroll(Matchers.any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); Settings settings = Settings.EMPTY; PipelineStore store = new PipelineStore(settings, 
clusterService, transportService); IngestBootstrapper bootstrapper = new IngestBootstrapper( @@ -98,8 +97,8 @@ public class IngestBootstrapperTests extends ESTestCase { hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(hits)); - when(client.get(any())).thenReturn(PipelineStoreTests.expectedGetResponse(true)); + when(client.search(Matchers.any())).thenReturn(PipelineStoreTests.expectedSearchReponse(hits)); + when(client.get(Matchers.any())).thenReturn(PipelineStoreTests.expectedGetResponse(true)); try { store.get("1"); @@ -146,7 +145,7 @@ public class IngestBootstrapperTests extends ESTestCase { ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, never()).stop(anyString()); + verify(store, never()).stop(Matchers.anyString()); } public void testPipelineStoreBootstrappingGlobalStateNoMasterBlock() throws Exception { @@ -161,13 +160,13 @@ public class IngestBootstrapperTests extends ESTestCase { // We're not started and there is a no master block, doing nothing: bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, never()).stop(anyString()); + verify(store, never()).stop(Matchers.anyString()); // We're started and there is a no master block, so we stop the store: when(store.isStarted()).thenReturn(true); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, times(1)).stop(anyString()); + verify(store, times(1)).stop(Matchers.anyString()); } public void testPipelineStoreBootstrappingNoIngestIndex() throws Exception { @@ -203,13 +202,13 @@ public class IngestBootstrapperTests extends ESTestCase { // We're not running and the cluster 
state isn't ready, so we don't start. bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, never()).stop(anyString()); + verify(store, never()).stop(Matchers.anyString()); // We're running and the cluster state indicates that all our shards are unassigned, so we stop. when(store.isStarted()).thenReturn(true); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, times(1)).stop(anyString()); + verify(store, times(1)).stop(Matchers.anyString()); } public void testPipelineStoreBootstrappingIngestIndexShardsStarted() throws Exception { @@ -236,13 +235,13 @@ public class IngestBootstrapperTests extends ESTestCase { // We're not running and the cluster state is ready, so we start. bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, times(1)).start(); - verify(store, never()).stop(anyString()); + verify(store, never()).stop(Matchers.anyString()); // We're running and the cluster state is good, so we do nothing. 
when(store.isStarted()).thenReturn(true); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, times(1)).start(); - verify(store, never()).stop(anyString()); + verify(store, never()).stop(Matchers.anyString()); } public void testPipelineStoreBootstrappingFailure() throws Exception { @@ -270,7 +269,7 @@ public class IngestBootstrapperTests extends ESTestCase { doThrow(new RuntimeException()).doNothing().when(store).start(); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, times(2)).start(); - verify(store, never()).stop(anyString()); + verify(store, never()).stop(Matchers.anyString()); } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java similarity index 77% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java rename to core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 13934f5a83d..a42635d2d79 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -25,35 +25,35 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.ingest.delete.DeletePipelineAction; +import org.elasticsearch.action.ingest.delete.DeletePipelineRequestBuilder; +import org.elasticsearch.action.ingest.get.GetPipelineAction; +import org.elasticsearch.action.ingest.get.GetPipelineRequestBuilder; +import org.elasticsearch.action.ingest.get.GetPipelineResponse; +import org.elasticsearch.action.ingest.put.PutPipelineAction; +import org.elasticsearch.action.ingest.put.PutPipelineRequestBuilder; 
+import org.elasticsearch.action.ingest.simulate.SimulateDocumentSimpleResult; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineRequestBuilder; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineResponse; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.plugin.ingest.IngestPlugin; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineResponse; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulateDocumentSimpleResult; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequestBuilder; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineResponse; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.Collections; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import 
static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; @@ -73,7 +73,7 @@ public class IngestClientIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(IngestPlugin.NODE_INGEST_SETTING, true) + .put("node.ingest", true) .build(); } @@ -81,7 +81,7 @@ public class IngestClientIT extends ESIntegTestCase { protected Settings externalClusterClientSettings() { return Settings.builder() .put(super.transportClientSettings()) - .put(IngestPlugin.NODE_INGEST_SETTING, true) + .put("node.ingest", true) .build(); } @@ -92,9 +92,7 @@ public class IngestClientIT extends ESIntegTestCase { .field("description", "my_pipeline") .startArray("processors") .startObject() - .startObject("grok") - .field("field", "field1") - .field("pattern", "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>") + .startObject("test") .endObject() .endObject() .endArray() @@ -128,35 +126,9 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(response.getResults().size(), equalTo(1)); assertThat(response.getResults().get(0), instanceOf(SimulateDocumentSimpleResult.class)); SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); - assertThat(simulateDocumentSimpleResult.getIngestDocument(), nullValue()); - assertThat(simulateDocumentSimpleResult.getFailure(), notNullValue()); - - response = new SimulatePipelineRequestBuilder(client(), SimulatePipelineAction.INSTANCE) - .setId("_id") - .setSource(jsonBuilder().startObject() - .startArray("docs") - .startObject() - .field("_index", "index") - .field("_type", "type") - .field("_id", "id") - .startObject("_source") - .field("field1", "123.42 400 ") - .endObject() - .endObject() - .endArray() - .endObject().bytes()) - .get(); - - assertThat(response.isVerbose(), equalTo(false)); - 
assertThat(response.getPipelineId(), equalTo("_id")); - assertThat(response.getResults().size(), equalTo(1)); - assertThat(response.getResults().get(0), instanceOf(SimulateDocumentSimpleResult.class)); - simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); Map source = new HashMap<>(); - source.put("field1", "123.42 400 "); - source.put("val", 123.42f); - source.put("status", 400); - source.put("msg", "foo"); + source.put("foo", "bar"); + source.put("processed", true); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, source); assertThat(simulateDocumentSimpleResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); @@ -182,7 +154,7 @@ public class IngestClientIT extends ESIntegTestCase { int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id"); + bulkRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); for (int i = 0; i < numRequests; i++) { IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)); if (i % 2 == 0) { @@ -240,7 +212,7 @@ public class IngestClientIT extends ESIntegTestCase { assertAcked(putMappingResponse); client().prepareIndex("test", "type", "1").setSource("field1", "123.42 400 ") - .putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id") + .putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id") .get(); Map doc = client().prepareGet("test", "type", "1") @@ -251,7 +223,7 @@ public class IngestClientIT extends ESIntegTestCase { client().prepareBulk().add( client().prepareIndex("test", "type", "2").setSource("field1", "123.42 400 ") - ).putHeader(IngestPlugin.PIPELINE_ID_PARAM, "_id").get(); + ).putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id").get(); doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); 
assertThat(doc.get("val"), equalTo(123.42)); assertThat(doc.get("status"), equalTo(400)); @@ -274,4 +246,23 @@ public class IngestClientIT extends ESIntegTestCase { protected Collection> getMockPlugins() { return Collections.emptyList(); } + + public static class IngestPlugin extends Plugin { + + @Override + public String name() { + return "ingest"; + } + + @Override + public String description() { + return "ingest mock"; + } + + public void onModule(IngestModule ingestModule) { + ingestModule.addProcessor("test", (environment, templateService) -> config -> + new TestProcessor("test", ingestDocument -> ingestDocument.setFieldValue("processed", true)) + ); + } + } } diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java similarity index 91% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java rename to core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java index 49ef9d8a50d..8ef444d7e19 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/IngestTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -25,13 +25,15 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.hamcrest.Matchers; import org.junit.Before; -import org.mockito.Mockito; + +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class IngestTemplateTests extends ESSingleNodeTestCase { @@ -44,11 +46,11 @@ public class IngestTemplateTests extends ESSingleNodeTestCase { @Before public void init() { - ThreadPool threadPool = Mockito.mock(ThreadPool.class); - Mockito.when(threadPool.executor(Mockito.anyString())).thenReturn(Runnable::run); - Environment environment = Mockito.mock(Environment.class); - ClusterService clusterService = Mockito.mock(ClusterService.class); - TransportService transportService = Mockito.mock(TransportService.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.executor(anyString())).thenReturn(Runnable::run); + Environment environment = mock(Environment.class); + ClusterService clusterService = mock(ClusterService.class); + TransportService transportService = mock(TransportService.class); bootstrapper = new IngestBootstrapper( Settings.EMPTY, threadPool, environment, clusterService, transportService, new ProcessorsRegistry() ); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java 
b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java similarity index 79% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java rename to core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 41b989088db..8e948d9f108 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequest; @@ -26,12 +26,10 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.processor.SetProcessor; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.CompoundProcessor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; @@ -39,17 +37,12 @@ import org.mockito.ArgumentMatcher; import org.mockito.Matchers; import org.mockito.invocation.InvocationOnMock; -import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Matchers.any; -import static 
org.mockito.Matchers.anyBoolean; -import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -67,14 +60,16 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void setup() { store = mock(PipelineStore.class); ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.executor(anyString())).thenReturn(Runnable::run); + when(threadPool.executor(Matchers.anyString())).thenReturn(Runnable::run); executionService = new PipelineExecutionService(store, threadPool); } public void testExecutePipelineDoesNotExist() { when(store.get("_id")).thenReturn(null); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); try { executionService.execute(indexRequest, "_id", failureHandler, completionHandler); @@ -82,8 +77,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist")); } - verify(failureHandler, never()).accept(any(Throwable.class)); - verify(completionHandler, never()).accept(anyBoolean()); + verify(failureHandler, never()).accept(Matchers.any(Throwable.class)); + verify(completionHandler, never()).accept(Matchers.anyBoolean()); } public void testExecuteSuccess() throws Exception { @@ -91,12 +86,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = 
mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - //TODO we remove metadata, this check is not valid anymore, what do we replace it with? - //verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, never()).accept(any()); + verify(failureHandler, never()).accept(Matchers.any()); verify(completionHandler, times(1)).accept(true); } @@ -113,15 +108,17 @@ public class PipelineExecutionServiceTests extends ESTestCase { } return null; - }).when(processor).execute(any()); + }).when(processor).execute(Matchers.any()); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - verify(processor).execute(any()); - verify(failureHandler, never()).accept(any()); + verify(processor).execute(Matchers.any()); + verify(failureHandler, never()).accept(Matchers.any()); verify(completionHandler, times(1)).accept(true); assertThat(indexRequest.index(), equalTo("update_index")); @@ -138,12 +135,14 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); 
verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, times(1)).accept(any(RuntimeException.class)); - verify(completionHandler, never()).accept(anyBoolean()); + verify(failureHandler, times(1)).accept(Matchers.any(RuntimeException.class)); + verify(completionHandler, never()).accept(Matchers.anyBoolean()); } public void testExecuteSuccessWithOnFailure() throws Exception { @@ -153,12 +152,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - //TODO we remove metadata, this check is not valid anymore, what do we replace it with? 
- //verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, never()).accept(any(RuntimeException.class)); + verify(failureHandler, never()).accept(Matchers.any(RuntimeException.class)); verify(completionHandler, times(1)).accept(true); } @@ -170,12 +169,14 @@ public class PipelineExecutionServiceTests extends ESTestCase { IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, times(1)).accept(any(RuntimeException.class)); - verify(completionHandler, never()).accept(anyBoolean()); + verify(failureHandler, times(1)).accept(Matchers.any(RuntimeException.class)); + verify(completionHandler, never()).accept(Matchers.anyBoolean()); } public void testExecuteFailureWithNestedOnFailure() throws Exception { @@ -183,66 +184,65 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor onFailureProcessor = mock(Processor.class); Processor onFailureOnFailureProcessor = mock(Processor.class); CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), - Collections.singletonList(new CompoundProcessor(Arrays.asList(onFailureProcessor),Arrays.asList(onFailureOnFailureProcessor)))); + Collections.singletonList(new CompoundProcessor(Collections.singletonList(onFailureProcessor), Collections.singletonList(onFailureOnFailureProcessor)))); 
when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); doThrow(new RuntimeException()).when(onFailureOnFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, times(1)).accept(any(RuntimeException.class)); - verify(completionHandler, never()).accept(anyBoolean()); + verify(failureHandler, times(1)).accept(Matchers.any(RuntimeException.class)); + verify(completionHandler, never()).accept(Matchers.anyBoolean()); } - @SuppressWarnings("unchecked") - public void testExecuteTTL() throws Exception { - // test with valid ttl - SetProcessor.Factory metaProcessorFactory = new SetProcessor.Factory(TestTemplateService.instance()); - Map config = new HashMap<>(); - config.put("field", "_ttl"); - config.put("value", "5d"); - Processor processor = metaProcessorFactory.create(config); + + public void testExecuteSetTTL() throws Exception { + Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "5d")); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); + 
@SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); - verify(failureHandler, never()).accept(any()); + verify(failureHandler, never()).accept(Matchers.any()); verify(completionHandler, times(1)).accept(true); + } - // test with invalid ttl - metaProcessorFactory = new SetProcessor.Factory(TestTemplateService.instance()); - config = new HashMap<>(); - config.put("field", "_ttl"); - config.put("value", "abc"); - processor = metaProcessorFactory.create(config); + public void testExecuteSetInvalidTTL() throws Exception { + Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "abc")); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); - indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); - failureHandler = mock(Consumer.class); - completionHandler = mock(Consumer.class); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + @SuppressWarnings("unchecked") + Consumer failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") + Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - verify(failureHandler, times(1)).accept(any(ElasticsearchParseException.class)); - verify(completionHandler, never()).accept(anyBoolean()); + verify(failureHandler, times(1)).accept(Matchers.any(ElasticsearchParseException.class)); + verify(completionHandler, never()).accept(Matchers.anyBoolean()); + } - // test with provided ttl + public void testExecuteProvidedTTL() throws Exception { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", mock(CompoundProcessor.class))); - indexRequest = new 
IndexRequest("_index", "_type", "_id") + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id") .source(Collections.emptyMap()) .ttl(1000L); - failureHandler = mock(Consumer.class); - completionHandler = mock(Consumer.class); + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L))); - verify(failureHandler, never()).accept(any()); + verify(failureHandler, never()).accept(Matchers.any()); verify(completionHandler, times(1)).accept(true); } @@ -272,7 +272,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { CompoundProcessor processor = mock(CompoundProcessor.class); Exception error = new RuntimeException(); - doThrow(error).when(processor).execute(any()); + doThrow(error).when(processor).execute(Matchers.any()); when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor)); Consumer requestItemErrorHandler = mock(Consumer.class); @@ -296,11 +296,13 @@ public class PipelineExecutionServiceTests extends ESTestCase { String pipelineId = "_id"; when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, new CompoundProcessor())); + @SuppressWarnings("unchecked") Consumer requestItemErrorHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), pipelineId, requestItemErrorHandler, completionHandler); - verify(requestItemErrorHandler, never()).accept(any()); + verify(requestItemErrorHandler, never()).accept(Matchers.any()); verify(completionHandler, times(1)).accept(true); } diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java index f3bb8f38451..e1a46e7f0d6 100644 --- 
a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java similarity index 93% rename from plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java rename to core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index 84ee9ad0ee9..e48dd124b4c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/plugin/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.get.GetRequest; @@ -48,7 +48,6 @@ import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -64,8 +63,8 @@ public class PipelineStoreTests extends ESTestCase { TransportService transportService = mock(TransportService.class); client = mock(Client.class); - when(client.search(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); - when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); + when(client.search(Matchers.any())).thenReturn(expectedSearchReponse(Collections.emptyList())); + when(client.searchScroll(Matchers.any())).thenReturn(expectedSearchReponse(Collections.emptyList())); store = new 
PipelineStore(settings, clusterService, transportService); store.setClient(client); store.start(); @@ -77,15 +76,15 @@ public class PipelineStoreTests extends ESTestCase { .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(client.search(any())).thenReturn(expectedSearchReponse(hits)); - when(client.get(any())).thenReturn(expectedGetResponse(true)); + when(client.search(Matchers.any())).thenReturn(expectedSearchReponse(hits)); + when(client.get(Matchers.any())).thenReturn(expectedGetResponse(true)); assertThat(store.get("1"), nullValue()); store.updatePipelines(); assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); - when(client.get(any())).thenReturn(expectedGetResponse(true)); + when(client.get(Matchers.any())).thenReturn(expectedGetResponse(true)); hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); @@ -109,7 +108,7 @@ public class PipelineStoreTests extends ESTestCase { hits.add(new InternalSearchHit(0, "foo", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "bar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "foobar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - when(client.search(any())).thenReturn(expectedSearchReponse(hits)); + when(client.search(Matchers.any())).thenReturn(expectedSearchReponse(hits)); store.updatePipelines(); List result = store.getReference("foo"); diff --git a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java index d9a7086412a..0a44a9e6576 100644 --- 
a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.ingest; import org.elasticsearch.env.Environment; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.test.ESTestCase; import java.util.Map; diff --git a/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java similarity index 98% rename from core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java index b6a508eb9a1..6cc38e12536 100644 --- a/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java @@ -17,8 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; +import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/core/src/test/java/org/elasticsearch/ingest/core/ConfigurationUtilsTests.java similarity index 98% rename from core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java rename to core/src/test/java/org/elasticsearch/ingest/core/ConfigurationUtilsTests.java index a1d736d1c1e..958378f355a 100644 --- a/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/ConfigurationUtilsTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java similarity index 99% rename from core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java rename to core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java index 9076b2102cb..56d1fa76c64 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java @@ -17,8 +17,10 @@ * under the License. */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java b/core/src/test/java/org/elasticsearch/ingest/core/ValueSourceTests.java similarity index 95% rename from core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java rename to core/src/test/java/org/elasticsearch/ingest/core/ValueSourceTests.java index 63eae63a400..f2aa9f32bcd 100644 --- a/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/ValueSourceTests.java @@ -17,8 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index cc704c19984..8fb73cff896 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java index 46bba1b9edb..108cc5d40d0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java @@ -19,11 +19,11 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.TemplateService; +import org.elasticsearch.ingest.core.ValueSource; +import 
org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java index 68386b18a33..c7f260c2e3d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.ArrayList; import java.util.List; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index e919f4f49c3..1c047382a03 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java 
b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java index 6d2bdf8b7d7..574e41fedb7 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java @@ -19,10 +19,10 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.TemplateService; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java index adb76799d4b..445853dccb3 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java @@ -31,8 +31,8 @@ import com.maxmind.geoip2.record.Subdivision; import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; import java.io.Closeable; import java.io.IOException; @@ -56,8 +56,8 @@ import java.util.Map; import java.util.Set; import java.util.stream.Stream; -import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; -import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; +import static org.elasticsearch.ingest.core.ConfigurationUtils.readOptionalList; +import static 
org.elasticsearch.ingest.core.ConfigurationUtils.readStringProperty; public final class GeoIpProcessor implements Processor { diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java index d85cde13d05..5481a4dfffe 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.io.BufferedReader; import java.io.IOException; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java index b831ec511e1..c201729eb6d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.Map; import java.util.regex.Matcher; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java index 3582dce278a..08ac9d1939c 100644 --- 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.List; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java index efad3707398..a3c5f761b65 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java @@ -19,10 +19,10 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.TemplateService; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java index 5b134069070..5a9e4d5d40c 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import 
org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java index 0f2ac29be49..a43e60587de 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java @@ -19,11 +19,11 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.TemplateService; -import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.TemplateService; +import org.elasticsearch.ingest.core.ValueSource; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java index b1e50b20cd6..b1d9c23eb54 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import 
org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.Processor; import java.util.Arrays; import java.util.Map; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java deleted file mode 100644 index e168173cca0..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestModule.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plugin.ingest; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.plugin.ingest.rest.IngestRestFilter; - -public class IngestModule extends AbstractModule { - - private final boolean ingestEnabled; - - public IngestModule(boolean ingestEnabled) { - this.ingestEnabled = ingestEnabled; - } - - @Override - protected void configure() { - // Even if ingest isn't enabled we still need to make sure that rest requests with pipeline - // param copy the pipeline into the context, so that in IngestDisabledActionFilter - // index/bulk requests can be failed - binder().bind(IngestRestFilter.class).asEagerSingleton(); - if (ingestEnabled) { - binder().bind(IngestBootstrapper.class).asEagerSingleton(); - } - } -} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 514fb44d1da..e08fca8b134 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -19,14 +19,8 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.action.ActionModule; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.ProcessorsModule; +import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.ingest.processor.AppendProcessor; import org.elasticsearch.ingest.processor.ConvertProcessor; import org.elasticsearch.ingest.processor.DateProcessor; @@ -42,45 +36,16 @@ import org.elasticsearch.ingest.processor.SetProcessor; import org.elasticsearch.ingest.processor.SplitProcessor; import 
org.elasticsearch.ingest.processor.TrimProcessor; import org.elasticsearch.ingest.processor.UppercaseProcessor; -import org.elasticsearch.plugin.ingest.rest.RestDeletePipelineAction; -import org.elasticsearch.plugin.ingest.rest.RestGetPipelineAction; -import org.elasticsearch.plugin.ingest.rest.RestIngestDisabledAction; -import org.elasticsearch.plugin.ingest.rest.RestPutPipelineAction; -import org.elasticsearch.plugin.ingest.rest.RestSimulatePipelineAction; -import org.elasticsearch.plugin.ingest.transport.IngestActionFilter; -import org.elasticsearch.plugin.ingest.transport.IngestDisabledActionFilter; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineAction; -import org.elasticsearch.plugin.ingest.transport.delete.DeletePipelineTransportAction; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineAction; -import org.elasticsearch.plugin.ingest.transport.get.GetPipelineTransportAction; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineAction; -import org.elasticsearch.plugin.ingest.transport.put.PutPipelineTransportAction; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineAction; -import org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineTransportAction; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptModule; - -import java.util.Collection; -import java.util.Collections; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class IngestPlugin extends Plugin { - public static final String PIPELINE_ID_PARAM_CONTEXT_KEY = "__pipeline_id__"; - public static final String PIPELINE_ID_PARAM = "pipeline"; - public static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; public static final String NAME = "ingest"; - public static final String NODE_INGEST_SETTING = "node.ingest"; - private final Settings nodeSettings; private final boolean ingestEnabled; - private final boolean transportClient; public 
IngestPlugin(Settings nodeSettings) { - this.nodeSettings = nodeSettings; - this.ingestEnabled = nodeSettings.getAsBoolean(NODE_INGEST_SETTING, false); - this.transportClient = TransportClient.CLIENT_TYPE.equals(nodeSettings.get(Client.CLIENT_TYPE_SETTING)); + this.ingestEnabled = nodeSettings.getAsBoolean("node.ingest", false); } @Override @@ -90,86 +55,26 @@ public class IngestPlugin extends Plugin { @Override public String description() { - return "Plugin that allows to configure pipelines to preprocess documents before indexing"; + return "Plugin that allows to plug in ingest processors"; } - @Override - public Collection nodeModules() { - if (transportClient) { - return Collections.emptyList(); - } else { - return Collections.singletonList(new IngestModule(ingestEnabled)); - } - } - - @Override - public Collection> nodeServices() { - if (transportClient|| ingestEnabled == false) { - return Collections.emptyList(); - } else { - return Collections.singletonList(IngestBootstrapper.class); - } - } - - @Override - public Settings additionalSettings() { - return settingsBuilder() - .put(PipelineExecutionService.additionalSettings(nodeSettings)) - .build(); - } - - public void onModule(ProcessorsModule processorsModule) { + public void onModule(IngestModule ingestModule) { if (ingestEnabled) { - processorsModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); - processorsModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); - processorsModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); - processorsModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - processorsModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - 
processorsModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - processorsModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - processorsModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - processorsModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - processorsModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - processorsModule.addProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); - processorsModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - processorsModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - processorsModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - processorsModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); + ingestModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + ingestModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); + ingestModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + ingestModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + ingestModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + ingestModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + 
ingestModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + ingestModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + ingestModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + ingestModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + ingestModule.addProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); + ingestModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + ingestModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + ingestModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + ingestModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); } } - - public void onModule(ActionModule module) { - if (transportClient == false) { - if (ingestEnabled) { - module.registerFilter(IngestActionFilter.class); - } else { - module.registerFilter(IngestDisabledActionFilter.class); - } - } - if (ingestEnabled) { - module.registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); - module.registerAction(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); - module.registerAction(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class); - module.registerAction(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class); - } - } - - public void onModule(NetworkModule networkModule) { - if (transportClient) { - return; - } - - if (ingestEnabled) { - networkModule.registerRestHandler(RestPutPipelineAction.class); - networkModule.registerRestHandler(RestGetPipelineAction.class); - 
networkModule.registerRestHandler(RestDeletePipelineAction.class); - networkModule.registerRestHandler(RestSimulatePipelineAction.class); - } else { - networkModule.registerRestHandler(RestIngestDisabledAction.class); - } - } - - public void onModule(ScriptModule module) { - module.registerScriptContext(InternalTemplateService.INGEST_SCRIPT_CONTEXT); - } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java deleted file mode 100644 index 80d0784956d..00000000000 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/rest/RestIngestDisabledAction.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugin.ingest.rest; - -import org.elasticsearch.client.Client; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.BytesRestResponse; - -public class RestIngestDisabledAction extends BaseRestHandler { - - @Inject - public RestIngestDisabledAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); - controller.registerHandler(RestRequest.Method.DELETE, "/_ingest/pipeline/{id}", this); - controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this); - controller.registerHandler(RestRequest.Method.PUT, "/_ingest/pipeline/{id}", this); - controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); - controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this); - controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); - controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate", this); - } - - @Override - protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { - channel.sendResponse(new BytesRestResponse(channel, new IllegalArgumentException("ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used"))); - } -} diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java index a8840b42d7a..1113a4b402f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java +++ 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java @@ -19,8 +19,8 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java index 8c27d881252..7853709240b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java @@ -19,12 +19,12 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.ValueSource; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java index 487dca4c232..040cac4851f 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java @@ -19,9 +19,9 @@ package 
org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java index 46a81ca48ba..f7aba42d549 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java index b6d4df9103a..0ee3068d367 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java index 4f830fe5559..818e9054749 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java @@ -20,9 +20,8 @@ package org.elasticsearch.ingest.processor; import com.maxmind.geoip2.DatabaseReader; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.GeoIpProcessor; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java index c554b31aaf1..ed9b8f6e621 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.processor.Grok; -import org.elasticsearch.ingest.processor.GrokProcessor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java index c2873ebf569..9c7a9bd721c 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java @@ -19,9 +19,9 @@ package 
org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java index 65552b41c02..cbd4dd66143 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java index 952354cc800..891dc57ffc6 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java @@ -19,10 +19,10 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import 
java.util.HashMap; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java index c9e2e8be51c..c42ca825652 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java index 7de742c908f..b66cc24202a 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java @@ -19,12 +19,12 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.ValueSource; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java 
b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java index b4487a6a09b..d5a587fbd41 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java @@ -19,9 +19,9 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java index bdd37c86d58..326571fb17b 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java @@ -21,7 +21,8 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.InternalTemplateService; +import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.mustache.MustacheScriptEngineService; @@ -41,9 +42,7 @@ public abstract class AbstractMustacheTests extends ESTestCase { .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .build(); MustacheScriptEngineService mustache = new MustacheScriptEngineService(settings); - ScriptContextRegistry registry = new ScriptContextRegistry( - 
Collections.singletonList(InternalTemplateService.INGEST_SCRIPT_CONTEXT) - ); + ScriptContextRegistry registry = new ScriptContextRegistry(Collections.emptyList()); ScriptService scriptService = new ScriptService( settings, new Environment(settings), Collections.singleton(mustache), null, registry ); diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java index c7e76cb062b..6a6c8712d6e 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ValueSource; import java.util.ArrayList; import java.util.Arrays; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java index 47fef1a2228..e37cd323364 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java @@ -20,9 +20,9 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ValueSource; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ValueSource; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.processor.SetProcessor; import org.hamcrest.Matchers; 
diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java index be34d25b726..ebd32d4c752 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.ingest.TemplateService; +import org.elasticsearch.ingest.core.TemplateService; import java.util.Collections; import java.util.HashMap; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java index 3b9e5245bb5..967c51be27d 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.ValueSource; import java.util.Arrays; import java.util.Collections; diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java index 5699cab7cfa..3f350cf425c 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -23,6 +23,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import 
com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.elasticsearch.common.Strings; +import org.elasticsearch.ingest.core.IngestDocument; import java.util.ArrayList; import java.util.HashMap; diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java index 67a2406da75..5c4dd701a72 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java @@ -19,6 +19,9 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; + import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java index d44764fa8ac..9330db1bfcb 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java @@ -19,6 +19,8 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ingest.core.TemplateService; + import java.util.Map; public class TestTemplateService implements TemplateService { From c711570fd36d97b56b94e33a067dbde05557a169 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 6 Jan 2016 19:22:28 +0100 Subject: [PATCH 164/347] remove needless changes (missing static imports) --- .../ingest/IngestBootstrapperTests.java | 29 +++++------ .../ingest/PipelineExecutionServiceTests.java | 48 ++++++++++--------- .../ingest/PipelineStoreTests.java | 17 +++---- 3 files changed, 49 insertions(+), 45 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java index 
8f414ea1e67..a352c3af723 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java @@ -46,7 +46,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.junit.Before; -import org.mockito.Matchers; import java.util.ArrayList; import java.util.Collections; @@ -55,6 +54,8 @@ import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.Is.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -70,7 +71,7 @@ public class IngestBootstrapperTests extends ESTestCase { @Before public void init() { ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.executor(Matchers.any())).thenReturn(Runnable::run); + when(threadPool.executor(any())).thenReturn(Runnable::run); ClusterService clusterService = mock(ClusterService.class); store = mock(PipelineStore.class); when(store.isStarted()).thenReturn(false); @@ -84,8 +85,8 @@ public class IngestBootstrapperTests extends ESTestCase { TransportService transportService = mock(TransportService.class); ClusterService clusterService = mock(ClusterService.class); - when(client.search(Matchers.any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); - when(client.searchScroll(Matchers.any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); + when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); + when(client.searchScroll(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); Settings settings = Settings.EMPTY; PipelineStore store = 
new PipelineStore(settings, clusterService, transportService); IngestBootstrapper bootstrapper = new IngestBootstrapper( @@ -97,8 +98,8 @@ public class IngestBootstrapperTests extends ESTestCase { hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(client.search(Matchers.any())).thenReturn(PipelineStoreTests.expectedSearchReponse(hits)); - when(client.get(Matchers.any())).thenReturn(PipelineStoreTests.expectedGetResponse(true)); + when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(hits)); + when(client.get(any())).thenReturn(PipelineStoreTests.expectedGetResponse(true)); try { store.get("1"); @@ -145,7 +146,7 @@ public class IngestBootstrapperTests extends ESTestCase { ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, never()).stop(Matchers.anyString()); + verify(store, never()).stop(anyString()); } public void testPipelineStoreBootstrappingGlobalStateNoMasterBlock() throws Exception { @@ -160,13 +161,13 @@ public class IngestBootstrapperTests extends ESTestCase { // We're not started and there is a no master block, doing nothing: bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, never()).stop(Matchers.anyString()); + verify(store, never()).stop(anyString()); // We're started and there is a no master block, so we stop the store: when(store.isStarted()).thenReturn(true); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, times(1)).stop(Matchers.anyString()); + verify(store, times(1)).stop(anyString()); } public void testPipelineStoreBootstrappingNoIngestIndex() throws Exception { @@ -202,13 +203,13 @@ public class IngestBootstrapperTests extends ESTestCase { // We're not 
running and the cluster state isn't ready, so we don't start. bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, never()).stop(Matchers.anyString()); + verify(store, never()).stop(anyString()); // We're running and the cluster state indicates that all our shards are unassigned, so we stop. when(store.isStarted()).thenReturn(true); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, never()).start(); - verify(store, times(1)).stop(Matchers.anyString()); + verify(store, times(1)).stop(anyString()); } public void testPipelineStoreBootstrappingIngestIndexShardsStarted() throws Exception { @@ -235,13 +236,13 @@ public class IngestBootstrapperTests extends ESTestCase { // We're not running and the cluster state is ready, so we start. bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, times(1)).start(); - verify(store, never()).stop(Matchers.anyString()); + verify(store, never()).stop(anyString()); // We're running and the cluster state is good, so we do nothing. 
when(store.isStarted()).thenReturn(true); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, times(1)).start(); - verify(store, never()).stop(Matchers.anyString()); + verify(store, never()).stop(anyString()); } public void testPipelineStoreBootstrappingFailure() throws Exception { @@ -269,7 +270,7 @@ public class IngestBootstrapperTests extends ESTestCase { doThrow(new RuntimeException()).doNothing().when(store).start(); bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); verify(store, times(2)).start(); - verify(store, never()).stop(Matchers.anyString()); + verify(store, never()).stop(anyString()); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 8e948d9f108..5f31dd24621 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; import org.mockito.ArgumentMatcher; -import org.mockito.Matchers; import org.mockito.invocation.InvocationOnMock; import java.util.Collections; @@ -43,6 +42,10 @@ import java.util.Objects; import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.argThat; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -60,7 +63,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void setup() { store = mock(PipelineStore.class); ThreadPool threadPool = mock(ThreadPool.class); - 
when(threadPool.executor(Matchers.anyString())).thenReturn(Runnable::run); + when(threadPool.executor(anyString())).thenReturn(Runnable::run); executionService = new PipelineExecutionService(store, threadPool); } @@ -77,8 +80,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist")); } - verify(failureHandler, never()).accept(Matchers.any(Throwable.class)); - verify(completionHandler, never()).accept(Matchers.anyBoolean()); + verify(failureHandler, never()).accept(any(Throwable.class)); + verify(completionHandler, never()).accept(anyBoolean()); } public void testExecuteSuccess() throws Exception { @@ -91,7 +94,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - verify(failureHandler, never()).accept(Matchers.any()); + verify(failureHandler, never()).accept(any()); verify(completionHandler, times(1)).accept(true); } @@ -108,7 +111,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { } return null; - }).when(processor).execute(Matchers.any()); + }).when(processor).execute(any()); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); @@ -117,8 +120,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - verify(processor).execute(Matchers.any()); - verify(failureHandler, never()).accept(Matchers.any()); + verify(processor).execute(any()); + verify(failureHandler, never()).accept(any()); verify(completionHandler, times(1)).accept(true); 
assertThat(indexRequest.index(), equalTo("update_index")); @@ -141,8 +144,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, times(1)).accept(Matchers.any(RuntimeException.class)); - verify(completionHandler, never()).accept(Matchers.anyBoolean()); + verify(failureHandler, times(1)).accept(any(RuntimeException.class)); + verify(completionHandler, never()).accept(anyBoolean()); } public void testExecuteSuccessWithOnFailure() throws Exception { @@ -157,7 +160,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - verify(failureHandler, never()).accept(Matchers.any(RuntimeException.class)); + verify(failureHandler, never()).accept(any(RuntimeException.class)); verify(completionHandler, times(1)).accept(true); } @@ -175,8 +178,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, times(1)).accept(Matchers.any(RuntimeException.class)); - verify(completionHandler, never()).accept(Matchers.anyBoolean()); + verify(failureHandler, times(1)).accept(any(RuntimeException.class)); + verify(completionHandler, never()).accept(anyBoolean()); } public void testExecuteFailureWithNestedOnFailure() throws Exception { @@ -196,11 +199,10 @@ public class PipelineExecutionServiceTests extends ESTestCase { Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", 
failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); - verify(failureHandler, times(1)).accept(Matchers.any(RuntimeException.class)); - verify(completionHandler, never()).accept(Matchers.anyBoolean()); + verify(failureHandler, times(1)).accept(any(RuntimeException.class)); + verify(completionHandler, never()).accept(anyBoolean()); } - public void testExecuteSetTTL() throws Exception { Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "5d")); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); @@ -213,7 +215,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { executionService.execute(indexRequest, "_id", failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); - verify(failureHandler, never()).accept(Matchers.any()); + verify(failureHandler, never()).accept(any()); verify(completionHandler, times(1)).accept(true); } @@ -227,8 +229,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(indexRequest, "_id", failureHandler, completionHandler); - verify(failureHandler, times(1)).accept(Matchers.any(ElasticsearchParseException.class)); - verify(completionHandler, never()).accept(Matchers.anyBoolean()); + verify(failureHandler, times(1)).accept(any(ElasticsearchParseException.class)); + verify(completionHandler, never()).accept(anyBoolean()); } public void testExecuteProvidedTTL() throws Exception { @@ -242,7 +244,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { executionService.execute(indexRequest, "_id", failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L))); - verify(failureHandler, never()).accept(Matchers.any()); + verify(failureHandler, 
never()).accept(any()); verify(completionHandler, times(1)).accept(true); } @@ -272,7 +274,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { CompoundProcessor processor = mock(CompoundProcessor.class); Exception error = new RuntimeException(); - doThrow(error).when(processor).execute(Matchers.any()); + doThrow(error).when(processor).execute(any()); when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor)); Consumer requestItemErrorHandler = mock(Consumer.class); @@ -302,12 +304,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), pipelineId, requestItemErrorHandler, completionHandler); - verify(requestItemErrorHandler, never()).accept(Matchers.any()); + verify(requestItemErrorHandler, never()).accept(any()); verify(completionHandler, times(1)).accept(true); } private IngestDocument eqID(String index, String type, String id, Map source) { - return Matchers.argThat(new IngestDocumentMatcher(index, type, id, source)); + return argThat(new IngestDocumentMatcher(index, type, id, source)); } private class IngestDocumentMatcher extends ArgumentMatcher { diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index e48dd124b4c..bca06268000 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -37,7 +37,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.junit.Before; import org.mockito.ArgumentMatcher; -import org.mockito.Matchers; import java.util.ArrayList; import java.util.Collections; @@ -48,6 +47,8 @@ import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; +import static 
org.mockito.Matchers.any; +import static org.mockito.Matchers.argThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -63,8 +64,8 @@ public class PipelineStoreTests extends ESTestCase { TransportService transportService = mock(TransportService.class); client = mock(Client.class); - when(client.search(Matchers.any())).thenReturn(expectedSearchReponse(Collections.emptyList())); - when(client.searchScroll(Matchers.any())).thenReturn(expectedSearchReponse(Collections.emptyList())); + when(client.search(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); + when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); store = new PipelineStore(settings, clusterService, transportService); store.setClient(client); store.start(); @@ -76,15 +77,15 @@ public class PipelineStoreTests extends ESTestCase { .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) ); - when(client.search(Matchers.any())).thenReturn(expectedSearchReponse(hits)); - when(client.get(Matchers.any())).thenReturn(expectedGetResponse(true)); + when(client.search(any())).thenReturn(expectedSearchReponse(hits)); + when(client.get(any())).thenReturn(expectedGetResponse(true)); assertThat(store.get("1"), nullValue()); store.updatePipelines(); assertThat(store.get("1").getId(), equalTo("1")); assertThat(store.get("1").getDescription(), equalTo("_description1")); - when(client.get(Matchers.any())).thenReturn(expectedGetResponse(true)); + when(client.get(any())).thenReturn(expectedGetResponse(true)); hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) ); @@ -108,7 +109,7 @@ public class PipelineStoreTests extends ESTestCase { hits.add(new InternalSearchHit(0, "foo", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "bar", new 
Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); hits.add(new InternalSearchHit(0, "foobar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - when(client.search(Matchers.any())).thenReturn(expectedSearchReponse(hits)); + when(client.search(any())).thenReturn(expectedSearchReponse(hits)); store.updatePipelines(); List result = store.getReference("foo"); @@ -165,7 +166,7 @@ public class PipelineStoreTests extends ESTestCase { } static GetRequest eqGetRequest(String index, String type, String id) { - return Matchers.argThat(new GetRequestMatcher(index, type, id)); + return argThat(new GetRequestMatcher(index, type, id)); } static class GetRequestMatcher extends ArgumentMatcher { From e036b5896d6337b2015999cc035a2ac8d3a3aef3 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 11:29:43 +0100 Subject: [PATCH 165/347] move registerFilter up --- .../elasticsearch/action/ActionModule.java | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 0e5e15071ea..09a6dc6dceb 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -155,12 +155,16 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.get.TransportGetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction; +import org.elasticsearch.action.ingest.IngestActionFilter; +import org.elasticsearch.action.ingest.IngestDisabledActionFilter; import org.elasticsearch.action.ingest.delete.DeletePipelineAction; import 
org.elasticsearch.action.ingest.delete.DeletePipelineTransportAction; import org.elasticsearch.action.ingest.get.GetPipelineAction; import org.elasticsearch.action.ingest.get.GetPipelineTransportAction; import org.elasticsearch.action.ingest.put.PutPipelineAction; import org.elasticsearch.action.ingest.put.PutPipelineTransportAction; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; +import org.elasticsearch.action.ingest.simulate.SimulatePipelineTransportAction; import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.PercolateAction; import org.elasticsearch.action.percolate.TransportMultiPercolateAction; @@ -199,10 +203,6 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.IngestActionFilter; -import org.elasticsearch.action.ingest.IngestDisabledActionFilter; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineTransportAction; import java.util.ArrayList; import java.util.HashMap; @@ -259,6 +259,13 @@ public class ActionModule extends AbstractModule { @Override protected void configure() { + if (proxy == false) { + if (ingestEnabled) { + registerFilter(IngestActionFilter.class); + } else { + registerFilter(IngestDisabledActionFilter.class); + } + } Multibinder actionFilterMultibinder = Multibinder.newSetBinder(binder(), ActionFilter.class); for (Class actionFilter : actionFilters) { @@ -377,11 +384,6 @@ public class ActionModule extends AbstractModule { // register GenericAction -> transportAction Map that can be injected to instances. 
// also register any supporting classes if (!proxy) { - if (ingestEnabled) { - registerFilter(IngestActionFilter.class); - } else { - registerFilter(IngestDisabledActionFilter.class); - } bind(TransportLivenessAction.class).asEagerSingleton(); MapBinder transportActionsBinder = MapBinder.newMapBinder(binder(), GenericAction.class, TransportAction.class); From c185c1339ab9165404e1d5d7a43186900d4efca4 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 11:30:02 +0100 Subject: [PATCH 166/347] remove grok and join usage from IngestClientIT --- .../elasticsearch/ingest/IngestClientIT.java | 59 +++++++------------ 1 file changed, 22 insertions(+), 37 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index a42635d2d79..872180be17d 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -38,20 +37,17 @@ import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; import org.elasticsearch.action.ingest.simulate.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.simulate.SimulatePipelineResponse; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static 
org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; @@ -81,7 +77,8 @@ public class IngestClientIT extends ESIntegTestCase { protected Settings externalClusterClientSettings() { return Settings.builder() .put(super.transportClientSettings()) - .put("node.ingest", true) + //TODO can we remove this? + .put("node.ingest", true) .build(); } @@ -115,6 +112,7 @@ public class IngestClientIT extends ESIntegTestCase { .field("_id", "id") .startObject("_source") .field("foo", "bar") + .field("fail", false) .endObject() .endObject() .endArray() @@ -128,6 +126,7 @@ public class IngestClientIT extends ESIntegTestCase { SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); Map source = new HashMap<>(); source.put("foo", "bar"); + source.put("fail", false); source.put("processed", true); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, source); assertThat(simulateDocumentSimpleResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); @@ -143,9 +142,7 @@ public class IngestClientIT extends ESIntegTestCase { .field("description", "my_pipeline") .startArray("processors") .startObject() - .startObject("join") - .field("field", "field1") - .field("separator", "|") + .startObject("test") .endObject() .endObject() .endArray() @@ -157,11 +154,7 @@ public class IngestClientIT extends ESIntegTestCase { bulkRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); for (int i = 0; i < numRequests; i++) { IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)); - if (i % 2 == 0) { - indexRequest.source("field1", Arrays.asList("value1", "value2")); - } else { - 
indexRequest.source("field2", Arrays.asList("value1", "value2")); - } + indexRequest.source("field", "value", "fail", i % 2 == 0); bulkRequest.add(indexRequest); } @@ -170,12 +163,12 @@ public class IngestClientIT extends ESIntegTestCase { for (int i = 0; i < bulkRequest.requests().size(); i++) { BulkItemResponse itemResponse = response.getItems()[i]; if (i % 2 == 0) { + BulkItemResponse.Failure failure = itemResponse.getFailure(); + assertThat(failure.getMessage(), equalTo("java.lang.IllegalArgumentException: test processor failed")); + } else { IndexResponse indexResponse = itemResponse.getResponse(); assertThat(indexResponse.getId(), equalTo(Integer.toString(i))); assertThat(indexResponse.isCreated(), is(true)); - } else { - BulkItemResponse.Failure failure = itemResponse.getFailure(); - assertThat(failure.getMessage(), equalTo("java.lang.IllegalArgumentException: field [field1] not present as part of path [field1]")); } } } @@ -187,9 +180,7 @@ public class IngestClientIT extends ESIntegTestCase { .field("description", "my_pipeline") .startArray("processors") .startObject() - .startObject("grok") - .field("field", "field1") - .field("pattern", "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>") + .startObject("test") .endObject() .endObject() .endArray() @@ -202,32 +193,21 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); - createIndex("test"); - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties") - .startObject("status").field("type", "integer").endObject() - .startObject("val").field("type", "float").endObject() - .endObject(); - PutMappingResponse putMappingResponse = client().admin().indices() - .preparePutMapping("test").setType("type").setSource(updateMappingBuilder).get(); - assertAcked(putMappingResponse); - - client().prepareIndex("test", "type", "1").setSource("field1", "123.42 
400 ") + client().prepareIndex("test", "type", "1").setSource("field", "value", "fail", false) .putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id") .get(); Map doc = client().prepareGet("test", "type", "1") .get().getSourceAsMap(); - assertThat(doc.get("val"), equalTo(123.42)); - assertThat(doc.get("status"), equalTo(400)); - assertThat(doc.get("msg"), equalTo("foo")); + assertThat(doc.get("field"), equalTo("value")); + assertThat(doc.get("processed"), equalTo(true)); client().prepareBulk().add( - client().prepareIndex("test", "type", "2").setSource("field1", "123.42 400 ") + client().prepareIndex("test", "type", "2").setSource("field", "value2", "fail", false) ).putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id").get(); doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); - assertThat(doc.get("val"), equalTo(123.42)); - assertThat(doc.get("status"), equalTo(400)); - assertThat(doc.get("msg"), equalTo("foo")); + assertThat(doc.get("field"), equalTo("value2")); + assertThat(doc.get("processed"), equalTo(true)); DeleteResponse response = new DeletePipelineRequestBuilder(client(), DeletePipelineAction.INSTANCE) .setId("_id") @@ -261,7 +241,12 @@ public class IngestClientIT extends ESIntegTestCase { public void onModule(IngestModule ingestModule) { ingestModule.addProcessor("test", (environment, templateService) -> config -> - new TestProcessor("test", ingestDocument -> ingestDocument.setFieldValue("processed", true)) + new TestProcessor("test", ingestDocument -> { + ingestDocument.setFieldValue("processed", true); + if (ingestDocument.getFieldValue("fail", Boolean.class)) { + throw new IllegalArgumentException("test processor failed"); + } + }) ); } } From 4dca3cb38f07bb6ef3e19839a622d5d76316d664 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 12:14:13 +0100 Subject: [PATCH 167/347] fix thread pools, use management the do the operation and response handeling --- .../action/ingest/reload/ReloadPipelinesAction.java | 4 
++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java b/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java index c3770a94e5d..6a764132cb9 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java @@ -55,7 +55,7 @@ public class ReloadPipelinesAction extends AbstractComponent implements Transpor this.pipelineStore = pipelineStore; this.clusterService = clusterService; this.transportService = transportService; - transportService.registerRequestHandler(ACTION_NAME, ReloadPipelinesRequest::new, ThreadPool.Names.SAME, this); + transportService.registerRequestHandler(ACTION_NAME, ReloadPipelinesRequest::new, ThreadPool.Names.MANAGEMENT, this); } public void reloadPipelinesOnAllNodes(Consumer listener) { @@ -101,7 +101,7 @@ public class ReloadPipelinesAction extends AbstractComponent implements Transpor @Override public String executor() { - return ThreadPool.Names.MANAGEMENT; + return ThreadPool.Names.SAME; } }); } From 7e56f65939e3091f905c13d7f6b7cc7de75b3008 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 12:58:13 +0100 Subject: [PATCH 168/347] since ingest is part of core the ingest template always gets installed, so the assumptions this test had had to be updated --- .../ingest/IngestTemplateTests.java | 34 +------------------ 1 file changed, 1 insertion(+), 33 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java index 8ef444d7e19..d886b3f7294 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java @@ -57,43 +57,16 @@ public class IngestTemplateTests extends 
ESSingleNodeTestCase { bootstrapper.setClient(client()); } - public void testInstallIndexTemplate() throws Exception { - verifyNoIndexTemplates(); - ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", clusterState, clusterState)); + public void testIngestIndexTemplateIsInstalled() throws Exception { verifyIngestIndexTemplateExist(); } public void testInstallTemplateAfterItHasBeenRemoved() throws Exception { - verifyNoIndexTemplates(); - ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", clusterState, clusterState)); verifyIngestIndexTemplateExist(); - client().admin().indices().prepareDeleteTemplate(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); - verifyNoIndexTemplates(); - - clusterState = client().admin().cluster().prepareState().get().getState(); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", clusterState, clusterState)); verifyIngestIndexTemplateExist(); } - public void testDoNotInstallTemplateBecauseIngestIndexTemplateAlreadyExists() throws Exception { - // add an empty template and check that it doesn't get overwritten: - client().admin().indices().preparePutTemplate(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).setTemplate(".ingest").get(); - GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); - assertThat(response.getIndexTemplates().size(), Matchers.equalTo(1)); - assertThat(response.getIndexTemplates().get(0).getOrder(), Matchers.equalTo(0)); - - ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", clusterState, clusterState)); - - response = client().admin().indices().prepareGetTemplates(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); - 
assertThat(response.getIndexTemplates().size(), Matchers.equalTo(1)); - assertThat("The empty index template shouldn't get overwritten", response.getIndexTemplates().get(0).getOrder(), Matchers.equalTo(0)); - assertThat("The empty index template shouldn't get overwritten", response.getIndexTemplates().get(0).getMappings().size(), Matchers.equalTo(0)); - } - private static void verifyIngestIndexTemplateExist() { GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); assertThat(response.getIndexTemplates().size(), Matchers.equalTo(1)); @@ -103,9 +76,4 @@ public class IngestTemplateTests extends ESSingleNodeTestCase { assertThat(response.getIndexTemplates().get(0).getMappings().get(PipelineStore.TYPE), Matchers.notNullValue()); } - private static void verifyNoIndexTemplates() { - GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get(); - assertThat(response.getIndexTemplates().size(), Matchers.equalTo(0)); - } - } From 52c2a273f9d110525121ebf086da62e3d95f0833 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 12:06:33 +0100 Subject: [PATCH 169/347] amended error message --- core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java index 14c33e7b148..7c3d673a218 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java @@ -91,7 +91,7 @@ public final class Pipeline { } return new CompoundProcessor(Collections.singletonList(processor), onFailureProcessors); } - throw new IllegalArgumentException("No processor type exist with name [" + type + "]"); + throw new IllegalArgumentException("No processor type exists with name [" + type + "]"); } private List 
readProcessors(String fieldName, Map processorRegistry, Map config) throws Exception { From 18aabd67c8c4be916a7b795233d6335019fff811 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 13:20:35 +0100 Subject: [PATCH 170/347] adapt qa tests for when ingest.node is set to false CRUD and simulate apis work now fine, every node has the pipelines in memory, but node.ingest disables ingestion, meaning that any index or bulk request with a pipeline id is going to fail --- .../ingest/reload/ReloadPipelinesAction.java | 20 +---- .../reload/ReloadPipelinesActionTests.java | 78 +++--------------- .../plugin/ingest/IngestPlugin.java | 32 ++++---- .../ingest_mustache/10_ingest_disabled.yaml | 81 +++++++++++++++---- 4 files changed, 97 insertions(+), 114 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java b/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java index 6a764132cb9..8d38c08c4b6 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java @@ -21,6 +21,7 @@ package org.elasticsearch.action.ingest.reload; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.PipelineStore; @@ -33,8 +34,6 @@ import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; -import java.util.ArrayList; -import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; @@ -59,21 +58,10 @@ public class ReloadPipelinesAction extends 
AbstractComponent implements Transpor } public void reloadPipelinesOnAllNodes(Consumer listener) { - List ingestNodes = new ArrayList<>(); - for (DiscoveryNode node : clusterService.state().getNodes()) { - String nodeEnabled = node.getAttributes().get("ingest"); - if ("true".equals(nodeEnabled)) { - ingestNodes.add(node); - } - } - - if (ingestNodes.isEmpty()) { - throw new IllegalStateException("There are no ingest nodes in this cluster"); - } - AtomicBoolean failed = new AtomicBoolean(); - AtomicInteger expectedResponses = new AtomicInteger(ingestNodes.size()); - for (DiscoveryNode node : ingestNodes) { + DiscoveryNodes nodes = clusterService.state().getNodes(); + AtomicInteger expectedResponses = new AtomicInteger(nodes.size()); + for (DiscoveryNode node : nodes) { ReloadPipelinesRequest nodeRequest = new ReloadPipelinesRequest(); transportService.sendRequest(node, ACTION_NAME, nodeRequest, new TransportResponseHandler() { @Override diff --git a/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java b/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java index abd66fc08d7..b6f767323c1 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java @@ -36,9 +36,7 @@ import org.junit.Before; import org.mockito.Matchers; import java.util.Collections; -import java.util.Map; -import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -61,24 +59,12 @@ public class ReloadPipelinesActionTests extends ESTestCase { public void testSuccess() { int numNodes = randomIntBetween(1, 10); - int numIngestNodes = 0; - - DiscoveryNodes.Builder discoNodes = DiscoveryNodes.builder(); - for (int i = 0; i < numNodes; i++) { - boolean ingestNode = i == 0 || 
randomBoolean(); - DiscoveryNode discoNode = generateDiscoNode(i, ingestNode); - discoNodes.put(discoNode); - if (ingestNode) { - numIngestNodes++; - } - } - ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); + ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(generateDiscoNodes(numNodes)).build(); when(clusterService.state()).thenReturn(state); - final int finalNumIngestNodes = numIngestNodes; doAnswer(mock -> { TransportResponseHandler handler = (TransportResponseHandler) mock.getArguments()[3]; - for (int i = 0; i < finalNumIngestNodes; i++) { + for (int i = 0; i < numNodes; i++) { handler.handleResponse(new ReloadPipelinesAction.ReloadPipelinesResponse()); } return mock; @@ -88,25 +74,14 @@ public class ReloadPipelinesActionTests extends ESTestCase { public void testWithAtLeastOneFailure() { int numNodes = randomIntBetween(1, 10); - int numIngestNodes = 0; - DiscoveryNodes.Builder discoNodes = DiscoveryNodes.builder(); - for (int i = 0; i < numNodes; i++) { - boolean ingestNode = i == 0 || randomBoolean(); - DiscoveryNode discoNode = generateDiscoNode(i, ingestNode); - discoNodes.put(discoNode); - if (ingestNode) { - numIngestNodes++; - } - } - ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); + ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(generateDiscoNodes(numNodes)).build(); when(clusterService.state()).thenReturn(state); - final int finalNumIngestNodes = numIngestNodes; doAnswer(mock -> { TransportResponseHandler handler = (TransportResponseHandler) mock.getArguments()[3]; handler.handleException(new TransportException("test failure")); - for (int i = 1; i < finalNumIngestNodes; i++) { + for (int i = 1; i < numNodes; i++) { if (randomBoolean()) { handler.handleResponse(new ReloadPipelinesAction.ReloadPipelinesResponse()); } else { @@ -118,44 +93,13 @@ public class ReloadPipelinesActionTests extends ESTestCase { 
reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> assertThat(result, is(false))); } - public void testNoIngestNodes() { - // expected exception if there are no nodes: - DiscoveryNodes discoNodes = DiscoveryNodes.builder() - .build(); - ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); - when(clusterService.state()).thenReturn(state); - - try { - reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> fail("shouldn't be invoked")); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("There are no ingest nodes in this cluster")); - } - - // expected exception if there are no ingest nodes: - discoNodes = DiscoveryNodes.builder() - .put(new DiscoveryNode("_name", "_id", new LocalTransportAddress("_id"), Collections.singletonMap("ingest", "false"), Version.CURRENT)) - .build(); - state = ClusterState.builder(new ClusterName("_name")).nodes(discoNodes).build(); - when(clusterService.state()).thenReturn(state); - - try { - reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> fail("shouldn't be invoked")); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("There are no ingest nodes in this cluster")); + private static DiscoveryNodes.Builder generateDiscoNodes(int numNodes) { + DiscoveryNodes.Builder discoNodes = DiscoveryNodes.builder(); + for (int i = 0; i < numNodes; i++) { + String id = Integer.toString(i); + DiscoveryNode discoNode = new DiscoveryNode(id, id, new LocalTransportAddress(id), Collections.emptyMap(), Version.CURRENT); + discoNodes.put(discoNode); } + return discoNodes; } - - private DiscoveryNode generateDiscoNode(int index, boolean ingestNode) { - Map attributes; - if (ingestNode) { - attributes = Collections.singletonMap("ingest", "true"); - } else { - attributes = randomBoolean() ? 
Collections.emptyMap() : Collections.singletonMap("ingest", "false"); - } - String id = String.valueOf(index); - return new DiscoveryNode(id, id, new LocalTransportAddress(id), attributes, Version.CURRENT); - } - } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index e08fca8b134..202acd56beb 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -59,22 +59,20 @@ public class IngestPlugin extends Plugin { } public void onModule(IngestModule ingestModule) { - if (ingestEnabled) { - ingestModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); - ingestModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); - ingestModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); - ingestModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - ingestModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - ingestModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - ingestModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - ingestModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - ingestModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - ingestModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - 
ingestModule.addProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); - ingestModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - ingestModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - ingestModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - ingestModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); - } + ingestModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + ingestModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); + ingestModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + ingestModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + ingestModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + ingestModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + ingestModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + ingestModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + ingestModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + ingestModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + ingestModule.addProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); + ingestModule.addProcessor(TrimProcessor.TYPE, 
(environment, templateService) -> new TrimProcessor.Factory()); + ingestModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + ingestModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + ingestModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); } } diff --git a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml index f470b3152bd..a8eb7861efc 100644 --- a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml +++ b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml @@ -1,7 +1,6 @@ --- -"Test ingest APIS fail when is disabled": +"Test ingest CRUD APIS work fine when node.ingest is set to false": - do: - catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ ingest.put_pipeline: id: "my_pipeline" body: > @@ -10,26 +9,36 @@ "processors": [ { "set" : { - "field" : "field", - "value": "valie" + "field" : "field2", + "value": "_value" } } ] } + - match: { _index: ".ingest" } + - match: { _type: "pipeline" } + - match: { _version: 1 } + - match: { _id: "my_pipeline" } - do: - catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ - ingest.delete_pipeline: - id: "my_pipeline" - - - do: - catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ ingest.get_pipeline: id: "my_pipeline" + - match: { my_pipeline._source.description: "_description" } + - match: { my_pipeline._version: 1 } - do: - catch: /ingest plugin is disabled, pipeline CRUD or simulate APIs cannot be used/ - ingest.simulate: + ingest.delete_pipeline: + id: "my_pipeline" + - match: { _index: ".ingest" } + - match: { _type: 
"pipeline" } + - match: { _version: 2 } + - match: { _id: "my_pipeline" } + - match: { found: true } + +--- +"Test ingest simulate API works fine when node.ingest is set to false": + - do: + ingest.put_pipeline: id: "my_pipeline" body: > { @@ -37,13 +46,38 @@ "processors": [ { "set" : { - "field" : "field", - "value": "valie" + "field" : "field2", + "value" : "_value" } } ] } + - match: { _id: "my_pipeline" } + - do: + ingest.simulate: + id: "my_pipeline" + body: > + { + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._source.foo: "bar" } + - match: { docs.0.doc._source.field2: "_value" } + - length: { docs.0.doc._ingest: 1 } + - is_true: docs.0.doc._ingest.timestamp + +--- +"Test index api with pipeline id fails when node.ingest is set to false": - do: catch: /ingest plugin is disabled, cannot execute pipeline with id \[my_pipeline_1\]/ ingest.index: @@ -56,3 +90,22 @@ field2: "2", field3: "3" } + +--- +"Test bulk api with pipeline id fails when node.ingest is set to false": + - do: + catch: /ingest plugin is disabled, cannot execute pipeline with id \[my_pipeline_1\]/ + ingest.bulk: + pipeline: "my_pipeline_1" + body: + - index: + _index: test_index + _type: test_type + _id: test_id + - f1: v1 + - index: + _index: test_index + _type: test_type + _id: test_id2 + - f1: v2 + From 2803ae09dcbf3b7b170aa2b6758ddbdc48193791 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 13:25:25 +0100 Subject: [PATCH 171/347] addProcessor -> registerProcessor --- .../elasticsearch/ingest/IngestModule.java | 4 +-- .../ingest/ProcessorsRegistry.java | 2 +- .../elasticsearch/ingest/IngestClientIT.java | 2 +- .../ingest/ProcessorsRegistryTests.java | 6 ++-- .../plugin/ingest/IngestPlugin.java | 30 +++++++++---------- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java 
b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java index ad1f0afb123..92a375a37e8 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java @@ -49,7 +49,7 @@ public class IngestModule extends AbstractModule { /** * Adds a processor factory under a specific type name. */ - public void addProcessor(String type, BiFunction> processorFactoryProvider) { - processorsRegistry.addProcessor(type, processorFactoryProvider); + public void registerProcessor(String type, BiFunction> processorFactoryProvider) { + processorsRegistry.registerProcessor(type, processorFactoryProvider); } } diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java index 2aa9a1238b1..3561d8079c9 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java @@ -35,7 +35,7 @@ public class ProcessorsRegistry { /** * Adds a processor factory under a specific name. 
*/ - public void addProcessor(String name, BiFunction> processorFactoryProvider) { + public void registerProcessor(String name, BiFunction> processorFactoryProvider) { BiFunction> provider = processorFactoryProviders.putIfAbsent(name, processorFactoryProvider); if (provider != null) { throw new IllegalArgumentException("Processor factory already registered for name [" + name + "]"); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 872180be17d..cbcf3ed8d31 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -240,7 +240,7 @@ public class IngestClientIT extends ESIntegTestCase { } public void onModule(IngestModule ingestModule) { - ingestModule.addProcessor("test", (environment, templateService) -> config -> + ingestModule.registerProcessor("test", (environment, templateService) -> config -> new TestProcessor("test", ingestDocument -> { ingestDocument.setFieldValue("processed", true); if (ingestDocument.getFieldValue("fail", Boolean.class)) { diff --git a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java index 0a44a9e6576..2869fffbafc 100644 --- a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java @@ -35,12 +35,12 @@ public class ProcessorsRegistryTests extends ESTestCase { public void testAddProcessor() { ProcessorsRegistry processorsRegistry = new ProcessorsRegistry(); TestProcessor.Factory factory1 = new TestProcessor.Factory(); - processorsRegistry.addProcessor("1", (environment, templateService) -> factory1); + processorsRegistry.registerProcessor("1", (environment, templateService) -> factory1); TestProcessor.Factory factory2 = new TestProcessor.Factory(); - 
processorsRegistry.addProcessor("2", (environment, templateService) -> factory2); + processorsRegistry.registerProcessor("2", (environment, templateService) -> factory2); TestProcessor.Factory factory3 = new TestProcessor.Factory(); try { - processorsRegistry.addProcessor("1", (environment, templateService) -> factory3); + processorsRegistry.registerProcessor("1", (environment, templateService) -> factory3); fail("addProcessor should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Processor factory already registered for name [1]")); diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 202acd56beb..21b57b11e43 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -59,20 +59,20 @@ public class IngestPlugin extends Plugin { } public void onModule(IngestModule ingestModule) { - ingestModule.addProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); - ingestModule.addProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); - ingestModule.addProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); - ingestModule.addProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - ingestModule.addProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - ingestModule.addProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - ingestModule.addProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - 
ingestModule.addProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - ingestModule.addProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - ingestModule.addProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - ingestModule.addProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); - ingestModule.addProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - ingestModule.addProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - ingestModule.addProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - ingestModule.addProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); + ingestModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); + ingestModule.registerProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + ingestModule.registerProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + ingestModule.registerProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + ingestModule.registerProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + ingestModule.registerProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + ingestModule.registerProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + 
ingestModule.registerProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + ingestModule.registerProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + ingestModule.registerProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); + ingestModule.registerProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + ingestModule.registerProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + ingestModule.registerProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + ingestModule.registerProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); } } From 95bc0ed7a2b3d83ebb1c1ee8850904d8cc464da7 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 13:40:28 +0100 Subject: [PATCH 172/347] move constants to IngestActionFilter --- .../action/ingest/IngestActionFilter.java | 13 ++++++++----- .../action/ingest/IngestDisabledActionFilter.java | 5 ++--- .../ingest/core/ConfigurationUtils.java | 4 ---- .../rest/action/ingest/IngestRestFilter.java | 6 +++--- .../action/ingest/IngestActionFilterTests.java | 15 +++++++-------- .../org/elasticsearch/ingest/IngestClientIT.java | 8 ++++---- 6 files changed, 24 insertions(+), 27 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 41a8af45120..d1550c09767 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestBootstrapper; import 
org.elasticsearch.ingest.PipelineExecutionService; -import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.tasks.Task; import java.util.ArrayList; @@ -44,6 +43,10 @@ import java.util.Set; public final class IngestActionFilter extends AbstractComponent implements ActionFilter { + public static final String PIPELINE_ID_PARAM_CONTEXT_KEY = "__pipeline_id__"; + public static final String PIPELINE_ID_PARAM = "pipeline"; + static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; + private final PipelineExecutionService executionService; @Inject @@ -54,9 +57,9 @@ public final class IngestActionFilter extends AbstractComponent implements Actio @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY); + String pipelineId = request.getFromContext(PIPELINE_ID_PARAM_CONTEXT_KEY); if (pipelineId == null) { - pipelineId = request.getHeader(ConfigurationUtils.PIPELINE_ID_PARAM); + pipelineId = request.getHeader(PIPELINE_ID_PARAM); if (pipelineId == null) { chain.proceed(task, action, request, listener); return; @@ -84,7 +87,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio // The IndexRequest has the same type on the node that receives the request and the node that // processes the primary action. 
This could lead to a pipeline being executed twice for the same // index request, hence this check - if (indexRequest.hasHeader(ConfigurationUtils.PIPELINE_ALREADY_PROCESSED)) { + if (indexRequest.hasHeader(PIPELINE_ALREADY_PROCESSED)) { chain.proceed(task, action, indexRequest, listener); return; } @@ -92,7 +95,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio logger.error("failed to execute pipeline [{}]", t, pipelineId); listener.onFailure(t); }, success -> { - indexRequest.putHeader(ConfigurationUtils.PIPELINE_ALREADY_PROCESSED, true); + indexRequest.putHeader(PIPELINE_ALREADY_PROCESSED, true); chain.proceed(task, action, indexRequest, listener); }); } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java index bd62969ec16..0dd7b4a1eff 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java @@ -24,17 +24,16 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.tasks.Task; -import org.elasticsearch.ingest.core.ConfigurationUtils; public final class IngestDisabledActionFilter implements ActionFilter { @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY); + String pipelineId = request.getFromContext(IngestActionFilter.PIPELINE_ID_PARAM_CONTEXT_KEY); if (pipelineId != null) { failRequest(pipelineId); } - pipelineId = request.getHeader(ConfigurationUtils.PIPELINE_ID_PARAM); + pipelineId = request.getHeader(IngestActionFilter.PIPELINE_ID_PARAM); if (pipelineId != null) { 
failRequest(pipelineId); } diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java b/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java index 5845a1299d3..c6204166908 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java @@ -24,10 +24,6 @@ import java.util.Map; public final class ConfigurationUtils { - public static final String PIPELINE_ID_PARAM_CONTEXT_KEY = "__pipeline_id__"; - public static final String PIPELINE_ID_PARAM = "pipeline"; - public static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; - private ConfigurationUtils() { } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java index 1c44ec323dc..d278a727dd9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java @@ -19,8 +19,8 @@ package org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ingest.IngestActionFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestFilter; @@ -36,8 +36,8 @@ public class IngestRestFilter extends RestFilter { @Override public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - if (request.hasParam(ConfigurationUtils.PIPELINE_ID_PARAM)) { - request.putInContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY, request.param(ConfigurationUtils.PIPELINE_ID_PARAM)); + if (request.hasParam(IngestActionFilter.PIPELINE_ID_PARAM)) { + request.putInContext(IngestActionFilter.PIPELINE_ID_PARAM_CONTEXT_KEY, 
request.param(IngestActionFilter.PIPELINE_ID_PARAM)); } filterChain.continueProcessing(request, channel); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index 3e91e8b2d1a..f2d46dd7095 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.ingest.IngestBootstrapper; import org.elasticsearch.ingest.PipelineExecutionService; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.ingest.core.CompoundProcessor; -import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; @@ -89,7 +88,7 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -103,7 +102,7 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putInContext(ConfigurationUtils.PIPELINE_ID_PARAM_CONTEXT_KEY, "_id"); + indexRequest.putInContext(IngestActionFilter.PIPELINE_ID_PARAM_CONTEXT_KEY, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -117,8 +116,8 @@ public class IngestActionFilterTests 
extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); - indexRequest.putHeader(ConfigurationUtils.PIPELINE_ALREADY_PROCESSED, true); + indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(IngestActionFilter.PIPELINE_ALREADY_PROCESSED, true); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -132,7 +131,7 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -154,7 +153,7 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); + indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -196,7 +195,7 @@ public class IngestActionFilterTests extends ESTestCase { filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); + bulkRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); int numRequest = scaledRandomIntBetween(8, 64); for (int i = 0; i < numRequest; i++) { if (rarely()) { 
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index cbcf3ed8d31..8ffa53d2b86 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.ingest.IngestActionFilter; import org.elasticsearch.action.ingest.delete.DeletePipelineAction; import org.elasticsearch.action.ingest.delete.DeletePipelineRequestBuilder; import org.elasticsearch.action.ingest.get.GetPipelineAction; @@ -37,7 +38,6 @@ import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; import org.elasticsearch.action.ingest.simulate.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.simulate.SimulatePipelineResponse; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -151,7 +151,7 @@ public class IngestClientIT extends ESIntegTestCase { int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id"); + bulkRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); for (int i = 0; i < numRequests; i++) { IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)); indexRequest.source("field", "value", "fail", i % 2 == 0); @@ -194,7 +194,7 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); client().prepareIndex("test", "type", 
"1").setSource("field", "value", "fail", false) - .putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id") + .putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id") .get(); Map doc = client().prepareGet("test", "type", "1") @@ -204,7 +204,7 @@ public class IngestClientIT extends ESIntegTestCase { client().prepareBulk().add( client().prepareIndex("test", "type", "2").setSource("field", "value2", "fail", false) - ).putHeader(ConfigurationUtils.PIPELINE_ID_PARAM, "_id").get(); + ).putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id").get(); doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); assertThat(doc.get("field"), equalTo("value2")); assertThat(doc.get("processed"), equalTo(true)); From e6cc79a976caeff9c3f38055600b7358961b5317 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 13:48:36 +0100 Subject: [PATCH 173/347] test: added ingest script context --- .../test/java/org/elasticsearch/script/FileScriptTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index 987aef90bc3..37f2ebb6dd3 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -63,7 +63,8 @@ public class FileScriptTests extends ESTestCase { .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", false) .put("script.engine." + MockScriptEngine.NAME + ".file.search", false) .put("script.engine." + MockScriptEngine.NAME + ".file.mapping", false) - .put("script.engine." + MockScriptEngine.NAME + ".file.update", false).build(); + .put("script.engine." + MockScriptEngine.NAME + ".file.update", false) + .put("script.engine." 
+ MockScriptEngine.NAME + ".file.ingest", false).build(); ScriptService scriptService = makeScriptService(settings); Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); for (ScriptContext context : ScriptContext.Standard.values()) { From 0bfe6de75c62ee778ec6a5705cb7bbf6196f1fc0 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 13:52:14 +0100 Subject: [PATCH 174/347] move all transport actions under same package org.elasticsearch.action.ingest --- .../elasticsearch/action/ActionModule.java | 16 +++++++------- .../{delete => }/DeletePipelineAction.java | 2 +- .../{delete => }/DeletePipelineRequest.java | 2 +- .../DeletePipelineRequestBuilder.java | 2 +- .../DeletePipelineTransportAction.java | 2 +- .../ingest/{get => }/GetPipelineAction.java | 2 +- .../ingest/{get => }/GetPipelineRequest.java | 2 +- .../{get => }/GetPipelineRequestBuilder.java | 2 +- .../ingest/{get => }/GetPipelineResponse.java | 2 +- .../{get => }/GetPipelineTransportAction.java | 2 +- .../ingest/{put => }/PutPipelineAction.java | 2 +- .../ingest/{put => }/PutPipelineRequest.java | 2 +- .../{put => }/PutPipelineRequestBuilder.java | 2 +- .../{put => }/PutPipelineTransportAction.java | 2 +- .../{reload => }/ReloadPipelinesAction.java | 2 +- .../SimulateDocumentResult.java | 2 +- .../SimulateDocumentSimpleResult.java | 2 +- .../SimulateDocumentVerboseResult.java | 2 +- .../SimulateExecutionService.java | 2 +- .../SimulatePipelineAction.java | 2 +- .../SimulatePipelineRequest.java | 2 +- .../SimulatePipelineRequestBuilder.java | 2 +- .../SimulatePipelineResponse.java | 2 +- .../SimulatePipelineTransportAction.java | 2 +- .../SimulateProcessorResult.java | 2 +- .../WriteableIngestDocument.java | 2 +- .../elasticsearch/ingest/PipelineStore.java | 6 ++--- .../ingest/RestDeletePipelineAction.java | 4 ++-- .../action/ingest/RestGetPipelineAction.java | 4 ++-- .../action/ingest/RestPutPipelineAction.java | 4 ++-- 
.../ingest/RestSimulatePipelineAction.java | 4 ++-- .../ReloadPipelinesActionTests.java | 2 +- .../SimulateDocumentSimpleResultTests.java | 3 ++- .../SimulateExecutionServiceTests.java | 6 ++++- .../SimulatePipelineRequestParsingTests.java | 5 +++-- .../SimulatePipelineResponseTests.java | 7 +++++- .../SimulateProcessorResultTests.java | 3 ++- .../WriteableIngestDocumentTests.java | 4 ++-- .../elasticsearch/ingest/IngestClientIT.java | 22 +++++++++---------- 39 files changed, 76 insertions(+), 64 deletions(-) rename core/src/main/java/org/elasticsearch/action/ingest/{delete => }/DeletePipelineAction.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{delete => }/DeletePipelineRequest.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{delete => }/DeletePipelineRequestBuilder.java (96%) rename core/src/main/java/org/elasticsearch/action/ingest/{delete => }/DeletePipelineTransportAction.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{get => }/GetPipelineAction.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{get => }/GetPipelineRequest.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{get => }/GetPipelineRequestBuilder.java (96%) rename core/src/main/java/org/elasticsearch/action/ingest/{get => }/GetPipelineResponse.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{get => }/GetPipelineTransportAction.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{put => }/PutPipelineAction.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{put => }/PutPipelineRequest.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{put => }/PutPipelineRequestBuilder.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{put => }/PutPipelineTransportAction.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{reload => }/ReloadPipelinesAction.java (98%) rename 
core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulateDocumentResult.java (95%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulateDocumentSimpleResult.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulateDocumentVerboseResult.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulateExecutionService.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulatePipelineAction.java (96%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulatePipelineRequest.java (99%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulatePipelineRequestBuilder.java (97%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulatePipelineResponse.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulatePipelineTransportAction.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/SimulateProcessorResult.java (98%) rename core/src/main/java/org/elasticsearch/action/ingest/{simulate => }/WriteableIngestDocument.java (98%) rename core/src/test/java/org/elasticsearch/action/ingest/{reload => }/ReloadPipelinesActionTests.java (98%) rename core/src/test/java/org/elasticsearch/action/ingest/{simulate => }/SimulateDocumentSimpleResultTests.java (95%) rename core/src/test/java/org/elasticsearch/action/ingest/{simulate => }/SimulateExecutionServiceTests.java (96%) rename core/src/test/java/org/elasticsearch/action/ingest/{simulate => }/SimulatePipelineRequestParsingTests.java (98%) rename core/src/test/java/org/elasticsearch/action/ingest/{simulate => }/SimulatePipelineResponseTests.java (94%) rename core/src/test/java/org/elasticsearch/action/ingest/{simulate => }/SimulateProcessorResultTests.java (96%) rename core/src/test/java/org/elasticsearch/action/ingest/{simulate => 
}/WriteableIngestDocumentTests.java (99%) diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 09a6dc6dceb..67d17a2cc0d 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -157,14 +157,14 @@ import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction; import org.elasticsearch.action.ingest.IngestActionFilter; import org.elasticsearch.action.ingest.IngestDisabledActionFilter; -import org.elasticsearch.action.ingest.delete.DeletePipelineAction; -import org.elasticsearch.action.ingest.delete.DeletePipelineTransportAction; -import org.elasticsearch.action.ingest.get.GetPipelineAction; -import org.elasticsearch.action.ingest.get.GetPipelineTransportAction; -import org.elasticsearch.action.ingest.put.PutPipelineAction; -import org.elasticsearch.action.ingest.put.PutPipelineTransportAction; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineTransportAction; +import org.elasticsearch.action.ingest.DeletePipelineAction; +import org.elasticsearch.action.ingest.DeletePipelineTransportAction; +import org.elasticsearch.action.ingest.GetPipelineAction; +import org.elasticsearch.action.ingest.GetPipelineTransportAction; +import org.elasticsearch.action.ingest.PutPipelineAction; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; +import org.elasticsearch.action.ingest.SimulatePipelineAction; +import org.elasticsearch.action.ingest.SimulatePipelineTransportAction; import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.PercolateAction; import org.elasticsearch.action.percolate.TransportMultiPercolateAction; diff --git 
a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java index a27340427e5..8456d7e0e6a 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.delete; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; import org.elasticsearch.action.delete.DeleteResponse; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index 9d1877aa404..3d958f886ac 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.delete; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java similarity index 96% rename from core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java index 8f878dd4957..29563fa05d5 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.delete; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.delete.DeleteResponse; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 1aba6e6decd..4f25a9d330c 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/delete/DeletePipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.delete; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.delete.DeleteResponse; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java index 8025eaebfcb..f6bc3d9a778 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.get; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java index 69c15112536..e0bfca6cac4 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.get; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java similarity index 96% rename from core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java index 07099562695..c339603104e 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.get; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineResponse.java rename to core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java index ff2eafc8172..9a12f4b1d03 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineResponse.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.get; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java index 01b401d1fee..471238e0587 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/get/GetPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.get; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java index ed36d067187..7f37009577e 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.put; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; import org.elasticsearch.action.index.IndexResponse; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java index 9084632dd0a..3ee46a0f71f 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.put; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java index 848e20c38a9..f2b5a8d9e1c 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.put; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.index.IndexResponse; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 325f4b235d1..067a03d2ae4 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/put/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.put; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexResponse; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java b/core/src/main/java/org/elasticsearch/action/ingest/ReloadPipelinesAction.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/ReloadPipelinesAction.java index 8d38c08c4b6..452f3a3341f 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/ReloadPipelinesAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.reload; +package org.elasticsearch.action.ingest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentResult.java similarity index 95% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentResult.java index f817fe392e8..7e7682bc250 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentResult.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java index 115a68eeac9..3249775a8e4 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentVerboseResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentVerboseResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java index 08984ed0f03..2b119afb9d5 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentVerboseResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionService.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionService.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index 8d3e96694d1..ccfb6526ab0 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionService.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.ingest.core.IngestDocument; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java similarity index 96% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java index 7f92f0ea35a..c1d219a4190 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java similarity index 99% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequest.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index e0ba5574e4f..ccc51e7bdd7 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestBuilder.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java index e77ed3fecc1..d2e259fd578 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponse.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index 9a5706dd17d..7a9ab0b5f8b 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponse.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineTransportAction.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index e12ac6f6a1d..3d5e02a9332 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index d7612b0c3af..afa85b4c219 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocument.java b/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocument.java rename to core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 532d2ff3e60..0f33f00faf4 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocument.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 217ef42e4a2..7860a84adda 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -40,9 +40,9 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.action.ingest.delete.DeletePipelineRequest; -import org.elasticsearch.action.ingest.put.PutPipelineRequest; -import org.elasticsearch.action.ingest.reload.ReloadPipelinesAction; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.ReloadPipelinesAction; import 
org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index d5c258ff47d..994e0300407 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -22,8 +22,8 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.delete.DeletePipelineAction; -import org.elasticsearch.action.ingest.delete.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineAction; +import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index fed1f8fc448..47f41fc437b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.get.GetPipelineAction; -import org.elasticsearch.action.ingest.get.GetPipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineAction; +import org.elasticsearch.action.ingest.GetPipelineRequest; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 2a36773bc79..b63b2eb44a7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -22,8 +22,8 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.put.PutPipelineAction; -import org.elasticsearch.action.ingest.put.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineAction; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index 80f0d013b72..ed859e2a442 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -22,8 +22,8 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineAction; +import 
org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java b/core/src/test/java/org/elasticsearch/action/ingest/ReloadPipelinesActionTests.java similarity index 98% rename from core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/ReloadPipelinesActionTests.java index b6f767323c1..8a0284d80cf 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/reload/ReloadPipelinesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/ReloadPipelinesActionTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.ingest.reload; +package org.elasticsearch.action.ingest; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java similarity index 95% rename from core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResultTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java index 685db51c7cd..dc8f7fcb7b9 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateDocumentSimpleResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java @@ -17,8 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; +import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.core.IngestDocument; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java similarity index 96% rename from core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionServiceTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 2189faa8749..e292b45c47a 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -17,8 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; +import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulateExecutionService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestProcessor; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestParsingTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java similarity index 98% rename from core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestParsingTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java index f84cb550ed9..eabc1821d2a 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineRequestParsingTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -17,8 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.core.CompoundProcessor; @@ -36,7 +37,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.action.ingest.simulate.SimulatePipelineRequest.Fields; +import static org.elasticsearch.action.ingest.SimulatePipelineRequest.Fields; import static org.elasticsearch.ingest.core.IngestDocument.MetaData.ID; import static org.elasticsearch.ingest.core.IngestDocument.MetaData.INDEX; import static org.elasticsearch.ingest.core.IngestDocument.MetaData.TYPE; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java similarity index 94% rename from core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponseTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index 65c9996d533..905baa86485 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulatePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -17,8 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; +import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.core.IngestDocument; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java similarity index 96% rename from core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResultTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index 312d6ec3a96..208d2534a4c 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/simulate/SimulateProcessorResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -17,8 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; +import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.core.IngestDocument; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocumentTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java similarity index 99% rename from core/src/test/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocumentTests.java rename to core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index 17a6e15bdf6..120825a7beb 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/simulate/WriteableIngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -17,12 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.action.ingest.simulate; +package org.elasticsearch.action.ingest; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 8ffa53d2b86..2d7d2021838 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -26,17 +26,17 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.ingest.IngestActionFilter; -import org.elasticsearch.action.ingest.delete.DeletePipelineAction; -import org.elasticsearch.action.ingest.delete.DeletePipelineRequestBuilder; -import org.elasticsearch.action.ingest.get.GetPipelineAction; -import org.elasticsearch.action.ingest.get.GetPipelineRequestBuilder; -import org.elasticsearch.action.ingest.get.GetPipelineResponse; -import org.elasticsearch.action.ingest.put.PutPipelineAction; -import org.elasticsearch.action.ingest.put.PutPipelineRequestBuilder; -import org.elasticsearch.action.ingest.simulate.SimulateDocumentSimpleResult; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineAction; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineRequestBuilder; -import org.elasticsearch.action.ingest.simulate.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.DeletePipelineAction; +import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; +import org.elasticsearch.action.ingest.GetPipelineAction; +import 
org.elasticsearch.action.ingest.GetPipelineRequestBuilder; +import org.elasticsearch.action.ingest.GetPipelineResponse; +import org.elasticsearch.action.ingest.PutPipelineAction; +import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; +import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; +import org.elasticsearch.action.ingest.SimulatePipelineAction; +import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.plugins.Plugin; From 0b066482052a6031c4c0802a3b8bdf40aad9c864 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 14:00:24 +0100 Subject: [PATCH 175/347] template installation is async, so use assertBusy --- .../ingest/IngestTemplateTests.java | 21 +++---------------- 1 file changed, 3 insertions(+), 18 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java index d886b3f7294..22e2bbe97a8 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java @@ -37,34 +37,19 @@ import static org.mockito.Mockito.when; public class IngestTemplateTests extends ESSingleNodeTestCase { - private IngestBootstrapper bootstrapper; - @Override protected boolean resetNodeAfterTest() { return true; } - @Before - public void init() { - ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.executor(anyString())).thenReturn(Runnable::run); - Environment environment = mock(Environment.class); - ClusterService clusterService = mock(ClusterService.class); - TransportService transportService = mock(TransportService.class); - bootstrapper = new IngestBootstrapper( - Settings.EMPTY, threadPool, environment, clusterService, 
transportService, new ProcessorsRegistry() - ); - bootstrapper.setClient(client()); - } - public void testIngestIndexTemplateIsInstalled() throws Exception { - verifyIngestIndexTemplateExist(); + assertBusy(IngestTemplateTests::verifyIngestIndexTemplateExist); } public void testInstallTemplateAfterItHasBeenRemoved() throws Exception { - verifyIngestIndexTemplateExist(); + assertBusy(IngestTemplateTests::verifyIngestIndexTemplateExist); client().admin().indices().prepareDeleteTemplate(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); - verifyIngestIndexTemplateExist(); + assertBusy(IngestTemplateTests::verifyIngestIndexTemplateExist); } private static void verifyIngestIndexTemplateExist() { From eca15949697c789895a49a63e4c090a6a2a50636 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 14:26:14 +0100 Subject: [PATCH 176/347] start ingest thread pool only when node.ingest is set to true --- .../elasticsearch/action/ActionModule.java | 5 +- .../elasticsearch/ingest/IngestModule.java | 5 ++ .../elasticsearch/threadpool/ThreadPool.java | 5 +- .../threadpool/ThreadPoolTests.java | 64 +++++++++++++++++++ .../plugin/ingest/IngestPlugin.java | 7 -- 5 files changed, 75 insertions(+), 11 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 67d17a2cc0d..264d8d6d0e3 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -203,6 +203,7 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.IngestModule; import java.util.ArrayList; import java.util.HashMap; @@ -227,15 +228,13 
@@ public class ActionModule extends AbstractModule { this.transportAction = transportAction; this.supportTransportActions = supportTransportActions; } - - } private final boolean ingestEnabled; private final boolean proxy; public ActionModule(Settings settings, boolean proxy) { - this.ingestEnabled = settings.getAsBoolean("node.ingest", false); + this.ingestEnabled = IngestModule.isIngestEnabled(settings); this.proxy = proxy; } diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java index 92a375a37e8..0c294941922 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; @@ -52,4 +53,8 @@ public class IngestModule extends AbstractModule { public void registerProcessor(String type, BiFunction> processorFactoryProvider) { processorsRegistry.registerProcessor(type, processorFactoryProvider); } + + public static boolean isIngestEnabled(Settings settings) { + return settings.getAsBoolean("node.ingest", false); + } } diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 1176f3fe49f..c9055f34258 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; +import 
org.elasticsearch.ingest.IngestModule; import java.io.IOException; import java.util.ArrayList; @@ -236,7 +237,9 @@ public class ThreadPool extends AbstractComponent { add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FORCE_MERGE).size(1)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STARTED).size(availableProcessors * 2).keepAlive("5m")); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STORE).size(availableProcessors * 2).keepAlive("5m")); - add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INGEST).size(availableProcessors).queueSize(200)); + if (IngestModule.isIngestEnabled(settings)) { + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INGEST).size(availableProcessors).queueSize(200)); + } this.defaultExecutorTypeSettings = unmodifiableMap(defaultExecutorTypeSettings); diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java new file mode 100644 index 00000000000..7488de3fada --- /dev/null +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.threadpool; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; + +public class ThreadPoolTests extends ESTestCase { + + public void testIngestThreadPoolNotStartedWithIngestDisabled() throws Exception { + Settings settings = Settings.builder().put("name", "test").put("node.ingest", false).build(); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool(settings); + for (ThreadPool.Info info : threadPool.info()) { + assertThat(info.getName(), not(equalTo("ingest"))); + } + } finally { + if (threadPool != null) { + terminate(threadPool); + } + } + } + + public void testIngestThreadPoolStartedWithIngestEnabled() throws Exception { + Settings settings = Settings.builder().put("name", "test").put("node.ingest", true).build(); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool(settings); + boolean ingestFound = false; + for (ThreadPool.Info info : threadPool.info()) { + if (info.getName().equals("ingest")) { + ingestFound = true; + break; + } + } + assertThat(ingestFound, equalTo(true)); + } finally { + if (threadPool != null) { + terminate(threadPool); + } + } + } +} diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 21b57b11e43..e9e944d8a92 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -19,7 +19,6 @@ package org.elasticsearch.plugin.ingest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.ingest.processor.AppendProcessor; import 
org.elasticsearch.ingest.processor.ConvertProcessor; @@ -42,12 +41,6 @@ public class IngestPlugin extends Plugin { public static final String NAME = "ingest"; - private final boolean ingestEnabled; - - public IngestPlugin(Settings nodeSettings) { - this.ingestEnabled = nodeSettings.getAsBoolean("node.ingest", false); - } - @Override public String name() { return NAME; From ba97b7f7bdc1554c0d35f2e2d44e9695991d77e5 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 14:35:58 +0100 Subject: [PATCH 177/347] Don't activate ingest on tribe nodes --- .../java/org/elasticsearch/ingest/IngestBootstrapper.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index ec1e7a243d4..f0e3f28df82 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -69,7 +69,10 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl this.pipelineStore = new PipelineStore(settings, clusterService, transportService); this.pipelineExecutionService = new PipelineExecutionService(pipelineStore, threadPool); - clusterService.add(this); + boolean isNoTribeNode = settings.getByPrefix("tribe.").getAsMap().isEmpty(); + if (isNoTribeNode) { + clusterService.add(this); + } } // for testing: From fa9aab91ebb1db2911bb696fe7fbde806595d1af Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 14:52:15 +0100 Subject: [PATCH 178/347] Ignore rejected execution exception --- .../ingest/IngestBootstrapper.java | 49 +++++++++++-------- 1 file changed, 29 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index f0e3f28df82..a52e746bc84 100644 --- 
a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -42,6 +42,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.io.InputStream; +import java.util.concurrent.RejectedExecutionException; /** * Instantiates and wires all the services that the ingest plugin will be needing. @@ -197,30 +198,38 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl } void startPipelineStore(MetaData metaData) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - try { - // Before we start the pipeline store we check if the index template exists, - // if it doesn't we add it. If for some reason this fails we will try again later, - // but the pipeline store won't start before that happened - if (isIngestTemplateInstallationRequired(metaData)) { - installIngestIndexTemplate(); + try { + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { + try { + // Before we start the pipeline store we check if the index template exists, + // if it doesn't we add it. 
If for some reason this fails we will try again later, + // but the pipeline store won't start before that happened + if (isIngestTemplateInstallationRequired(metaData)) { + installIngestIndexTemplate(); + } + pipelineStore.start(); + } catch (Exception e) { + logger.warn("pipeline store failed to start, retrying...", e); + startPipelineStore(metaData); } - pipelineStore.start(); - } catch (Exception e) { - logger.warn("pipeline store failed to start, retrying...", e); - startPipelineStore(metaData); - } - }); + }); + } catch (RejectedExecutionException e) { + logger.debug("async pipeline store start failed", e); + } } void stopPipelineStore(String reason) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - try { - pipelineStore.stop(reason); - } catch (Exception e) { - logger.error("pipeline store stop failure", e); - } - }); + try { + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { + try { + pipelineStore.stop(reason); + } catch (Exception e) { + logger.error("pipeline store stop failure", e); + } + }); + } catch (RejectedExecutionException e) { + logger.debug("async pipeline store stop failed", e); + } } } From 234371811d3ab14a45efffd6f6b5a127f59267e6 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 15:05:17 +0100 Subject: [PATCH 179/347] awaited fix a couple of index template tests till we have removed the ingest index template --- .../elasticsearch/indices/template/SimpleIndexTemplateIT.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index b32cfef76b6..d1cb2193b07 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -63,6 +63,8 @@ import static org.hamcrest.Matchers.nullValue; * */ public class SimpleIndexTemplateIT 
extends ESIntegTestCase { + + @AwaitsFix(bugUrl = "temporarily ignored till we have removed the ingest index template") public void testSimpleIndexTemplateTests() throws Exception { // clean all templates setup by the framework. client().admin().indices().prepareDeleteTemplate("*").get(); @@ -313,6 +315,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { } } + @AwaitsFix(bugUrl = "temporarily ignored till we have removed the ingest index template") public void testInvalidSettings() throws Exception { // clean all templates setup by the framework. client().admin().indices().prepareDeleteTemplate("*").get(); From c44b83dc29b7886ad1dc740cfe346c3e8e6abef4 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 15:21:21 +0100 Subject: [PATCH 180/347] Also ignore rejected execution exception when retying restart --- .../org/elasticsearch/ingest/IngestBootstrapper.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index a52e746bc84..0ce43a94e92 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -208,9 +208,13 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl installIngestIndexTemplate(); } pipelineStore.start(); - } catch (Exception e) { - logger.warn("pipeline store failed to start, retrying...", e); - startPipelineStore(metaData); + } catch (Exception e1) { + logger.warn("pipeline store failed to start, retrying...", e1); + try { + startPipelineStore(metaData); + } catch (RejectedExecutionException e2) { + logger.debug("async pipeline store start retry failed", e2); + } } }); } catch (RejectedExecutionException e) { From 0d70d92aa082f325f5e2f658009a1e4d0c9c856c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 
2016 15:25:35 +0100 Subject: [PATCH 181/347] Also ignore rejected execution exception when installing index template --- .../ingest/IngestBootstrapper.java | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index 0ce43a94e92..e81e425e54c 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -156,13 +156,17 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl } void forkAndInstallIngestIndexTemplate() { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - try { - installIngestIndexTemplate(); - } catch (IOException e) { - logger.debug("Failed to install .ingest index template", e); - } - }); + try { + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { + try { + installIngestIndexTemplate(); + } catch (IOException e) { + logger.debug("Failed to install .ingest index template", e); + } + }); + } catch (RejectedExecutionException e) { + logger.debug("async fork and install template failed", e); + } } void installIngestIndexTemplate() throws IOException { From 694eeffe9288e57e7b39cc562a713aa9dabec56f Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 15:36:34 +0100 Subject: [PATCH 182/347] catch the right exception... 
--- .../elasticsearch/ingest/IngestBootstrapper.java | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index e81e425e54c..8eba39c0159 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; @@ -42,7 +43,6 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.io.InputStream; -import java.util.concurrent.RejectedExecutionException; /** * Instantiates and wires all the services that the ingest plugin will be needing. 
@@ -164,7 +164,7 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl logger.debug("Failed to install .ingest index template", e); } }); - } catch (RejectedExecutionException e) { + } catch (EsRejectedExecutionException e) { logger.debug("async fork and install template failed", e); } } @@ -214,14 +214,10 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl pipelineStore.start(); } catch (Exception e1) { logger.warn("pipeline store failed to start, retrying...", e1); - try { - startPipelineStore(metaData); - } catch (RejectedExecutionException e2) { - logger.debug("async pipeline store start retry failed", e2); - } + startPipelineStore(metaData); } }); - } catch (RejectedExecutionException e) { + } catch (EsRejectedExecutionException e) { logger.debug("async pipeline store start failed", e); } } @@ -235,7 +231,7 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl logger.error("pipeline store stop failure", e); } }); - } catch (RejectedExecutionException e) { + } catch (EsRejectedExecutionException e) { logger.debug("async pipeline store stop failed", e); } } From 03fe38681e914074da02a828378ded38845695d9 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 15:51:52 +0100 Subject: [PATCH 183/347] renamed qa package o.e.plugin.ingest to o.e.ingest This way InternalTemplateService constructor can be set back to package private visibility --- .../action/ingest/PutPipelineTransportAction.java | 1 - .../java/org/elasticsearch/ingest/InternalTemplateService.java | 2 +- .../org/elasticsearch/{plugin => }/ingest/IngestPlugin.java | 2 +- .../src/test/java/org/elasticsearch/ingest/IngestRestIT.java | 1 - .../{plugin => }/ingest/AbstractMustacheTests.java | 2 +- .../{plugin => }/ingest/IngestDocumentMustacheIT.java | 2 +- .../{plugin => }/ingest/IngestMustacheRemoveProcessorIT.java | 3 +-- .../{plugin => }/ingest/IngestMustacheSetProcessorIT.java | 2 +- 
.../elasticsearch/{plugin => }/ingest/TemplateServiceIT.java | 2 +- .../{plugin => }/ingest/ValueSourceMustacheIT.java | 2 +- 10 files changed, 8 insertions(+), 11 deletions(-) rename plugins/ingest/src/main/java/org/elasticsearch/{plugin => }/ingest/IngestPlugin.java (99%) rename qa/ingest-with-mustache/src/test/java/org/elasticsearch/{plugin => }/ingest/AbstractMustacheTests.java (98%) rename qa/ingest-with-mustache/src/test/java/org/elasticsearch/{plugin => }/ingest/IngestDocumentMustacheIT.java (99%) rename qa/ingest-with-mustache/src/test/java/org/elasticsearch/{plugin => }/ingest/IngestMustacheRemoveProcessorIT.java (97%) rename qa/ingest-with-mustache/src/test/java/org/elasticsearch/{plugin => }/ingest/IngestMustacheSetProcessorIT.java (98%) rename qa/ingest-with-mustache/src/test/java/org/elasticsearch/{plugin => }/ingest/TemplateServiceIT.java (98%) rename qa/ingest-with-mustache/src/test/java/org/elasticsearch/{plugin => }/ingest/ValueSourceMustacheIT.java (98%) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 067a03d2ae4..8f7da7eff07 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -31,7 +31,6 @@ import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; - public class PutPipelineTransportAction extends HandledTransportAction { private final PipelineStore pipelineStore; diff --git a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java index 531ffccf888..b4b5ce88fcb 100644 --- a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java +++ 
b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java @@ -34,7 +34,7 @@ public class InternalTemplateService implements TemplateService { private final ScriptService scriptService; - public InternalTemplateService(ScriptService scriptService) { + InternalTemplateService(ScriptService scriptService) { this.scriptService = scriptService; } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestPlugin.java similarity index 99% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java rename to plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestPlugin.java index e9e944d8a92..0acad64396d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestPlugin.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.ingest.processor.AppendProcessor; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java index f6da5b541bb..1205aa2f8a2 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java 
b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/AbstractMustacheTests.java similarity index 98% rename from qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java rename to qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/AbstractMustacheTests.java index 326571fb17b..57165e69fb6 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/AbstractMustacheTests.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/AbstractMustacheTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java similarity index 99% rename from qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java rename to qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java index 6a6c8712d6e..1b080fec7ed 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestDocumentMustacheIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ValueSource; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestMustacheRemoveProcessorIT.java similarity index 97% rename from qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java rename to qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestMustacheRemoveProcessorIT.java index dccabb28a57..e94765a4aad 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheRemoveProcessorIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestMustacheRemoveProcessorIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.ingest.processor.RemoveProcessor; import org.hamcrest.CoreMatchers; @@ -35,5 +35,4 @@ public class IngestMustacheRemoveProcessorIT extends AbstractMustacheTests { RemoveProcessor processor = factory.create(config); assertThat(processor.getField().execute(Collections.singletonMap("var", "_value")), CoreMatchers.equalTo("field_value")); } - } diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestMustacheSetProcessorIT.java similarity index 98% rename from qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java rename to qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestMustacheSetProcessorIT.java index e37cd323364..68466795b74 100644 --- 
a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/IngestMustacheSetProcessorIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestMustacheSetProcessorIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.ingest.core.IngestDocument; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/TemplateServiceIT.java similarity index 98% rename from qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java rename to qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/TemplateServiceIT.java index ebd32d4c752..1d1579fe66a 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/TemplateServiceIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/TemplateServiceIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.ingest.core.TemplateService; diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java similarity index 98% rename from qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java rename to qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java index 967c51be27d..18085b94b04 100644 --- a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/plugin/ingest/ValueSourceMustacheIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ValueSource; From 79c13776618828d78103eb0bbd086355f26efdc0 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 16:16:14 +0100 Subject: [PATCH 184/347] [TEST] set node.ingest to true otherwise we may try to modify values for a threadpool that hasn't been started --- .../UpdateThreadPoolSettingsTests.java | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index e1b1c4451c9..a29524a4699 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.threadpool.ThreadPool.Names; import java.lang.reflect.Field; import java.util.Arrays; import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; @@ -46,6 +47,7 @@ import static org.hamcrest.Matchers.sameInstance; /** */ public class UpdateThreadPoolSettingsTests extends ESTestCase { + public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException { String threadPoolName = randomThreadPoolName(); ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); @@ -54,6 +56,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { threadPool = new ThreadPool(settingsBuilder() .put("name", "testCorrectThreadPoolTypePermittedInSettings") .put("threadpool." 
+ threadPoolName + ".type", correctThreadPoolType.getType()) + .put("node.ingest", true) .build()); ThreadPool.Info info = info(threadPool, threadPoolName); if (ThreadPool.Names.SAME.equals(threadPoolName)) { @@ -77,6 +80,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { settingsBuilder() .put("name", "testThreadPoolCanNotOverrideThreadPoolType") .put("threadpool." + threadPoolName + ".type", incorrectThreadPoolType.getType()) + .put("node.ingest", true) .build()); terminate(threadPool); fail("expected IllegalArgumentException"); @@ -95,7 +99,8 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool.ThreadPoolType validThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); ThreadPool threadPool = null; try { - threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType").build()); + threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType") + .put("node.ingest", true).build()); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -120,7 +125,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool threadPool = null; try { Settings nodeSettings = Settings.settingsBuilder() - .put("name", "testCachedExecutorType").build(); + .put("name", "testCachedExecutorType").put("node.ingest", true).build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -171,7 +176,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { try { Settings nodeSettings = Settings.settingsBuilder() - .put("name", "testFixedExecutorType").build(); + .put("name", "testFixedExecutorType").put("node.ingest", true).build(); threadPool = new 
ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -226,7 +231,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { try { Settings nodeSettings = settingsBuilder() .put("threadpool." + threadPoolName + ".size", 10) - .put("name", "testScalingExecutorType").build(); + .put("name", "testScalingExecutorType").put("node.ingest", true).build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -264,7 +269,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { try { Settings nodeSettings = Settings.settingsBuilder() .put("threadpool." + threadPoolName + ".queue_size", 1000) - .put("name", "testCachedExecutorType").build(); + .put("name", "testCachedExecutorType").put("node.ingest", true).build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -301,7 +306,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { .put("threadpool.my_pool2.type", "fixed") .put("threadpool.my_pool2.size", "1") .put("threadpool.my_pool2.queue_size", "1") - .put("name", "testCustomThreadPool").build(); + .put("name", "testCustomThreadPool").put("node.ingest", true).build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -393,11 +398,10 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { Set set = new HashSet<>(); set.addAll(Arrays.asList(ThreadPool.ThreadPoolType.values())); set.remove(ThreadPool.THREAD_POOL_TYPES.get(threadPoolName)); - 
ThreadPool.ThreadPoolType invalidThreadPoolType = randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()])); - return invalidThreadPoolType; + return randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()])); } private String randomThreadPool(ThreadPool.ThreadPoolType type) { - return randomFrom(ThreadPool.THREAD_POOL_TYPES.entrySet().stream().filter(t -> t.getValue().equals(type)).map(t -> t.getKey()).collect(Collectors.toList())); + return randomFrom(ThreadPool.THREAD_POOL_TYPES.entrySet().stream().filter(t -> t.getValue().equals(type)).map(Map.Entry::getKey).collect(Collectors.toList())); } } From adac314328376e0f5067dea6bcffde08ffd6b054 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 18:32:46 +0100 Subject: [PATCH 185/347] revert move of IngestPlugin class This was moved accidentally as part of a previous refactoring. --- .../org/elasticsearch/{ => plugin}/ingest/IngestPlugin.java | 2 +- .../src/test/java/org/elasticsearch/ingest/IngestRestIT.java | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) rename plugins/ingest/src/main/java/org/elasticsearch/{ => plugin}/ingest/IngestPlugin.java (99%) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java similarity index 99% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestPlugin.java rename to plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index 0acad64396d..e9e944d8a92 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest; +package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.ingest.processor.AppendProcessor; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java index 1205aa2f8a2..f6da5b541bb 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.ingest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; From 9c3ebb83a7599f863a9d864c96bf8486973441b6 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 7 Jan 2016 23:12:44 -0500 Subject: [PATCH 186/347] Don't do DNS lookups from GeoIpProcessor There is no need to involve DNS in this! 
--- .../ingest/processor/GeoIpProcessor.java | 9 ++------- .../ingest/processor/GeoIpProcessorTests.java | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java index 445853dccb3..8a192000714 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java @@ -30,6 +30,7 @@ import com.maxmind.geoip2.record.Location; import com.maxmind.geoip2.record.Subdivision; import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; @@ -38,7 +39,6 @@ import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.net.InetAddress; -import java.net.UnknownHostException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.PathMatcher; @@ -78,12 +78,7 @@ public final class GeoIpProcessor implements Processor { @Override public void execute(IngestDocument ingestDocument) { String ip = ingestDocument.getFieldValue(sourceField, String.class); - final InetAddress ipAddress; - try { - ipAddress = InetAddress.getByName(ip); - } catch (UnknownHostException e) { - throw new RuntimeException(e); - } + final InetAddress ipAddress = InetAddresses.forString(ip); Map geoData; switch (dbReader.getMetadata().getDatabaseType()) { diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java index 818e9054749..ee09354de8f 100644 --- 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java +++ b/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java @@ -30,6 +30,7 @@ import java.util.EnumSet; import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class GeoIpProcessorTests extends ESTestCase { @@ -91,4 +92,21 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.size(), equalTo(0)); } + /** Don't silently do DNS lookups or anything trappy on bogus data */ + public void testInvalid() throws Exception { + InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); + GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); + + Map document = new HashMap<>(); + document.put("source_field", "www.google.com"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + try { + processor.execute(ingestDocument); + fail("did not get expected exception"); + } catch (IllegalArgumentException expected) { + assertNotNull(expected.getMessage()); + assertThat(expected.getMessage(), containsString("not an IP string literal")); + } + } + } From e35e9bd736bcc9e2b62c86c18eae4659ea6300b0 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 7 Jan 2016 19:06:03 +0100 Subject: [PATCH 187/347] Removed index template and instead create `.ingest` with settings and mapping when we find out that the index is missing during the put pipeline call. 
--- .../ingest/IngestBootstrapper.java | 52 ---- .../elasticsearch/ingest/PipelineStore.java | 143 ++++++++++ .../org/elasticsearch/ingest/ingest.json | 31 -- .../ingest/IngestTemplateTests.java | 64 ----- .../ingest/PipelineStoreTests.java | 269 ++++++++++++++++++ 5 files changed, 412 insertions(+), 147 deletions(-) delete mode 100644 core/src/main/resources/org/elasticsearch/ingest/ingest.json delete mode 100644 core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index 8eba39c0159..ad062daed3c 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -52,7 +52,6 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl static final String INGEST_INDEX_TEMPLATE_NAME = "ingest-template"; - private Client client; private final ThreadPool threadPool; private final Environment environment; private final PipelineStore pipelineStore; @@ -98,7 +97,6 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl @Inject public void setClient(Client client) { - this.client = client; pipelineStore.setClient(client); } @@ -118,13 +116,6 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl if (validClusterState(state) == false) { stopPipelineStore("cluster state invalid [" + state + "]"); } - // We always check if the index template still exist, - // because it may have been removed via an api call and - // this allows us to add it back immediately: - // (this method gets invoked on each cluster state update) - if (isIngestTemplateInstallationRequired(state.metaData())) { - forkAndInstallIngestIndexTemplate(); - } } else { if (validClusterState(state)) { startPipelineStore(state.metaData()); @@ -147,43 +138,6 @@ public class 
IngestBootstrapper extends AbstractLifecycleComponent implements Cl } } - boolean isIngestTemplateInstallationRequired(MetaData metaData) { - if (metaData.getTemplates().containsKey(INGEST_INDEX_TEMPLATE_NAME)) { - logger.trace("not installing ingest index template, because it already is installed"); - return false; - } - return true; - } - - void forkAndInstallIngestIndexTemplate() { - try { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - try { - installIngestIndexTemplate(); - } catch (IOException e) { - logger.debug("Failed to install .ingest index template", e); - } - }); - } catch (EsRejectedExecutionException e) { - logger.debug("async fork and install template failed", e); - } - } - - void installIngestIndexTemplate() throws IOException { - logger.debug("installing .ingest index template..."); - try (InputStream is = IngestBootstrapper.class.getResourceAsStream("ingest.json")) { - final byte[] template; - try (BytesStreamOutput out = new BytesStreamOutput()) { - Streams.copy(is, out); - template = out.bytes().toBytes(); - } - PutIndexTemplateRequest request = new PutIndexTemplateRequest(INGEST_INDEX_TEMPLATE_NAME); - request.source(template); - client.execute(PutIndexTemplateAction.INSTANCE, request).actionGet(); - logger.debug(".ingest index template has been installed"); - } - } - @Override protected void doStart() { } @@ -205,12 +159,6 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl try { threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { try { - // Before we start the pipeline store we check if the index template exists, - // if it doesn't we add it. 
If for some reason this fails we will try again later, - // but the pipeline store won't start before that happened - if (isIngestTemplateInstallationRequired(metaData)) { - installIngestIndexTemplate(); - } pipelineStore.start(); } catch (Exception e1) { logger.warn("pipeline store failed to start, retrying...", e1); diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 7860a84adda..afae8468abb 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -21,6 +21,8 @@ package org.elasticsearch.ingest; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; @@ -31,13 +33,17 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.SearchScrollIterator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.env.Environment; import 
org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.action.ingest.DeletePipelineRequest; @@ -66,8 +72,45 @@ public class PipelineStore extends AbstractComponent implements Closeable { public final static String INDEX = ".ingest"; public final static String TYPE = "pipeline"; + final static Settings INGEST_INDEX_SETTING = Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put("index.mapper.dynamic", false) + .build(); + + final static String PIPELINE_MAPPING; + + static { + try { + PIPELINE_MAPPING = XContentFactory.jsonBuilder().startObject() + .field("dynamic", "strict") + .startObject("_all") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("processors") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("on_failure") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("description") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + private Client client; private final TimeValue scrollTimeout; + private final ClusterService clusterService; private final ReloadPipelinesAction reloadPipelinesAction; private final Pipeline.Factory factory = new Pipeline.Factory(); private Map processorFactoryRegistry; @@ -77,6 +120,7 @@ public class PipelineStore extends AbstractComponent implements Closeable { public PipelineStore(Settings settings, ClusterService clusterService, TransportService transportService) { super(settings); + this.clusterService = clusterService; this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); this.reloadPipelinesAction = new ReloadPipelinesAction(settings, this, clusterService, transportService); } @@ -139,6 +183,28 @@ public class PipelineStore 
extends AbstractComponent implements Closeable { throw new IllegalArgumentException("Invalid pipeline configuration", e); } + ClusterState state = clusterService.state(); + if (isIngestIndexPresent(state)) { + innerPut(request, listener); + } else { + CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX); + createIndexRequest.settings(INGEST_INDEX_SETTING); + createIndexRequest.mapping(TYPE, PIPELINE_MAPPING); + client.admin().indices().create(createIndexRequest, new ActionListener() { + @Override + public void onResponse(CreateIndexResponse createIndexResponse) { + innerPut(request, listener); + } + + @Override + public void onFailure(Throwable e) { + listener.onFailure(e); + } + }); + } + } + + private void innerPut(PutPipelineRequest request, ActionListener listener) { IndexRequest indexRequest = new IndexRequest(request); indexRequest.index(PipelineStore.INDEX); indexRequest.type(PipelineStore.TYPE); @@ -245,6 +311,83 @@ public class PipelineStore extends AbstractComponent implements Closeable { } } + /** + * @param clusterState The cluster just to check whether the ingest index exists and the state of the ingest index + * @throws IllegalStateException If the ingest template exists, but is in an invalid state + * @return true when the ingest index exists and has the expected settings and mappings or returns + * false when the ingest index doesn't exists and needs to be created. 
+ */ + boolean isIngestIndexPresent(ClusterState clusterState) throws IllegalStateException { + if (clusterState.getMetaData().hasIndex(INDEX)) { + IndexMetaData indexMetaData = clusterState.getMetaData().index(INDEX); + Settings indexSettings = indexMetaData.getSettings(); + int numberOfShards = indexSettings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1); + if (numberOfShards != 1) { + throw new IllegalStateException("illegal ingest index setting, [" + IndexMetaData.SETTING_NUMBER_OF_SHARDS + "] setting is [" + numberOfShards + "] while [1] is expected"); + } + int numberOfReplicas = indexSettings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, -1); + if (numberOfReplicas != 1) { + throw new IllegalStateException("illegal ingest index setting, [" + IndexMetaData.SETTING_NUMBER_OF_REPLICAS + "] setting is [" + numberOfReplicas + "] while [1] is expected"); + } + boolean dynamicMappings = indexSettings.getAsBoolean("index.mapper.dynamic", true); + if (dynamicMappings != false) { + throw new IllegalStateException("illegal ingest index setting, [index.mapper.dynamic] setting is [" + dynamicMappings + "] while [false] is expected"); + } + + if (indexMetaData.getMappings().size() != 1 && indexMetaData.getMappings().containsKey(TYPE) == false) { + throw new IllegalStateException("illegal ingest mappings, only [" + TYPE + "] mapping is allowed to exist in the " + INDEX +" index"); + } + + try { + Map pipelineMapping = indexMetaData.getMappings().get(TYPE).getSourceAsMap(); + String dynamicMapping = (String) XContentMapValues.extractValue("dynamic", pipelineMapping); + if ("strict".equals(dynamicMapping) == false) { + throw new IllegalStateException("illegal ingest mapping, pipeline mapping must be strict"); + } + Boolean allEnabled = (Boolean) XContentMapValues.extractValue("_all.enabled", pipelineMapping); + if (Boolean.FALSE.equals(allEnabled) == false) { + throw new IllegalStateException("illegal ingest mapping, _all field is enabled"); + } + + String 
processorsType = (String) XContentMapValues.extractValue("properties.processors.type", pipelineMapping); + if ("object".equals(processorsType) == false) { + throw new IllegalStateException("illegal ingest mapping, processors field's type is [" + processorsType + "] while [object] is expected"); + } + + Boolean processorsEnabled = (Boolean) XContentMapValues.extractValue("properties.processors.enabled", pipelineMapping); + if (Boolean.FALSE.equals(processorsEnabled) == false) { + throw new IllegalStateException("illegal ingest mapping, processors field enabled option is [true] while [false] is expected"); + } + + Boolean processorsDynamic = (Boolean) XContentMapValues.extractValue("properties.processors.dynamic", pipelineMapping); + if (Boolean.TRUE.equals(processorsDynamic) == false) { + throw new IllegalStateException("illegal ingest mapping, processors field dynamic option is [false] while [true] is expected"); + } + + String onFailureType = (String) XContentMapValues.extractValue("properties.on_failure.type", pipelineMapping); + if ("object".equals(onFailureType) == false) { + throw new IllegalStateException("illegal ingest mapping, on_failure field type option is [" + onFailureType + "] while [object] is expected"); + } + + Boolean onFailureEnabled = (Boolean) XContentMapValues.extractValue("properties.on_failure.enabled", pipelineMapping); + if (Boolean.FALSE.equals(onFailureEnabled) == false) { + throw new IllegalStateException("illegal ingest mapping, on_failure field enabled option is [true] while [false] is expected"); + } + + Boolean onFailureDynamic = (Boolean) XContentMapValues.extractValue("properties.on_failure.dynamic", pipelineMapping); + if (Boolean.TRUE.equals(onFailureDynamic) == false) { + throw new IllegalStateException("illegal ingest mapping, on_failure field dynamic option is [false] while [true] is expected"); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + return true; + } else { + return false; + } + } + + 
synchronized void start() throws Exception { if (started) { logger.debug("Pipeline already started"); diff --git a/core/src/main/resources/org/elasticsearch/ingest/ingest.json b/core/src/main/resources/org/elasticsearch/ingest/ingest.json deleted file mode 100644 index 46e1e184e14..00000000000 --- a/core/src/main/resources/org/elasticsearch/ingest/ingest.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "template": ".ingest", - "order": 2147483647, - "settings": { - "index.number_of_shards": 1, - "index.mapper.dynamic" : false - }, - "mappings": { - "pipeline": { - "dynamic" : "strict", - "_all" : { - "enabled" : false - }, - "properties": { - "processors": { - "type": "object", - "enabled" : false, - "dynamic" : true - }, - "on_failure": { - "type": "object", - "enabled" : false, - "dynamic" : true - }, - "description": { - "type": "string" - } - } - } - } -} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java deleted file mode 100644 index 22e2bbe97a8..00000000000 --- a/core/src/test/java/org/elasticsearch/ingest/IngestTemplateTests.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.hamcrest.Matchers; -import org.junit.Before; - -import static org.mockito.Mockito.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class IngestTemplateTests extends ESSingleNodeTestCase { - - @Override - protected boolean resetNodeAfterTest() { - return true; - } - - public void testIngestIndexTemplateIsInstalled() throws Exception { - assertBusy(IngestTemplateTests::verifyIngestIndexTemplateExist); - } - - public void testInstallTemplateAfterItHasBeenRemoved() throws Exception { - assertBusy(IngestTemplateTests::verifyIngestIndexTemplateExist); - client().admin().indices().prepareDeleteTemplate(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); - assertBusy(IngestTemplateTests::verifyIngestIndexTemplateExist); - } - - private static void verifyIngestIndexTemplateExist() { - GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME).get(); - assertThat(response.getIndexTemplates().size(), Matchers.equalTo(1)); - assertThat(response.getIndexTemplates().get(0).getName(), Matchers.equalTo(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); - assertThat(response.getIndexTemplates().get(0).getOrder(), Matchers.equalTo(Integer.MAX_VALUE)); - 
assertThat(response.getIndexTemplates().get(0).getMappings().size(), Matchers.equalTo(1)); - assertThat(response.getIndexTemplates().get(0).getMappings().get(PipelineStore.TYPE), Matchers.notNullValue()); - } - -} diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index bca06268000..e44a3fb5718 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -19,16 +19,22 @@ package org.elasticsearch.ingest; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; @@ -145,6 +151,269 @@ public class PipelineStoreTests extends ESTestCase { assertThat(result.get(1).getPipeline().getId(), equalTo("bar")); } + public void testValidateIngestIndex() throws Exception { + // ingest index doesn't exist: + ClusterState state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder()) + .build(); + assertThat(store.isIngestIndexPresent(state), equalTo(false)); + + // ingest index does 
exist and is valid: + IndexMetaData.Builder indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + assertThat(store.isIngestIndexPresent(state), equalTo(true)); + + // fails, has dynamic mapping + indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.mapper.dynamic", true) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("illegal ingest index setting, [index.mapper.dynamic] setting is [true] while [false] is expected")); + } + + // fails, incorrect number of primary shards + indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.number_of_shards", 2) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("illegal ingest index setting, [index.number_of_shards] setting is [2] while [1] is expected")); + } + + // fails, incorrect number of replica shards + indexMetaData = 
IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.number_of_replicas", 2) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("illegal ingest index setting, [index.number_of_replicas] setting is [2] while [1] is expected")); + } + + // fails not a strict mapping: + String mapping = XContentFactory.jsonBuilder().startObject() + .startObject("_all") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("processors") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("on_failure") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("description") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, mapping); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("illegal ingest mapping, pipeline mapping must be strict")); + } + + // fails _all field is enabled: + mapping = XContentFactory.jsonBuilder().startObject() + .field("dynamic", "strict") + .startObject("_all") + .field("enabled", true) + .endObject() + .startObject("properties") + 
.startObject("processors") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("on_failure") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("description") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, mapping); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("illegal ingest mapping, _all field is enabled")); + } + + // fails processor field not of type object: + mapping = XContentFactory.jsonBuilder().startObject() + .field("dynamic", "strict") + .startObject("_all") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("processors") + .field("type", "nested") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("on_failure") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("description") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, mapping); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), 
equalTo("illegal ingest mapping, processors field's type is [nested] while [object] is expected")); + } + + // fails processor field enabled option is true: + mapping = XContentFactory.jsonBuilder().startObject() + .field("dynamic", "strict") + .startObject("_all") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("processors") + .field("type", "object") + .field("enabled", true) + .field("dynamic", true) + .endObject() + .startObject("on_failure") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("description") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + .put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, mapping); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("illegal ingest mapping, processors field enabled option is [true] while [false] is expected")); + } + + // fails processor field dynamic option is false: + mapping = XContentFactory.jsonBuilder().startObject() + .field("dynamic", "strict") + .startObject("_all") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("processors") + .field("type", "object") + .field("enabled", false) + .field("dynamic", false) + .endObject() + .startObject("on_failure") + .field("type", "object") + .field("enabled", false) + .field("dynamic", true) + .endObject() + .startObject("description") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) + .settings(Settings.builder() + 
.put(PipelineStore.INGEST_INDEX_SETTING) + .put("index.version.created", Version.CURRENT) + ) + .putMapping(PipelineStore.TYPE, mapping); + state = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetaData)) + .build(); + try { + store.isIngestIndexPresent(state); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("illegal ingest mapping, processors field dynamic option is [false] while [true] is expected")); + } + } + static ActionFuture expectedSearchReponse(List hits) { return new PlainActionFuture() { From ae69d46f9243f0b4e684ead46073dce522a32593 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 18:57:02 +0100 Subject: [PATCH 188/347] move processors that have no deps to core, also move to core rest spec and tests and set node.inget to true by default --- .../elasticsearch/ingest/IngestModule.java | 28 ++++++- .../processor/AbstractStringProcessor.java | 0 .../ingest/processor/AppendProcessor.java | 0 .../ingest/processor/ConvertProcessor.java | 0 .../ingest/processor/DateFormat.java | 0 .../ingest/processor/DateProcessor.java | 0 .../ingest/processor/FailProcessor.java | 0 .../processor/FailProcessorException.java | 0 .../ingest/processor/GsubProcessor.java | 0 .../ingest/processor/JoinProcessor.java | 0 .../ingest/processor/LowercaseProcessor.java | 0 .../ingest/processor/RemoveProcessor.java | 0 .../ingest/processor/RenameProcessor.java | 0 .../ingest/processor/SetProcessor.java | 0 .../ingest/processor/SplitProcessor.java | 0 .../ingest/processor/TrimProcessor.java | 0 .../ingest/processor/UppercaseProcessor.java | 0 .../elasticsearch/ingest/IngestClientIT.java | 25 +----- .../AbstractStringProcessorTestCase.java | 0 .../AppendProcessorFactoryTests.java | 0 .../processor/AppendProcessorTests.java | 0 .../ConvertProcessorFactoryTests.java | 0 .../processor/ConvertProcessorTests.java | 0 .../ingest/processor/DateFormatTests.java | 0 
.../processor/DateProcessorFactoryTests.java | 0 .../ingest/processor/DateProcessorTests.java | 0 .../processor/FailProcessorFactoryTests.java | 0 .../ingest/processor/FailProcessorTests.java | 0 .../processor/GsubProcessorFactoryTests.java | 0 .../ingest/processor/GsubProcessorTests.java | 0 .../processor/JoinProcessorFactoryTests.java | 0 .../ingest/processor/JoinProcessorTests.java | 0 .../LowercaseProcessorFactoryTests.java | 0 .../processor/LowercaseProcessorTests.java | 0 .../RemoveProcessorFactoryTests.java | 0 .../processor/RemoveProcessorTests.java | 0 .../RenameProcessorFactoryTests.java | 0 .../processor/RenameProcessorTests.java | 0 .../processor/SetProcessorFactoryTests.java | 0 .../ingest/processor/SetProcessorTests.java | 0 .../processor/SplitProcessorFactoryTests.java | 0 .../ingest/processor/SplitProcessorTests.java | 0 .../processor/TrimProcessorFactoryTests.java | 0 .../ingest/processor/TrimProcessorTests.java | 0 .../UppercaseProcessorFactoryTests.java | 0 .../processor/UppercaseProcessorTests.java | 0 .../UpdateThreadPoolSettingsTests.java | 15 ++-- plugins/ingest/build.gradle | 8 +- .../plugin/ingest/IngestPlugin.java | 26 ------ .../rest-api-spec/api/ingest.bulk.json | 56 ------------- .../rest-api-spec/api/ingest.index.json | 80 ------------------- .../ingest/{30_grok.yaml => 20_grok.yaml} | 6 +- ...processor.yaml => 30_geoip_processor.yaml} | 6 +- qa/ingest-disabled/build.gradle | 1 + .../ingest_mustache/10_ingest_disabled.yaml | 4 +- qa/ingest-with-mustache/build.gradle | 1 - .../10_pipeline_with_mustache_templates.yaml | 10 +-- .../resources/rest-api-spec/api/bulk.json | 4 + .../resources/rest-api-spec/api/index.json | 4 + .../api/ingest.delete_pipeline.json | 0 .../api/ingest.get_pipeline.json | 0 .../api/ingest.put_pipeline.json | 0 .../rest-api-spec/api/ingest.simulate.json | 0 .../rest-api-spec/test/ingest/10_crud.yaml | 18 +---- .../test/ingest/20_date_processor.yaml | 2 +- .../rest-api-spec/test/ingest/30_mutate.yaml | 4 +- 
.../test/ingest/40_simulate.yaml | 0 .../test/ingest/50_on_failure.yaml | 42 ++++------ .../rest-api-spec/test/ingest/60_fail.yaml | 4 +- 69 files changed, 77 insertions(+), 267 deletions(-) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java (100%) rename {plugins/ingest => core}/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java (100%) rename {plugins/ingest => 
core}/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java (100%) rename {plugins/ingest => 
core}/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java (100%) rename {plugins/ingest => core}/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java (100%) delete mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json delete mode 100644 plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/{30_grok.yaml => 20_grok.yaml} (97%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/{40_geoip_processor.yaml => 30_geoip_processor.yaml} (98%) rename {plugins/ingest/src/test => rest-api-spec/src/main}/resources/rest-api-spec/api/ingest.delete_pipeline.json (100%) rename {plugins/ingest/src/test => rest-api-spec/src/main}/resources/rest-api-spec/api/ingest.get_pipeline.json (100%) rename {plugins/ingest/src/test => 
rest-api-spec/src/main}/resources/rest-api-spec/api/ingest.put_pipeline.json (100%) rename {plugins/ingest/src/test => rest-api-spec/src/main}/resources/rest-api-spec/api/ingest.simulate.json (100%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml => rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml (87%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml => rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml (97%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml => rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml (98%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml => rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml (100%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml => rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml (68%) rename plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml => rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml (97%) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java index 0c294941922..4dc77356183 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java @@ -24,6 +24,19 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; +import org.elasticsearch.ingest.processor.AppendProcessor; +import org.elasticsearch.ingest.processor.ConvertProcessor; +import org.elasticsearch.ingest.processor.DateProcessor; +import org.elasticsearch.ingest.processor.FailProcessor; +import 
org.elasticsearch.ingest.processor.GsubProcessor; +import org.elasticsearch.ingest.processor.JoinProcessor; +import org.elasticsearch.ingest.processor.LowercaseProcessor; +import org.elasticsearch.ingest.processor.RemoveProcessor; +import org.elasticsearch.ingest.processor.RenameProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; +import org.elasticsearch.ingest.processor.SplitProcessor; +import org.elasticsearch.ingest.processor.TrimProcessor; +import org.elasticsearch.ingest.processor.UppercaseProcessor; import org.elasticsearch.rest.action.ingest.IngestRestFilter; import java.util.function.BiFunction; @@ -38,6 +51,19 @@ public class IngestModule extends AbstractModule { public IngestModule() { this.processorsRegistry = new ProcessorsRegistry(); + registerProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + registerProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + registerProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + registerProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + registerProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + registerProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + registerProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + registerProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + registerProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); + registerProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + registerProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new 
ConvertProcessor.Factory()); + registerProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + registerProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); } @Override @@ -55,6 +81,6 @@ public class IngestModule extends AbstractModule { } public static boolean isIngestEnabled(Settings settings) { - return settings.getAsBoolean("node.ingest", false); + return settings.getAsBoolean("node.ingest", true); } } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java rename to 
core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java rename to core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java diff --git 
a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java 
b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java similarity index 100% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java rename to core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 2d7d2021838..7e1911dff08 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -25,19 +25,18 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.ingest.IngestActionFilter; import org.elasticsearch.action.ingest.DeletePipelineAction; import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; import org.elasticsearch.action.ingest.GetPipelineAction; import org.elasticsearch.action.ingest.GetPipelineRequestBuilder; import org.elasticsearch.action.ingest.GetPipelineResponse; +import org.elasticsearch.action.ingest.IngestActionFilter; import org.elasticsearch.action.ingest.PutPipelineAction; import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; import org.elasticsearch.action.ingest.SimulatePipelineAction; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; import 
org.elasticsearch.action.ingest.SimulatePipelineResponse; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -60,28 +59,6 @@ public class IngestClientIT extends ESIntegTestCase { return pluginList(IngestPlugin.class); } - @Override - protected Collection> transportClientPlugins() { - return nodePlugins(); - } - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("node.ingest", true) - .build(); - } - - @Override - protected Settings externalClusterClientSettings() { - return Settings.builder() - .put(super.transportClientSettings()) - //TODO can we remove this? - .put("node.ingest", true) - .build(); - } - public void testSimulate() throws Exception { new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) .setId("_id") diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/core/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java rename to core/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java 
b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java diff --git 
a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java 
diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java rename to 
core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java similarity index 100% rename from 
plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java 
b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java similarity index 100% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java rename to core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index a29524a4699..ef03181fea6 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -56,7 +56,6 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { threadPool = new ThreadPool(settingsBuilder() .put("name", "testCorrectThreadPoolTypePermittedInSettings") .put("threadpool." + threadPoolName + ".type", correctThreadPoolType.getType()) - .put("node.ingest", true) .build()); ThreadPool.Info info = info(threadPool, threadPoolName); if (ThreadPool.Names.SAME.equals(threadPoolName)) { @@ -80,7 +79,6 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { settingsBuilder() .put("name", "testThreadPoolCanNotOverrideThreadPoolType") .put("threadpool." 
+ threadPoolName + ".type", incorrectThreadPoolType.getType()) - .put("node.ingest", true) .build()); terminate(threadPool); fail("expected IllegalArgumentException"); @@ -99,8 +97,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool.ThreadPoolType validThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); ThreadPool threadPool = null; try { - threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType") - .put("node.ingest", true).build()); + threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType").build()); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -125,7 +122,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool threadPool = null; try { Settings nodeSettings = Settings.settingsBuilder() - .put("name", "testCachedExecutorType").put("node.ingest", true).build(); + .put("name", "testCachedExecutorType").build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -176,7 +173,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { try { Settings nodeSettings = Settings.settingsBuilder() - .put("name", "testFixedExecutorType").put("node.ingest", true).build(); + .put("name", "testFixedExecutorType").build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -231,7 +228,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { try { Settings nodeSettings = settingsBuilder() .put("threadpool." 
+ threadPoolName + ".size", 10) - .put("name", "testScalingExecutorType").put("node.ingest", true).build(); + .put("name", "testScalingExecutorType").build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -269,7 +266,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { try { Settings nodeSettings = Settings.settingsBuilder() .put("threadpool." + threadPoolName + ".queue_size", 1000) - .put("name", "testCachedExecutorType").put("node.ingest", true).build(); + .put("name", "testCachedExecutorType").build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); @@ -306,7 +303,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { .put("threadpool.my_pool2.type", "fixed") .put("threadpool.my_pool2.size", "1") .put("threadpool.my_pool2.queue_size", "1") - .put("name", "testCustomThreadPool").put("node.ingest", true).build(); + .put("name", "testCustomThreadPool").build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 6b869e15582..ca7e33e5a04 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -77,10 +77,4 @@ thirdPartyAudit.excludes = [ 'org.objectweb.asm.ClassWriter', 'org.objectweb.asm.MethodVisitor', 'org.objectweb.asm.Opcodes', -] - -integTest { - cluster { - systemProperty 'es.node.ingest', 'true' - } -} +] \ No newline at end of file diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java 
b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index e9e944d8a92..d743e72a700 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -20,21 +20,8 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.IngestModule; -import org.elasticsearch.ingest.processor.AppendProcessor; -import org.elasticsearch.ingest.processor.ConvertProcessor; -import org.elasticsearch.ingest.processor.DateProcessor; -import org.elasticsearch.ingest.processor.FailProcessor; import org.elasticsearch.ingest.processor.GeoIpProcessor; import org.elasticsearch.ingest.processor.GrokProcessor; -import org.elasticsearch.ingest.processor.GsubProcessor; -import org.elasticsearch.ingest.processor.JoinProcessor; -import org.elasticsearch.ingest.processor.LowercaseProcessor; -import org.elasticsearch.ingest.processor.RemoveProcessor; -import org.elasticsearch.ingest.processor.RenameProcessor; -import org.elasticsearch.ingest.processor.SetProcessor; -import org.elasticsearch.ingest.processor.SplitProcessor; -import org.elasticsearch.ingest.processor.TrimProcessor; -import org.elasticsearch.ingest.processor.UppercaseProcessor; import org.elasticsearch.plugins.Plugin; public class IngestPlugin extends Plugin { @@ -54,18 +41,5 @@ public class IngestPlugin extends Plugin { public void onModule(IngestModule ingestModule) { ingestModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); - ingestModule.registerProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); - ingestModule.registerProcessor(SetProcessor.TYPE, (environment, templateService) -> new 
SetProcessor.Factory(templateService)); - ingestModule.registerProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - ingestModule.registerProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - ingestModule.registerProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - ingestModule.registerProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - ingestModule.registerProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - ingestModule.registerProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - ingestModule.registerProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); - ingestModule.registerProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - ingestModule.registerProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - ingestModule.registerProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - ingestModule.registerProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); } } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json deleted file mode 100644 index ecd53ee496b..00000000000 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.bulk.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "ingest.bulk": { - "documentation": "Copied from bulk in core to add the pipeline parameter to rest spec", - "methods": ["POST", "PUT"], - "url": { - "path": "/_bulk", - "paths": ["/_bulk", "/{index}/_bulk", "/{index}/{type}/_bulk"], - "parts": { - "index": { - 
"type" : "string", - "description" : "Default index for items which don't provide one" - }, - "type": { - "type" : "string", - "description" : "Default document type for items which don't provide one" - } - }, - "params": { - "consistency": { - "type" : "enum", - "options" : ["one", "quorum", "all"], - "description" : "Explicit write consistency setting for the operation" - }, - "refresh": { - "type" : "boolean", - "description" : "Refresh the index after performing the operation" - }, - "routing": { - "type" : "string", - "description" : "Specific routing value" - }, - "timeout": { - "type" : "time", - "description" : "Explicit operation timeout" - }, - "type": { - "type" : "string", - "description" : "Default document type for items which don't provide one" - }, - "fields": { - "type": "list", - "description" : "Default comma-separated list of fields to return in the response for updates" - }, - "pipeline" : { - "type" : "string", - "description" : "The pipeline id to preprocess incoming documents with" - } - } - }, - "body": { - "description" : "The operation definition and data (action-data pairs), separated by newlines", - "required" : true, - "serialize" : "bulk" - } - } -} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json b/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json deleted file mode 100644 index 7420f69e45e..00000000000 --- a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.index.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "ingest.index": { - "documentation": "Copied from index in core to add support for the pipeline parameter to rest spec", - "methods": ["POST", "PUT"], - "url": { - "path": "/{index}/{type}", - "paths": ["/{index}/{type}", "/{index}/{type}/{id}"], - "parts": { - "id": { - "type" : "string", - "description" : "Document ID" - }, - "index": { - "type" : "string", - "required" : true, - "description" : "The name of the index" - }, - "type": { - "type" : "string", - "required" : 
true, - "description" : "The type of the document" - } - }, - "params": { - "consistency": { - "type" : "enum", - "options" : ["one", "quorum", "all"], - "description" : "Explicit write consistency setting for the operation" - }, - "op_type": { - "type" : "enum", - "options" : ["index", "create"], - "default" : "index", - "description" : "Explicit operation type" - }, - "parent": { - "type" : "string", - "description" : "ID of the parent document" - }, - "refresh": { - "type" : "boolean", - "description" : "Refresh the index after performing the operation" - }, - "routing": { - "type" : "string", - "description" : "Specific routing value" - }, - "timeout": { - "type" : "time", - "description" : "Explicit operation timeout" - }, - "timestamp": { - "type" : "time", - "description" : "Explicit timestamp for the document" - }, - "ttl": { - "type" : "duration", - "description" : "Expiration time for the document" - }, - "version" : { - "type" : "number", - "description" : "Explicit version number for concurrency control" - }, - "version_type": { - "type" : "enum", - "options" : ["internal", "external", "external_gte", "force"], - "description" : "Specific version type" - }, - "pipeline" : { - "type" : "string", - "description" : "The pipeline id to preprocess incoming documents with" - } - } - }, - "body": { - "description" : "The document", - "required" : true - } - } -} diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_grok.yaml similarity index 97% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml rename to plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_grok.yaml index e8a59d4ea65..70b1c8a9d40 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_grok.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_grok.yaml @@ -18,7 +18,7 @@ - match: { _id: "my_pipeline" } - do: - 
ingest.index: + index: index: test type: test id: 1 @@ -57,7 +57,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 @@ -94,7 +94,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_geoip_processor.yaml similarity index 98% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml rename to plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_geoip_processor.yaml index d86eb25d4d4..91e0c7ac844 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/40_geoip_processor.yaml +++ b/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_geoip_processor.yaml @@ -17,7 +17,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 @@ -61,7 +61,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 @@ -106,7 +106,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 diff --git a/qa/ingest-disabled/build.gradle b/qa/ingest-disabled/build.gradle index c74535e3789..6acfc38b378 100644 --- a/qa/ingest-disabled/build.gradle +++ b/qa/ingest-disabled/build.gradle @@ -26,5 +26,6 @@ dependencies { integTest { cluster { plugin 'ingest', project(':plugins:ingest') + systemProperty 'es.node.ingest', 'false' } } diff --git a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml index a8eb7861efc..156455c0957 100644 --- a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml +++ 
b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml @@ -80,7 +80,7 @@ "Test index api with pipeline id fails when node.ingest is set to false": - do: catch: /ingest plugin is disabled, cannot execute pipeline with id \[my_pipeline_1\]/ - ingest.index: + index: index: test type: test id: 1 @@ -95,7 +95,7 @@ "Test bulk api with pipeline id fails when node.ingest is set to false": - do: catch: /ingest plugin is disabled, cannot execute pipeline with id \[my_pipeline_1\]/ - ingest.bulk: + bulk: pipeline: "my_pipeline_1" body: - index: diff --git a/qa/ingest-with-mustache/build.gradle b/qa/ingest-with-mustache/build.gradle index 8c0adbefaef..32ed5f8956f 100644 --- a/qa/ingest-with-mustache/build.gradle +++ b/qa/ingest-with-mustache/build.gradle @@ -27,6 +27,5 @@ dependencies { integTest { cluster { plugin 'ingest', project(':plugins:ingest') - systemProperty 'es.node.ingest', 'true' } } diff --git a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml index 3421fc7fd00..e65e68fdd45 100644 --- a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml +++ b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml @@ -28,7 +28,7 @@ - match: { _id: "my_pipeline_1" } - do: - ingest.index: + index: index: test type: test id: 1 @@ -108,7 +108,7 @@ - match: { _id: "my_pipeline_3" } - do: - ingest.index: + index: index: test type: test id: 1 @@ -133,7 +133,7 @@ - match: { _source.metadata: ["0","1","2","3"] } - do: - ingest.index: + index: index: test type: test id: 1 @@ -152,7 +152,7 @@ - match: { _source.field2: "value" } - do: - ingest.index: + index: index: test type: test id: 1 @@ -201,7 +201,7 @@ - match: { _id: 
"my_handled_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json index 577a03fd770..590054b04a4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json @@ -40,6 +40,10 @@ "fields": { "type": "list", "description" : "Default comma-separated list of fields to return in the response for updates" + }, + "pipeline" : { + "type" : "string", + "description" : "The pipeline id to preprocess incoming documents with" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json index 1b8f7140dcd..5c13f67c212 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json @@ -65,6 +65,10 @@ "type" : "enum", "options" : ["internal", "external", "external_gte", "force"], "description" : "Specific version type" + }, + "pipeline" : { + "type" : "string", + "description" : "The pipeline id to preprocess incoming documents with" } } }, diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.delete_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/api/ingest.delete_pipeline.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/api/ingest.get_pipeline.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json diff --git 
a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.put_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/api/ingest.put_pipeline.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json diff --git a/plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.simulate.json similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/api/ingest.simulate.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/ingest.simulate.json diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml similarity index 87% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml index 01b43cfefaa..5a62247df41 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml @@ -51,23 +51,7 @@ "description": "_description", "processors": [ { - "geoip" : { - } - } - ] - } - - - do: - catch: param - ingest.put_pipeline: - id: "my_pipeline" - body: > - { - "description": "_description", - "processors": [ - { - "geoip" : { - "ip_field" : 1234 + "set" : { } } ] diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml similarity index 97% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml index 64354c8ac16..8852e5e5749 100644 --- 
a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/50_date_processor.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml @@ -20,7 +20,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml similarity index 98% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml index f1a324318e7..a0a29e9c050 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/60_mutate.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml @@ -75,7 +75,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 @@ -133,7 +133,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/70_simulate.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml similarity index 68% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml index e28d5ddaf94..a01b0dacac0 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/80_on_failure.yaml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml @@ -8,25 +8,20 @@ "description": "_description", "processors": [ { - "rename" : { - "field" : "foofield", - "to" : "field1" + "set" : { + "field" : "_executed", + "value" : true } }, { - "grok" : { - "field" : "field1", - "pattern" : "%{NUMBER:val} %{NUMBER:status} <%{WORD:msg}>" + "date" : { + "match_field" : "date", + "target_field" : "date", + "match_formats" : ["yyyy"] } } ], "on_failure" : [ - { - "grok" : { - "field" : "field1", - "pattern" : "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>" - } - }, { "set" : { "field" : "_failed", @@ -38,21 +33,20 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 pipeline: "my_pipeline" - body: {field1: "123.42 400 "} + body: {field1: "value1"} - do: get: index: test type: test id: 1 - - match: { _source.val: 123.42 } - - match: { _source.status: 400 } - - match: { _source.msg: "foo" } + - match: { _source.field1: "value1" } + - match: { _source._executed: true } - match: { _source._failed: true } --- @@ -64,12 +58,6 @@ { "description": "_description", "processors": [ - { - "grok" : { - "field" : "field1", - "pattern" : "%{NUMBER:val:float} %{NUMBER:status:int} <%{WORD:msg}>" - } - }, { "rename" : { "field" : "foofield", @@ -103,20 +91,18 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 pipeline: "my_pipeline" - body: {field1: "123.42 400 "} + body: {field1: "value1"} - do: get: index: test type: test id: 1 - - match: { _source.val: 123.42 } - - match: { _source.msg: "foo" } - - match: { _source.status: 400 } + - match: { _source.field1: "value1" } - match: { _source.foofield: "exists" } - match: { _source.foofield2: "ran" } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml similarity index 97% rename from 
plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml index d416e84100a..d491a95686e 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/90_fail.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml @@ -18,7 +18,7 @@ - do: catch: request - ingest.index: + index: index: test type: test id: 1 @@ -52,7 +52,7 @@ - match: { _id: "my_pipeline" } - do: - ingest.index: + index: index: test type: test id: 1 From 8675784e3fbf45304f03698dbe66dac9452d562f Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 7 Jan 2016 18:59:14 +0100 Subject: [PATCH 189/347] adapt error message when node.ingest is set to false and we fail executing a pipeline --- .../action/ingest/IngestDisabledActionFilter.java | 2 +- .../test/ingest_mustache/10_ingest_disabled.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java index 0dd7b4a1eff..3603b882e4b 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java @@ -52,7 +52,7 @@ public final class IngestDisabledActionFilter implements ActionFilter { } private static void failRequest(String pipelineId) { - throw new IllegalArgumentException("ingest plugin is disabled, cannot execute pipeline with id [" + pipelineId + "]"); + throw new IllegalArgumentException("node.ingest is set to false, cannot execute pipeline with id [" + pipelineId + "]"); } } diff --git a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml index 156455c0957..957fbd29d2c 100644 --- 
a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml +++ b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml @@ -79,7 +79,7 @@ --- "Test index api with pipeline id fails when node.ingest is set to false": - do: - catch: /ingest plugin is disabled, cannot execute pipeline with id \[my_pipeline_1\]/ + catch: /node.ingest is set to false, cannot execute pipeline with id \[my_pipeline_1\]/ index: index: test type: test @@ -94,7 +94,7 @@ --- "Test bulk api with pipeline id fails when node.ingest is set to false": - do: - catch: /ingest plugin is disabled, cannot execute pipeline with id \[my_pipeline_1\]/ + catch: /node.ingest is set to false, cannot execute pipeline with id \[my_pipeline_1\]/ bulk: pipeline: "my_pipeline_1" body: From b9dd99636d67855767831088087b3d448d13fca7 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 8 Jan 2016 11:36:07 +0100 Subject: [PATCH 190/347] dynamic option is a string, not a boolean --- .../org/elasticsearch/ingest/PipelineStore.java | 13 +++++++------ .../elasticsearch/ingest/PipelineStoreTests.java | 16 ++++++++-------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index afae8468abb..785eb5829bc 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -35,6 +35,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SearchScrollIterator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; @@ -91,12 +92,12 @@ public class 
PipelineStore extends AbstractComponent implements Closeable { .startObject("processors") .field("type", "object") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("on_failure") .field("type", "object") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("description") .field("type", "string") @@ -359,8 +360,8 @@ public class PipelineStore extends AbstractComponent implements Closeable { throw new IllegalStateException("illegal ingest mapping, processors field enabled option is [true] while [false] is expected"); } - Boolean processorsDynamic = (Boolean) XContentMapValues.extractValue("properties.processors.dynamic", pipelineMapping); - if (Boolean.TRUE.equals(processorsDynamic) == false) { + String processorsDynamic = (String) XContentMapValues.extractValue("properties.processors.dynamic", pipelineMapping); + if ("true".equals(processorsDynamic) == false) { throw new IllegalStateException("illegal ingest mapping, processors field dynamic option is [false] while [true] is expected"); } @@ -374,8 +375,8 @@ public class PipelineStore extends AbstractComponent implements Closeable { throw new IllegalStateException("illegal ingest mapping, on_failure field enabled option is [true] while [false] is expected"); } - Boolean onFailureDynamic = (Boolean) XContentMapValues.extractValue("properties.on_failure.dynamic", pipelineMapping); - if (Boolean.TRUE.equals(onFailureDynamic) == false) { + String onFailureDynamic = (String) XContentMapValues.extractValue("properties.on_failure.dynamic", pipelineMapping); + if ("true".equals(onFailureDynamic) == false) { throw new IllegalStateException("illegal ingest mapping, on_failure field dynamic option is [false] while [true] is expected"); } } catch (IOException e) { diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index 
e44a3fb5718..57086cc02a9 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -271,12 +271,12 @@ public class PipelineStoreTests extends ESTestCase { .startObject("processors") .field("type", "object") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("on_failure") .field("type", "object") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("description") .field("type", "string") @@ -309,12 +309,12 @@ public class PipelineStoreTests extends ESTestCase { .startObject("processors") .field("type", "nested") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("on_failure") .field("type", "object") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("description") .field("type", "string") @@ -347,12 +347,12 @@ public class PipelineStoreTests extends ESTestCase { .startObject("processors") .field("type", "object") .field("enabled", true) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("on_failure") .field("type", "object") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("description") .field("type", "string") @@ -385,12 +385,12 @@ public class PipelineStoreTests extends ESTestCase { .startObject("processors") .field("type", "object") .field("enabled", false) - .field("dynamic", false) + .field("dynamic", "false") .endObject() .startObject("on_failure") .field("type", "object") .field("enabled", false) - .field("dynamic", true) + .field("dynamic", "true") .endObject() .startObject("description") .field("type", "string") From de2eac4c49b178d89c05ed024e5274a1f47febad Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 8 Jan 2016 14:31:35 +0100 Subject: [PATCH 191/347] removed 
leftover jodatime dependency --- plugins/ingest/build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index ca7e33e5a04..c9cfe531b83 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -33,7 +33,6 @@ dependencies { compile('com.fasterxml.jackson.core:jackson-databind:2.5.3') compile('com.maxmind.db:maxmind-db:1.0.1') - compile 'joda-time:joda-time:2.8.2' testCompile 'org.elasticsearch:geolite2-databases:20151029' } From 3e08c24991a79cb97286dacac8335f84fe1c6f6e Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 8 Jan 2016 14:31:57 +0100 Subject: [PATCH 192/347] clarify comment in TemplateService, explain why we have this class --- .../java/org/elasticsearch/ingest/core/TemplateService.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java b/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java index df77453881c..8988c924c35 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java @@ -21,7 +21,8 @@ package org.elasticsearch.ingest.core; import java.util.Map; /** - * Abstraction for the ingest template engine: allows to compile a template into a {@link Template} object. + * Abstraction for the ingest template engine used to decouple {@link IngestDocument} from {@link org.elasticsearch.script.ScriptService}. + * Allows to compile a template into an ingest {@link Template} object. * A compiled template can be executed by calling its {@link Template#execute(Map)} method. 
*/ public interface TemplateService { From 1637fe9e0b184619279d92ff71b6ca494b3214b2 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 8 Jan 2016 14:23:53 +0100 Subject: [PATCH 193/347] Moved the grok processor to its own module, so that it will be available out-of-the-box, while its dependencies are isolated --- modules/ingest-grok/build.gradle | 39 +++++++++++++++ .../licenses/jcodings-1.0.12.jar.sha1 | 1 + .../licenses/jcodings-LICENSE.txt | 0 .../ingest-grok}/licenses/jcodings-NOTICE.txt | 0 .../ingest-grok/licenses/joni-2.1.6.jar.sha1 | 1 + .../ingest-grok}/licenses/joni-LICENSE.txt | 0 .../ingest-grok}/licenses/joni-NOTICE.txt | 0 .../org/elasticsearch/ingest/grok}/Grok.java | 2 +- .../ingest/grok}/GrokMatchGroup.java | 2 +- .../ingest/grok}/GrokProcessor.java | 41 +++++++--------- .../ingest/grok/IngestGrokPlugin.java | 48 ++++++++++++++++++ .../src/main/resources}/patterns/aws | 0 .../src/main/resources}/patterns/bacula | 0 .../src/main/resources}/patterns/bro | 0 .../src/main/resources}/patterns/exim | 0 .../src/main/resources}/patterns/firewalls | 0 .../main/resources}/patterns/grok-patterns | 0 .../src/main/resources}/patterns/haproxy | 0 .../src/main/resources}/patterns/java | 0 .../src/main/resources}/patterns/junos | 0 .../src/main/resources}/patterns/linux-syslog | 0 .../resources}/patterns/mcollective-patterns | 0 .../src/main/resources}/patterns/mongodb | 0 .../src/main/resources}/patterns/nagios | 0 .../src/main/resources}/patterns/postgresql | 0 .../src/main/resources}/patterns/rails | 0 .../src/main/resources}/patterns/redis | 0 .../src/main/resources}/patterns/ruby | 0 .../grok}/GrokProcessorFactoryTests.java | 21 ++------ .../ingest/grok}/GrokProcessorTests.java | 6 ++- .../elasticsearch/ingest/grok}/GrokTests.java | 10 ++-- .../ingest/grok/IngestGrokRestIT.java | 49 +++++++++++++++++++ .../test/ingest_grok/10_basic.yaml | 12 +++++ .../test/ingest_grok}/20_grok.yaml | 0 plugins/ingest/build.gradle | 8 ---
.../ingest/licenses/jcodings-1.0.12.jar.sha1 | 1 - plugins/ingest/licenses/joni-2.1.6.jar.sha1 | 1 - .../plugin/ingest/IngestPlugin.java | 2 - settings.gradle | 1 + 39 files changed, 184 insertions(+), 61 deletions(-) create mode 100644 modules/ingest-grok/build.gradle create mode 100644 modules/ingest-grok/licenses/jcodings-1.0.12.jar.sha1 rename {plugins/ingest => modules/ingest-grok}/licenses/jcodings-LICENSE.txt (100%) rename {plugins/ingest => modules/ingest-grok}/licenses/jcodings-NOTICE.txt (100%) create mode 100644 modules/ingest-grok/licenses/joni-2.1.6.jar.sha1 rename {plugins/ingest => modules/ingest-grok}/licenses/joni-LICENSE.txt (100%) rename {plugins/ingest => modules/ingest-grok}/licenses/joni-NOTICE.txt (100%) rename {plugins/ingest/src/main/java/org/elasticsearch/ingest/processor => modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok}/Grok.java (99%) rename {plugins/ingest/src/main/java/org/elasticsearch/ingest/processor => modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok}/GrokMatchGroup.java (97%) rename {plugins/ingest/src/main/java/org/elasticsearch/ingest/processor => modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok}/GrokProcessor.java (77%) create mode 100644 modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/aws (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/bacula (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/bro (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/exim (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/firewalls (100%) rename {plugins/ingest/src/main/packaging/config/grok => 
modules/ingest-grok/src/main/resources}/patterns/grok-patterns (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/haproxy (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/java (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/junos (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/linux-syslog (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/mcollective-patterns (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/mongodb (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/nagios (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/postgresql (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/rails (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/redis (100%) rename {plugins/ingest/src/main/packaging/config/grok => modules/ingest-grok/src/main/resources}/patterns/ruby (100%) rename {plugins/ingest/src/test/java/org/elasticsearch/ingest/processor => modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok}/GrokProcessorFactoryTests.java (80%) rename {plugins/ingest/src/test/java/org/elasticsearch/ingest/processor => modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok}/GrokProcessorTests.java (96%) rename {plugins/ingest/src/test/java/org/elasticsearch/ingest/processor => modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok}/GrokTests.java (98%) create mode 100644 
modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java create mode 100644 modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml rename {plugins/ingest/src/test/resources/rest-api-spec/test/ingest => modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok}/20_grok.yaml (100%) delete mode 100644 plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 delete mode 100644 plugins/ingest/licenses/joni-2.1.6.jar.sha1 diff --git a/modules/ingest-grok/build.gradle b/modules/ingest-grok/build.gradle new file mode 100644 index 00000000000..26722345544 --- /dev/null +++ b/modules/ingest-grok/build.gradle @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +esplugin { + description 'Ingest processor that uses grok patterns to split text' + classname 'org.elasticsearch.ingest.grok.IngestGrokPlugin' +} + +dependencies { + compile 'org.jruby.joni:joni:2.1.6' + // joni dependencies: + compile 'org.jruby.jcodings:jcodings:1.0.12' +} + +compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked,-serial" +compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" + +thirdPartyAudit.excludes = [ + // joni has AsmCompilerSupport, but that isn't being used: + 'org.objectweb.asm.ClassWriter', + 'org.objectweb.asm.MethodVisitor', + 'org.objectweb.asm.Opcodes', +] \ No newline at end of file diff --git a/modules/ingest-grok/licenses/jcodings-1.0.12.jar.sha1 b/modules/ingest-grok/licenses/jcodings-1.0.12.jar.sha1 new file mode 100644 index 00000000000..b097e32ece4 --- /dev/null +++ b/modules/ingest-grok/licenses/jcodings-1.0.12.jar.sha1 @@ -0,0 +1 @@ +6bc17079fcaa8823ea8cd0d4c66516335b558db8 \ No newline at end of file diff --git a/plugins/ingest/licenses/jcodings-LICENSE.txt b/modules/ingest-grok/licenses/jcodings-LICENSE.txt similarity index 100% rename from plugins/ingest/licenses/jcodings-LICENSE.txt rename to modules/ingest-grok/licenses/jcodings-LICENSE.txt diff --git a/plugins/ingest/licenses/jcodings-NOTICE.txt b/modules/ingest-grok/licenses/jcodings-NOTICE.txt similarity index 100% rename from plugins/ingest/licenses/jcodings-NOTICE.txt rename to modules/ingest-grok/licenses/jcodings-NOTICE.txt diff --git a/modules/ingest-grok/licenses/joni-2.1.6.jar.sha1 b/modules/ingest-grok/licenses/joni-2.1.6.jar.sha1 new file mode 100644 index 00000000000..48abe138a8f --- /dev/null +++ b/modules/ingest-grok/licenses/joni-2.1.6.jar.sha1 @@ -0,0 +1 @@ +0f23c95a06eaecbc8c74c7458a8bfd13e4fd2d3a \ No newline at end of file diff --git a/plugins/ingest/licenses/joni-LICENSE.txt b/modules/ingest-grok/licenses/joni-LICENSE.txt similarity index 100% rename from plugins/ingest/licenses/joni-LICENSE.txt rename to 
modules/ingest-grok/licenses/joni-LICENSE.txt diff --git a/plugins/ingest/licenses/joni-NOTICE.txt b/modules/ingest-grok/licenses/joni-NOTICE.txt similarity index 100% rename from plugins/ingest/licenses/joni-NOTICE.txt rename to modules/ingest-grok/licenses/joni-NOTICE.txt diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Grok.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/Grok.java similarity index 99% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Grok.java rename to modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/Grok.java index bb88aaa3436..228a2cbab57 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/Grok.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/Grok.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.grok; import org.jcodings.specific.UTF8Encoding; import org.joni.Matcher; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokMatchGroup.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokMatchGroup.java similarity index 97% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokMatchGroup.java rename to modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokMatchGroup.java index 3371a79c7e5..2cebf620c96 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokMatchGroup.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokMatchGroup.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.grok; final class GrokMatchGroup { private static final String DEFAULT_TYPE = "string"; diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java similarity index 77% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java rename to modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java index 5481a4dfffe..d38ea96131f 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GrokProcessor.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.grok; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; @@ -28,10 +28,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; -import java.nio.file.DirectoryStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -71,12 +68,23 @@ public final class GrokProcessor implements Processor { return grok; } - public static class Factory implements Processor.Factory { + public final static class Factory implements Processor.Factory { - private final Path grokConfigDirectory; + private final static String[] PATTERN_NAMES = new String[] { + "aws", "bacula", "bro", "exim", "firewalls", "grok-patterns", "haproxy", + "java", "junos", "linux-syslog", "mcollective-patterns", "mongodb", "nagios", + "postgresql", "rails", "redis", "ruby" + }; + private final Map builtinPatternBank; - public Factory(Path configDirectory) { - 
this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok"); + public Factory() throws IOException { + Map builtinPatterns = new HashMap<>(); + for (String pattern : PATTERN_NAMES) { + try(InputStream is = getClass().getResourceAsStream("/patterns/" + pattern)) { + loadBankFromStream(builtinPatterns, is); + } + } + this.builtinPatternBank = Collections.unmodifiableMap(builtinPatterns); } static void loadBankFromStream(Map patternBank, InputStream inputStream) throws IOException { @@ -99,20 +107,7 @@ public final class GrokProcessor implements Processor { String matchField = ConfigurationUtils.readStringProperty(config, "field"); String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern"); Map customPatternBank = ConfigurationUtils.readOptionalMap(config, "pattern_definitions"); - - Map patternBank = new HashMap<>(); - - Path patternsDirectory = grokConfigDirectory.resolve("patterns"); - try (DirectoryStream stream = Files.newDirectoryStream(patternsDirectory)) { - for (Path patternFilePath : stream) { - if (Files.isRegularFile(patternFilePath)) { - try(InputStream is = Files.newInputStream(patternFilePath, StandardOpenOption.READ)) { - loadBankFromStream(patternBank, is); - } - } - } - } - + Map patternBank = new HashMap<>(builtinPatternBank); if (customPatternBank != null) { patternBank.putAll(customPatternBank); } diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java new file mode 100644 index 00000000000..2b61b5e9073 --- /dev/null +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.grok; + +import org.elasticsearch.ingest.IngestModule; +import org.elasticsearch.plugins.Plugin; + +import java.io.IOException; + +public class IngestGrokPlugin extends Plugin { + + @Override + public String name() { + return "ingest-grok"; + } + + @Override + public String description() { + return "Ingest processor that uses grok patterns to split text"; + } + + public void onModule(IngestModule ingestModule) { + ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> { + try { + return new GrokProcessor.Factory(); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } +} diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/aws b/modules/ingest-grok/src/main/resources/patterns/aws similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/aws rename to modules/ingest-grok/src/main/resources/patterns/aws diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/bacula b/modules/ingest-grok/src/main/resources/patterns/bacula similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/bacula rename to modules/ingest-grok/src/main/resources/patterns/bacula diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/bro b/modules/ingest-grok/src/main/resources/patterns/bro similarity index 100% rename 
from plugins/ingest/src/main/packaging/config/grok/patterns/bro rename to modules/ingest-grok/src/main/resources/patterns/bro diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/exim b/modules/ingest-grok/src/main/resources/patterns/exim similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/exim rename to modules/ingest-grok/src/main/resources/patterns/exim diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/firewalls b/modules/ingest-grok/src/main/resources/patterns/firewalls similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/firewalls rename to modules/ingest-grok/src/main/resources/patterns/firewalls diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/grok-patterns b/modules/ingest-grok/src/main/resources/patterns/grok-patterns similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/grok-patterns rename to modules/ingest-grok/src/main/resources/patterns/grok-patterns diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/haproxy b/modules/ingest-grok/src/main/resources/patterns/haproxy similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/haproxy rename to modules/ingest-grok/src/main/resources/patterns/haproxy diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/java b/modules/ingest-grok/src/main/resources/patterns/java similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/java rename to modules/ingest-grok/src/main/resources/patterns/java diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/junos b/modules/ingest-grok/src/main/resources/patterns/junos similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/junos rename to modules/ingest-grok/src/main/resources/patterns/junos diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/linux-syslog 
b/modules/ingest-grok/src/main/resources/patterns/linux-syslog similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/linux-syslog rename to modules/ingest-grok/src/main/resources/patterns/linux-syslog diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/mcollective-patterns b/modules/ingest-grok/src/main/resources/patterns/mcollective-patterns similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/mcollective-patterns rename to modules/ingest-grok/src/main/resources/patterns/mcollective-patterns diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/mongodb b/modules/ingest-grok/src/main/resources/patterns/mongodb similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/mongodb rename to modules/ingest-grok/src/main/resources/patterns/mongodb diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/nagios b/modules/ingest-grok/src/main/resources/patterns/nagios similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/nagios rename to modules/ingest-grok/src/main/resources/patterns/nagios diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/postgresql b/modules/ingest-grok/src/main/resources/patterns/postgresql similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/postgresql rename to modules/ingest-grok/src/main/resources/patterns/postgresql diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/rails b/modules/ingest-grok/src/main/resources/patterns/rails similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/rails rename to modules/ingest-grok/src/main/resources/patterns/rails diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/redis b/modules/ingest-grok/src/main/resources/patterns/redis similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/redis 
rename to modules/ingest-grok/src/main/resources/patterns/redis diff --git a/plugins/ingest/src/main/packaging/config/grok/patterns/ruby b/modules/ingest-grok/src/main/resources/patterns/ruby similarity index 100% rename from plugins/ingest/src/main/packaging/config/grok/patterns/ruby rename to modules/ingest-grok/src/main/resources/patterns/ruby diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorFactoryTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java similarity index 80% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorFactoryTests.java rename to modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java index 833228d18d7..11c1024eb82 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorFactoryTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java @@ -17,14 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.grok; -import org.elasticsearch.ingest.processor.GrokProcessor; +import org.elasticsearch.ingest.grok.GrokProcessor; import org.elasticsearch.test.ESTestCase; -import org.junit.Before; -import java.nio.file.Files; -import java.nio.file.Path; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -34,18 +31,8 @@ import static org.hamcrest.Matchers.notNullValue; public class GrokProcessorFactoryTests extends ESTestCase { - private Path configDir; - - @Before - public void prepareConfigDirectory() throws Exception { - this.configDir = createTempDir(); - Path grokDir = configDir.resolve("ingest").resolve("grok"); - Path patternsDir = grokDir.resolve("patterns"); - Files.createDirectories(patternsDir); - } - public void testBuild() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(configDir); + GrokProcessor.Factory factory = new GrokProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "_field"); @@ -56,7 +43,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { } public void testCreateWithCustomPatterns() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(configDir); + GrokProcessor.Factory factory = new GrokProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "_field"); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java similarity index 96% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java rename to modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java index ed9b8f6e621..bb2de7e2983 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokProcessorTests.java +++ 
b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java @@ -17,10 +17,12 @@ * under the License. */ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.grok; -import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.grok.Grok; +import org.elasticsearch.ingest.grok.GrokProcessor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java similarity index 98% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokTests.java rename to modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java index 7565352885b..df9a7695d87 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GrokTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java @@ -17,10 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.grok; -import org.elasticsearch.ingest.processor.Grok; -import org.elasticsearch.ingest.processor.GrokProcessor; +import org.elasticsearch.ingest.grok.Grok; +import org.elasticsearch.ingest.grok.GrokProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -52,8 +52,8 @@ public class GrokTests extends ESTestCase { @Before public void setup() throws IOException { basePatterns = newBankFromStreams( - getClass().getResourceAsStream("/grok/patterns/grok-patterns"), - getClass().getResourceAsStream("/grok/patterns/linux-syslog") + getClass().getResourceAsStream("/patterns/grok-patterns"), + getClass().getResourceAsStream("/patterns/linux-syslog") ); } diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java new file mode 100644 index 00000000000..7073c4ede56 --- /dev/null +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.grok; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.ingest.grok.IngestGrokPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; +import java.util.Collection; + +public class IngestGrokRestIT extends ESRestTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(IngestGrokPlugin.class); + } + + public IngestGrokRestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } +} + diff --git a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml new file mode 100644 index 00000000000..68d1fc649c7 --- /dev/null +++ b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml @@ -0,0 +1,12 @@ +"Ingest grok installed": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.modules.0.name: ingest-grok } + - match: { nodes.$master.modules.0.jvm: true } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_grok.yaml b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/20_grok.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/20_grok.yaml rename to modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/20_grok.yaml diff --git a/plugins/ingest/build.gradle b/plugins/ingest/build.gradle index 
c9cfe531b83..ecb6902cf8d 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest/build.gradle @@ -23,10 +23,6 @@ esplugin { } dependencies { - compile 'org.jruby.joni:joni:2.1.6' - // joni dependencies: - compile 'org.jruby.jcodings:jcodings:1.0.12' - compile ('com.maxmind.geoip2:geoip2:2.4.0') // geoip2 dependencies: compile('com.fasterxml.jackson.core:jackson-annotations:2.5.0') @@ -72,8 +68,4 @@ thirdPartyAudit.excludes = [ 'com.google.api.client.http.HttpResponseException', 'com.google.api.client.http.javanet.NetHttpTransport', 'com.google.api.client.http.javanet.NetHttpTransport', - // joni has AsmCompilerSupport, but that isn't being used: - 'org.objectweb.asm.ClassWriter', - 'org.objectweb.asm.MethodVisitor', - 'org.objectweb.asm.Opcodes', ] \ No newline at end of file diff --git a/plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 b/plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 deleted file mode 100644 index dac1d7a000a..00000000000 --- a/plugins/ingest/licenses/jcodings-1.0.12.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6bc17079fcaa8823ea8cd0d4c66516335b558db8 diff --git a/plugins/ingest/licenses/joni-2.1.6.jar.sha1 b/plugins/ingest/licenses/joni-2.1.6.jar.sha1 deleted file mode 100644 index 110752bb8ba..00000000000 --- a/plugins/ingest/licenses/joni-2.1.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0f23c95a06eaecbc8c74c7458a8bfd13e4fd2d3a diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java index d743e72a700..7a84fd5ecb0 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java @@ -21,7 +21,6 @@ package org.elasticsearch.plugin.ingest; import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.ingest.processor.GeoIpProcessor; -import org.elasticsearch.ingest.processor.GrokProcessor; import 
org.elasticsearch.plugins.Plugin; public class IngestPlugin extends Plugin { @@ -40,6 +39,5 @@ public class IngestPlugin extends Plugin { public void onModule(IngestModule ingestModule) { ingestModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); - ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(environment.configFile())); } } diff --git a/settings.gradle b/settings.gradle index 7eafddf8e43..fe38ba3ba33 100644 --- a/settings.gradle +++ b/settings.gradle @@ -14,6 +14,7 @@ List projects = [ 'modules:lang-expression', 'modules:lang-groovy', 'modules:lang-mustache', + 'modules:ingest-grok', 'plugins:analysis-icu', 'plugins:analysis-kuromoji', 'plugins:analysis-phonetic', From cd2155311f6e1e1ee22a5a3e24b20af42da741ab Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 8 Jan 2016 22:44:47 +0100 Subject: [PATCH 194/347] renamed ingest plugin to ingest-geoip plugin, since it only contains the geoip processor --- plugins/{ingest => ingest-geoip}/build.gradle | 18 +++++------------- .../licenses/geoip2-2.4.0.jar.sha1 | 0 .../licenses/geoip2-LICENSE.txt | 0 .../licenses/geoip2-NOTICE.txt | 0 .../jackson-annotations-2.5.0.jar.sha1 | 0 .../licenses/jackson-annotations-LICENSE | 0 .../licenses/jackson-annotations-NOTICE | 0 .../licenses/jackson-databind-2.5.3.jar.sha1 | 0 .../licenses/jackson-databind-LICENSE | 0 .../licenses/jackson-databind-NOTICE | 0 .../licenses/maxmind-db-1.0.1.jar.sha1 | 0 .../licenses/maxmind-db-LICENSE.txt | 0 .../licenses/maxmind-db-NOTICE.txt | 0 .../ingest/geoip}/GeoIpProcessor.java | 6 +++--- .../ingest/geoip/IngestGeoIpPlugin.java} | 9 +++------ .../plugin-metadata/plugin-security.policy | 0 .../geoip}/GeoIpProcessorFactoryTests.java | 5 ++--- .../ingest/geoip}/GeoIpProcessorTests.java | 2 +- .../ingest/geoip/IngestGeoIpRestIT.java} | 9 ++++----- .../test/ingest_geoip}/10_basic.yaml | 2 +- 
.../test/ingest_geoip/20_geoip_processor.yaml} | 0 qa/ingest-disabled/build.gradle | 5 ----- qa/ingest-with-mustache/build.gradle | 7 ------- settings.gradle | 2 +- 24 files changed, 20 insertions(+), 45 deletions(-) rename plugins/{ingest => ingest-geoip}/build.gradle (85%) rename plugins/{ingest => ingest-geoip}/licenses/geoip2-2.4.0.jar.sha1 (100%) rename plugins/{ingest => ingest-geoip}/licenses/geoip2-LICENSE.txt (100%) rename plugins/{ingest => ingest-geoip}/licenses/geoip2-NOTICE.txt (100%) rename plugins/{ingest => ingest-geoip}/licenses/jackson-annotations-2.5.0.jar.sha1 (100%) rename plugins/{ingest => ingest-geoip}/licenses/jackson-annotations-LICENSE (100%) rename plugins/{ingest => ingest-geoip}/licenses/jackson-annotations-NOTICE (100%) rename plugins/{ingest => ingest-geoip}/licenses/jackson-databind-2.5.3.jar.sha1 (100%) rename plugins/{ingest => ingest-geoip}/licenses/jackson-databind-LICENSE (100%) rename plugins/{ingest => ingest-geoip}/licenses/jackson-databind-NOTICE (100%) rename plugins/{ingest => ingest-geoip}/licenses/maxmind-db-1.0.1.jar.sha1 (100%) rename plugins/{ingest => ingest-geoip}/licenses/maxmind-db-LICENSE.txt (100%) rename plugins/{ingest => ingest-geoip}/licenses/maxmind-db-NOTICE.txt (100%) rename plugins/{ingest/src/main/java/org/elasticsearch/ingest/processor => ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip}/GeoIpProcessor.java (98%) rename plugins/{ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java => ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java} (85%) rename plugins/{ingest => ingest-geoip}/src/main/plugin-metadata/plugin-security.policy (100%) rename plugins/{ingest/src/test/java/org/elasticsearch/ingest/processor => ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip}/GeoIpProcessorFactoryTests.java (97%) rename plugins/{ingest/src/test/java/org/elasticsearch/ingest/processor => 
ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip}/GeoIpProcessorTests.java (99%) rename plugins/{ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java => ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java} (85%) rename plugins/{ingest/src/test/resources/rest-api-spec/test/ingest => ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip}/10_basic.yaml (66%) rename plugins/{ingest/src/test/resources/rest-api-spec/test/ingest/30_geoip_processor.yaml => ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml} (100%) diff --git a/plugins/ingest/build.gradle b/plugins/ingest-geoip/build.gradle similarity index 85% rename from plugins/ingest/build.gradle rename to plugins/ingest-geoip/build.gradle index ecb6902cf8d..7eee668793c 100644 --- a/plugins/ingest/build.gradle +++ b/plugins/ingest-geoip/build.gradle @@ -18,8 +18,8 @@ */ esplugin { - description 'Plugin that allows to configure pipelines to preprocess documents before indexing' - classname 'org.elasticsearch.plugin.ingest.IngestPlugin' + description 'Ingest processor that uses looksup geo data based on ip adresses using the Maxmind geo database' + classname 'org.elasticsearch.ingest.geoip.IngestGeoIpPlugin' } dependencies { @@ -32,17 +32,9 @@ dependencies { testCompile 'org.elasticsearch:geolite2-databases:20151029' } -sourceSets { - test { - resources { - srcDir "src/main/packaging/config" - } - } -} - task copyDefaultGeoIp2DatabaseFiles(type: Copy) { from { zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases')}) } - into "${project.buildDir}/geoip" + into "${project.buildDir}/ingest-geoip" include "*.mmdb" } @@ -52,8 +44,8 @@ compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked,-serial" compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" bundlePlugin { - from("${project.buildDir}/geoip") { - into 'config/geoip' + from("${project.buildDir}/ingest-geoip") { + into 
'config/' } } diff --git a/plugins/ingest/licenses/geoip2-2.4.0.jar.sha1 b/plugins/ingest-geoip/licenses/geoip2-2.4.0.jar.sha1 similarity index 100% rename from plugins/ingest/licenses/geoip2-2.4.0.jar.sha1 rename to plugins/ingest-geoip/licenses/geoip2-2.4.0.jar.sha1 diff --git a/plugins/ingest/licenses/geoip2-LICENSE.txt b/plugins/ingest-geoip/licenses/geoip2-LICENSE.txt similarity index 100% rename from plugins/ingest/licenses/geoip2-LICENSE.txt rename to plugins/ingest-geoip/licenses/geoip2-LICENSE.txt diff --git a/plugins/ingest/licenses/geoip2-NOTICE.txt b/plugins/ingest-geoip/licenses/geoip2-NOTICE.txt similarity index 100% rename from plugins/ingest/licenses/geoip2-NOTICE.txt rename to plugins/ingest-geoip/licenses/geoip2-NOTICE.txt diff --git a/plugins/ingest/licenses/jackson-annotations-2.5.0.jar.sha1 b/plugins/ingest-geoip/licenses/jackson-annotations-2.5.0.jar.sha1 similarity index 100% rename from plugins/ingest/licenses/jackson-annotations-2.5.0.jar.sha1 rename to plugins/ingest-geoip/licenses/jackson-annotations-2.5.0.jar.sha1 diff --git a/plugins/ingest/licenses/jackson-annotations-LICENSE b/plugins/ingest-geoip/licenses/jackson-annotations-LICENSE similarity index 100% rename from plugins/ingest/licenses/jackson-annotations-LICENSE rename to plugins/ingest-geoip/licenses/jackson-annotations-LICENSE diff --git a/plugins/ingest/licenses/jackson-annotations-NOTICE b/plugins/ingest-geoip/licenses/jackson-annotations-NOTICE similarity index 100% rename from plugins/ingest/licenses/jackson-annotations-NOTICE rename to plugins/ingest-geoip/licenses/jackson-annotations-NOTICE diff --git a/plugins/ingest/licenses/jackson-databind-2.5.3.jar.sha1 b/plugins/ingest-geoip/licenses/jackson-databind-2.5.3.jar.sha1 similarity index 100% rename from plugins/ingest/licenses/jackson-databind-2.5.3.jar.sha1 rename to plugins/ingest-geoip/licenses/jackson-databind-2.5.3.jar.sha1 diff --git a/plugins/ingest/licenses/jackson-databind-LICENSE 
b/plugins/ingest-geoip/licenses/jackson-databind-LICENSE similarity index 100% rename from plugins/ingest/licenses/jackson-databind-LICENSE rename to plugins/ingest-geoip/licenses/jackson-databind-LICENSE diff --git a/plugins/ingest/licenses/jackson-databind-NOTICE b/plugins/ingest-geoip/licenses/jackson-databind-NOTICE similarity index 100% rename from plugins/ingest/licenses/jackson-databind-NOTICE rename to plugins/ingest-geoip/licenses/jackson-databind-NOTICE diff --git a/plugins/ingest/licenses/maxmind-db-1.0.1.jar.sha1 b/plugins/ingest-geoip/licenses/maxmind-db-1.0.1.jar.sha1 similarity index 100% rename from plugins/ingest/licenses/maxmind-db-1.0.1.jar.sha1 rename to plugins/ingest-geoip/licenses/maxmind-db-1.0.1.jar.sha1 diff --git a/plugins/ingest/licenses/maxmind-db-LICENSE.txt b/plugins/ingest-geoip/licenses/maxmind-db-LICENSE.txt similarity index 100% rename from plugins/ingest/licenses/maxmind-db-LICENSE.txt rename to plugins/ingest-geoip/licenses/maxmind-db-LICENSE.txt diff --git a/plugins/ingest/licenses/maxmind-db-NOTICE.txt b/plugins/ingest-geoip/licenses/maxmind-db-NOTICE.txt similarity index 100% rename from plugins/ingest/licenses/maxmind-db-NOTICE.txt rename to plugins/ingest-geoip/licenses/maxmind-db-NOTICE.txt diff --git a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java similarity index 98% rename from plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java rename to plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 8a192000714..9f65c76c215 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/ingest/processor/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; import com.maxmind.geoip2.exception.AddressNotFoundException; @@ -222,7 +222,7 @@ public final class GeoIpProcessor implements Processor { return geoData; } - public static class Factory implements Processor.Factory, Closeable { + public static final class Factory implements Processor.Factory, Closeable { static final Set DEFAULT_FIELDS = EnumSet.of( Field.CONTINENT_NAME, Field.COUNTRY_ISO_CODE, Field.REGION_NAME, Field.CITY_NAME, Field.LOCATION @@ -231,7 +231,7 @@ public final class GeoIpProcessor implements Processor { private final Map databaseReaders; public Factory(Path configDirectory) { - Path geoIpConfigDirectory = configDirectory.resolve("ingest").resolve("geoip"); + Path geoIpConfigDirectory = configDirectory.resolve("ingest-geoip"); if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) { throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist"); } diff --git a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java similarity index 85% rename from plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java rename to plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 7a84fd5ecb0..81cecb76f5d 100644 --- a/plugins/ingest/src/main/java/org/elasticsearch/plugin/ingest/IngestPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -17,19 +17,16 @@ * under the License. 
*/ -package org.elasticsearch.plugin.ingest; +package org.elasticsearch.ingest.geoip; import org.elasticsearch.ingest.IngestModule; -import org.elasticsearch.ingest.processor.GeoIpProcessor; import org.elasticsearch.plugins.Plugin; -public class IngestPlugin extends Plugin { - - public static final String NAME = "ingest"; +public class IngestGeoIpPlugin extends Plugin { @Override public String name() { - return NAME; + return "ingest-geoip"; } @Override diff --git a/plugins/ingest/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest-geoip/src/main/plugin-metadata/plugin-security.policy similarity index 100% rename from plugins/ingest/src/main/plugin-metadata/plugin-security.policy rename to plugins/ingest-geoip/src/main/plugin-metadata/plugin-security.policy diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java similarity index 97% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorFactoryTests.java rename to plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index b19168ec920..78dd86d4fdc 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -17,9 +17,8 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.geoip; -import org.elasticsearch.ingest.processor.GeoIpProcessor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.junit.Before; @@ -46,7 +45,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { @Before public void prepareConfigDirectory() throws Exception { this.configDir = createTempDir(); - Path geoIpConfigDir = configDir.resolve("ingest").resolve("geoip"); + Path geoIpConfigDir = configDir.resolve("ingest-geoip"); Files.createDirectories(geoIpConfigDir); Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), geoIpConfigDir.resolve("GeoLite2-City.mmdb")); Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), geoIpConfigDir.resolve("GeoLite2-Country.mmdb")); diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java similarity index 99% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java rename to plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index ee09354de8f..b3b41105157 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/processor/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.ingest.processor; +package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.ingest.core.IngestDocument; diff --git a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java similarity index 85% rename from plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java rename to plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java index f6da5b541bb..fed53456f0b 100644 --- a/plugins/ingest/src/test/java/org/elasticsearch/ingest/IngestRestIT.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java @@ -17,11 +17,10 @@ * under the License. */ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.geoip; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.ingest.IngestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; @@ -30,14 +29,14 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; import java.util.Collection; -public class IngestRestIT extends ESRestTestCase { +public class IngestGeoIpRestIT extends ESRestTestCase { @Override protected Collection> nodePlugins() { - return pluginList(IngestPlugin.class); + return pluginList(IngestGeoIpPlugin.class); } - public IngestRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public IngestGeoIpRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml similarity index 66% rename from 
plugins/ingest/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml rename to plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml index ad10d9b0041..9a82d95ed7f 100644 --- a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml @@ -2,5 +2,5 @@ - do: cluster.stats: {} - - match: { nodes.plugins.0.name: ingest } + - match: { nodes.plugins.0.name: ingest-geoip } - match: { nodes.plugins.0.jvm: true } diff --git a/plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_geoip_processor.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml similarity index 100% rename from plugins/ingest/src/test/resources/rest-api-spec/test/ingest/30_geoip_processor.yaml rename to plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml diff --git a/qa/ingest-disabled/build.gradle b/qa/ingest-disabled/build.gradle index 6acfc38b378..ca71697a7b4 100644 --- a/qa/ingest-disabled/build.gradle +++ b/qa/ingest-disabled/build.gradle @@ -19,13 +19,8 @@ apply plugin: 'elasticsearch.rest-test' -dependencies { - testCompile project(path: ':plugins:ingest', configuration: 'runtime') -} - integTest { cluster { - plugin 'ingest', project(':plugins:ingest') systemProperty 'es.node.ingest', 'false' } } diff --git a/qa/ingest-with-mustache/build.gradle b/qa/ingest-with-mustache/build.gradle index 32ed5f8956f..e5ca482d85a 100644 --- a/qa/ingest-with-mustache/build.gradle +++ b/qa/ingest-with-mustache/build.gradle @@ -20,12 +20,5 @@ apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':plugins:ingest', configuration: 'runtime') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') } - -integTest { - cluster { - plugin 'ingest', project(':plugins:ingest') - } -} diff --git a/settings.gradle b/settings.gradle 
index fe38ba3ba33..39e0b4fb5b0 100644 --- a/settings.gradle +++ b/settings.gradle @@ -25,7 +25,7 @@ List projects = [ 'plugins:discovery-ec2', 'plugins:discovery-gce', 'plugins:discovery-multicast', - 'plugins:ingest', + 'plugins:ingest-geoip', 'plugins:lang-javascript', 'plugins:lang-plan-a', 'plugins:lang-python', From bac12061619247965df861549d814f6f8de96dc4 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 11 Jan 2016 09:49:55 +0100 Subject: [PATCH 195/347] remove use of request headers/context for pipeline id in favour of instance members added to IndexRequest and BulkRequest Now that the ingest infra is part of es core we can remove some code that was required by the plugin and have a better integration with es core. We allow to specify the pipeline id in bulk and index as a request parameter, we have a REST filter that parses it and adds it to the relevant action request. That is not required anymore, as we can add this logic to RestIndexAction and RestBulkAction directly, no need for a filter. Also, we can allow to specify a pipeline id for each index requests in a bulk request. The small downside of this is that the ingest filter has to go over each item of a bulk request, all the time, to figure out whether they have a pipeline id. 
--- .../action/bulk/BulkProcessor.java | 6 +- .../action/bulk/BulkRequest.java | 15 +-- .../action/index/IndexRequest.java | 19 ++++ .../action/index/IndexRequestBuilder.java | 8 ++ .../action/ingest/IngestActionFilter.java | 51 ++++----- .../ingest/IngestDisabledActionFilter.java | 36 ++++--- .../elasticsearch/ingest/IngestModule.java | 2 - .../ingest/PipelineExecutionService.java | 29 +++-- .../rest/action/bulk/RestBulkAction.java | 3 +- .../rest/action/index/RestIndexAction.java | 1 + .../rest/action/ingest/IngestRestFilter.java | 44 -------- .../ingest/IngestActionFilterTests.java | 100 +++++++----------- .../elasticsearch/ingest/IngestClientIT.java | 11 +- .../ingest/PipelineExecutionServiceTests.java | 95 +++++++++++------ .../ingest_mustache/10_ingest_disabled.yaml | 22 +++- 15 files changed, 233 insertions(+), 209 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 78a0c76702f..8ae91649cbc 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -289,11 +289,11 @@ public class BulkProcessor implements Closeable { } public BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception { - return add(data, defaultIndex, defaultType, null); + return add(data, defaultIndex, defaultType, null, null); } - public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable Object payload) throws Exception { - bulkRequest.add(data, defaultIndex, defaultType, null, null, payload, true); + public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultPipeline, 
@Nullable Object payload) throws Exception { + bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, payload, true); executeIfNeeded(); return this; } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 02e0ea40d65..2e93546d4de 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -254,17 +254,17 @@ public class BulkRequest extends ActionRequest implements Composite * Adds a framed data in binary format */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception { - return add(data, defaultIndex, defaultType, null, null, null, true); + return add(data, defaultIndex, defaultType, null, null, null, null, true); } /** * Adds a framed data in binary format */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex) throws Exception { - return add(data, defaultIndex, defaultType, null, null, null, allowExplicitIndex); + return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex); } - public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable Object payload, boolean allowExplicitIndex) throws Exception { + public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex) throws Exception { XContent xContent = XContentFactory.xContent(data); int line = 0; int from = 0; @@ -305,6 +305,7 @@ public class BulkRequest extends ActionRequest implements Composite long version = 
Versions.MATCH_ANY; VersionType versionType = VersionType.INTERNAL; int retryOnConflict = 0; + String pipeline = defaultPipeline; // at this stage, next token can either be END_OBJECT (and use default index and type, with auto generated id) // or START_OBJECT which will have another set of parameters @@ -345,6 +346,8 @@ public class BulkRequest extends ActionRequest implements Composite versionType = VersionType.fromString(parser.text()); } else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) { retryOnConflict = parser.intValue(); + } else if ("pipeline".equals(currentFieldName)) { + pipeline = parser.text(); } else if ("fields".equals(currentFieldName)) { throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected"); } else { @@ -381,15 +384,15 @@ public class BulkRequest extends ActionRequest implements Composite if ("index".equals(action)) { if (opType == null) { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) - .source(data.slice(from, nextMarker - from)), payload); + .pipeline(pipeline).source(data.slice(from, nextMarker - from)), payload); } else { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) - .create("create".equals(opType)) + .create("create".equals(opType)).pipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } } else if ("create".equals(action)) { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) - .create(true) + .create(true).pipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } else if ("update".equals(action)) { UpdateRequest updateRequest = new UpdateRequest(index, type, 
id).routing(routing).parent(parent).retryOnConflict(retryOnConflict) diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 9899a5493aa..5c9f5aeb5aa 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -155,6 +155,8 @@ public class IndexRequest extends ReplicationRequest implements Do private XContentType contentType = Requests.INDEX_CONTENT_TYPE; + private String pipeline; + public IndexRequest() { } @@ -363,6 +365,21 @@ public class IndexRequest extends ReplicationRequest implements Do return this.ttl; } + /** + * Sets the ingest pipeline to be executed before indexing the document + */ + public IndexRequest pipeline(String pipeline) { + this.pipeline = pipeline; + return this; + } + + /** + * Returns the ingest pipeline to be executed before indexing the document + */ + public String pipeline() { + return this.pipeline; + } + /** * The source of the document to index, recopied to a new array if it is unsage. 
*/ @@ -658,6 +675,7 @@ public class IndexRequest extends ReplicationRequest implements Do refresh = in.readBoolean(); version = in.readLong(); versionType = VersionType.fromValue(in.readByte()); + pipeline = in.readOptionalString(); } @Override @@ -679,6 +697,7 @@ public class IndexRequest extends ReplicationRequest implements Do out.writeBoolean(refresh); out.writeLong(version); out.writeByte(versionType.getValue()); + out.writeOptionalString(pipeline); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index f7134d84843..a355d68b034 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -278,4 +278,12 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder actionListener = (ActionListener) listener; - processBulkIndexRequest(task, bulkRequest, pipelineId, action, chain, actionListener); - } else { - chain.proceed(task, action, request, listener); + processBulkIndexRequest(task, bulkRequest, action, chain, actionListener); + return; } + + chain.proceed(task, action, request, listener); } @Override @@ -83,7 +84,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio chain.proceed(action, response, listener); } - void processIndexRequest(Task task, String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest, String pipelineId) { + void processIndexRequest(Task task, String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest) { // The IndexRequest has the same type on the node that receives the request and the node that // processes the primary action. 
This could lead to a pipeline being executed twice for the same // index request, hence this check @@ -91,8 +92,8 @@ public final class IngestActionFilter extends AbstractComponent implements Actio chain.proceed(task, action, indexRequest, listener); return; } - executionService.execute(indexRequest, pipelineId, t -> { - logger.error("failed to execute pipeline [{}]", t, pipelineId); + executionService.execute(indexRequest, t -> { + logger.error("failed to execute pipeline [{}]", t, indexRequest.pipeline()); listener.onFailure(t); }, success -> { indexRequest.putHeader(PIPELINE_ALREADY_PROCESSED, true); @@ -100,18 +101,20 @@ public final class IngestActionFilter extends AbstractComponent implements Actio }); } - void processBulkIndexRequest(Task task, BulkRequest original, String pipelineId, String action, ActionFilterChain chain, ActionListener listener) { + void processBulkIndexRequest(Task task, BulkRequest original, String action, ActionFilterChain chain, ActionListener listener) { BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); - executionService.execute(() -> bulkRequestModifier, pipelineId, e -> { - logger.debug("failed to execute pipeline [{}]", e, pipelineId); - bulkRequestModifier.markCurrentItemAsFailed(e); + executionService.execute(() -> bulkRequestModifier, tuple -> { + IndexRequest indexRequest = tuple.v1(); + Throwable throwable = tuple.v2(); + logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.pipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable); + bulkRequestModifier.markCurrentItemAsFailed(throwable); }, (success) -> { BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); ActionListener actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(listener); if (bulkRequest.requests().isEmpty()) { - // in this stage, the transport bulk action can't deal with a bulk request with no requests, - // so we stop and send a empty response back to 
the client. - // (this will happen if all preprocessing all items in the bulk failed) + // at this stage, the transport bulk action can't deal with a bulk request with no requests, + // so we stop and send an empty response back to the client. + // (this will happen if pre-processing all items in the bulk failed) actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0)); } else { chain.proceed(task, action, bulkRequest, actionListener); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java index 3603b882e4b..14abf4ebbbe 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java @@ -21,23 +21,40 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.common.Strings; import org.elasticsearch.tasks.Task; public final class IngestDisabledActionFilter implements ActionFilter { @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - String pipelineId = request.getFromContext(IngestActionFilter.PIPELINE_ID_PARAM_CONTEXT_KEY); - if (pipelineId != null) { - failRequest(pipelineId); + boolean isIngestRequest = false; + if (IndexAction.NAME.equals(action)) { + assert request instanceof IndexRequest; + IndexRequest indexRequest = (IndexRequest) request; + isIngestRequest = 
Strings.hasText(indexRequest.pipeline()); + } else if (BulkAction.NAME.equals(action)) { + assert request instanceof BulkRequest; + BulkRequest bulkRequest = (BulkRequest) request; + for (ActionRequest actionRequest : bulkRequest.requests()) { + if (actionRequest instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + if (Strings.hasText(indexRequest.pipeline())) { + isIngestRequest = true; + break; + } + } + } } - pipelineId = request.getHeader(IngestActionFilter.PIPELINE_ID_PARAM); - if (pipelineId != null) { - failRequest(pipelineId); + if (isIngestRequest) { + throw new IllegalArgumentException("node.ingest is set to false, cannot execute pipeline"); } - chain.proceed(task, action, request, listener); } @@ -50,9 +67,4 @@ public final class IngestDisabledActionFilter implements ActionFilter { public int order() { return Integer.MAX_VALUE; } - - private static void failRequest(String pipelineId) { - throw new IllegalArgumentException("node.ingest is set to false, cannot execute pipeline with id [" + pipelineId + "]"); - } - } diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java index 4dc77356183..9dddbf6016b 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java @@ -37,7 +37,6 @@ import org.elasticsearch.ingest.processor.SetProcessor; import org.elasticsearch.ingest.processor.SplitProcessor; import org.elasticsearch.ingest.processor.TrimProcessor; import org.elasticsearch.ingest.processor.UppercaseProcessor; -import org.elasticsearch.rest.action.ingest.IngestRestFilter; import java.util.function.BiFunction; @@ -68,7 +67,6 @@ public class IngestModule extends AbstractModule { @Override protected void configure() { - binder().bind(IngestRestFilter.class).asEagerSingleton(); bind(ProcessorsRegistry.class).toInstance(processorsRegistry); 
binder().bind(IngestBootstrapper.class).asEagerSingleton(); } diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index d0d0896e7e4..29435769e6b 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -21,6 +21,8 @@ package org.elasticsearch.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.threadpool.ThreadPool; @@ -38,8 +40,8 @@ public class PipelineExecutionService { this.threadPool = threadPool; } - public void execute(IndexRequest request, String pipelineId, Consumer failureHandler, Consumer completionHandler) { - Pipeline pipeline = getPipeline(pipelineId); + public void execute(IndexRequest request, Consumer failureHandler, Consumer completionHandler) { + Pipeline pipeline = getPipeline(request.pipeline()); threadPool.executor(ThreadPool.Names.INGEST).execute(() -> { try { innerExecute(request, pipeline); @@ -50,21 +52,18 @@ public class PipelineExecutionService { }); } - public void execute(Iterable actionRequests, String pipelineId, - Consumer itemFailureHandler, Consumer completionHandler) { - Pipeline pipeline = getPipeline(pipelineId); + public void execute(Iterable actionRequests, + Consumer> itemFailureHandler, Consumer completionHandler) { threadPool.executor(ThreadPool.Names.INGEST).execute(() -> { for (ActionRequest actionRequest : actionRequests) { - if ((actionRequest instanceof IndexRequest) == false) { - continue; - } - - IndexRequest indexRequest = (IndexRequest) actionRequest; - try { - innerExecute(indexRequest, pipeline); - } catch (Throwable e) { - if 
(itemFailureHandler != null) { - itemFailureHandler.accept(e); + if ((actionRequest instanceof IndexRequest)) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + if (Strings.hasText(indexRequest.pipeline())) { + try { + innerExecute(indexRequest, getPipeline(indexRequest.pipeline())); + } catch (Throwable e) { + itemFailureHandler.accept(new Tuple<>(indexRequest, e)); + } } } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java index 37ce03bac70..df20438fa97 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java @@ -77,6 +77,7 @@ public class RestBulkAction extends BaseRestHandler { String defaultType = request.param("type"); String defaultRouting = request.param("routing"); String fieldsParam = request.param("fields"); + String defaultPipeline = request.param("pipeline"); String[] defaultFields = fieldsParam != null ? 
Strings.commaDelimitedListToStringArray(fieldsParam) : null; String consistencyLevel = request.param("consistency"); @@ -85,7 +86,7 @@ public class RestBulkAction extends BaseRestHandler { } bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.refresh(request.paramAsBoolean("refresh", bulkRequest.refresh())); - bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, allowExplicitIndex); + bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, defaultPipeline, null, allowExplicitIndex); client.bulk(bulkRequest, new RestBuilderListener(channel) { @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index 13a93299187..4eaec2c6b1b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -77,6 +77,7 @@ public class RestIndexAction extends BaseRestHandler { if (request.hasParam("ttl")) { indexRequest.ttl(request.param("ttl")); } + indexRequest.pipeline(request.param("pipeline")); indexRequest.source(request.content()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); indexRequest.refresh(request.paramAsBoolean("refresh", indexRequest.refresh())); diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java deleted file mode 100644 index d278a727dd9..00000000000 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/IngestRestFilter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.rest.action.ingest; - -import org.elasticsearch.action.ingest.IngestActionFilter; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestFilter; -import org.elasticsearch.rest.RestFilterChain; -import org.elasticsearch.rest.RestRequest; - -public class IngestRestFilter extends RestFilter { - - @Inject - public IngestRestFilter(RestController controller) { - controller.registerFilter(this); - } - - @Override - public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - if (request.hasParam(IngestActionFilter.PIPELINE_ID_PARAM)) { - request.putInContext(IngestActionFilter.PIPELINE_ID_PARAM_CONTEXT_KEY, request.param(IngestActionFilter.PIPELINE_ID_PARAM)); - } - filterChain.continueProcessing(request, channel); - } -} diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index f2d46dd7095..040782968e7 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -21,10 +21,12 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.update.UpdateRequest; @@ -40,7 +42,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; -import org.mockito.Matchers; import org.mockito.stubbing.Answer; import java.util.HashSet; @@ -51,9 +52,9 @@ import static org.elasticsearch.action.ingest.IngestActionFilter.BulkRequestModi import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.same; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; @@ -72,43 +73,29 @@ public class IngestActionFilterTests extends ESTestCase { filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); } - public void testApplyNoIngestId() throws Exception { + public void testApplyNoPipelineId() throws Exception { IndexRequest indexRequest = new IndexRequest(); Task task = mock(Task.class); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply(task, 
"_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); - verify(actionFilterChain).proceed(task, "_action", indexRequest, actionListener); + verify(actionFilterChain).proceed(task, IndexAction.NAME, indexRequest, actionListener); verifyZeroInteractions(executionService, actionFilterChain); } + @SuppressWarnings("unchecked") public void testApplyIngestIdViaRequestParam() throws Exception { Task task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(Matchers.any(IndexRequest.class), Matchers.eq("_id"), Matchers.any(Consumer.class), Matchers.any(Consumer.class)); - verifyZeroInteractions(actionFilterChain); - } - - public void testApplyIngestIdViaContext() throws Exception { - Task task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); - indexRequest.source("field", "value"); - indexRequest.putInContext(IngestActionFilter.PIPELINE_ID_PARAM_CONTEXT_KEY, "_id"); - ActionListener actionListener = mock(ActionListener.class); - ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - - filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); - - verify(executionService).execute(Matchers.any(IndexRequest.class), Matchers.eq("_id"), Matchers.any(Consumer.class), Matchers.any(Consumer.class)); + 
verify(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class)); verifyZeroInteractions(actionFilterChain); } @@ -116,57 +103,54 @@ public class IngestActionFilterTests extends ESTestCase { Task task = mock(Task.class); IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); indexRequest.putHeader(IngestActionFilter.PIPELINE_ALREADY_PROCESSED, true); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - - filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); - - verify(actionFilterChain).proceed(task, "_action", indexRequest, actionListener); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); + verify(actionFilterChain).proceed(task, IndexAction.NAME, indexRequest, actionListener); verifyZeroInteractions(executionService, actionListener); } + @SuppressWarnings("unchecked") public void testApplyExecuted() throws Exception { Task task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); Answer answer = invocationOnMock -> { @SuppressWarnings("unchecked") - Consumer listener = (Consumer) invocationOnMock.getArguments()[3]; + Consumer listener = (Consumer) invocationOnMock.getArguments()[2]; listener.accept(true); return null; }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); - filter.apply(task, "_action", indexRequest, 
actionListener, actionFilterChain); + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), any(Consumer.class), any(Consumer.class)); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); - verify(actionFilterChain).proceed(task, "_action", indexRequest, actionListener); + verify(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class)); + verify(actionFilterChain).proceed(task, IndexAction.NAME, indexRequest, actionListener); verifyZeroInteractions(actionListener); } + @SuppressWarnings("unchecked") public void testApplyFailed() throws Exception { Task task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); indexRequest.source("field", "value"); - indexRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); RuntimeException exception = new RuntimeException(); Answer answer = invocationOnMock -> { - Consumer handler = (Consumer) invocationOnMock.getArguments()[2]; + Consumer handler = (Consumer) invocationOnMock.getArguments()[1]; handler.accept(exception); return null; }; - doAnswer(answer).when(executionService).execute(any(IndexRequest.class), eq("_id"), any(Consumer.class), any(Consumer.class)); - filter.apply(task, "_action", indexRequest, actionListener, actionFilterChain); + doAnswer(answer).when(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class)); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); - verify(executionService).execute(Matchers.any(IndexRequest.class), Matchers.eq("_id"), Matchers.any(Consumer.class), 
Matchers.any(Consumer.class)); + verify(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class)); verify(actionListener).onFailure(exception); verifyZeroInteractions(actionFilterChain); } @@ -195,7 +179,6 @@ public class IngestActionFilterTests extends ESTestCase { filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); int numRequest = scaledRandomIntBetween(8, 64); for (int i = 0; i < numRequest; i++) { if (rarely()) { @@ -207,7 +190,7 @@ public class IngestActionFilterTests extends ESTestCase { } bulkRequest.add(request); } else { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); indexRequest.source("field1", "value1"); bulkRequest.add(indexRequest); } @@ -216,26 +199,23 @@ public class IngestActionFilterTests extends ESTestCase { ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - filter.apply(task, "_action", bulkRequest, actionListener, actionFilterChain); + filter.apply(task, BulkAction.NAME, bulkRequest, actionListener, actionFilterChain); - assertBusy(new Runnable() { - @Override - public void run() { - verify(actionFilterChain).proceed(task, "_action", bulkRequest, actionListener); - verifyZeroInteractions(actionListener); + assertBusy(() -> { + verify(actionFilterChain).proceed(task, BulkAction.NAME, bulkRequest, actionListener); + verifyZeroInteractions(actionListener); - int assertedRequests = 0; - for (ActionRequest actionRequest : bulkRequest.requests()) { - if (actionRequest instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) actionRequest; - assertThat(indexRequest.sourceAsMap().size(), equalTo(2)); - assertThat(indexRequest.sourceAsMap().get("field1"), equalTo("value1")); - 
assertThat(indexRequest.sourceAsMap().get("field2"), equalTo("value2")); - } - assertedRequests++; + int assertedRequests = 0; + for (ActionRequest actionRequest : bulkRequest.requests()) { + if (actionRequest instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + assertThat(indexRequest.sourceAsMap().size(), equalTo(2)); + assertThat(indexRequest.sourceAsMap().get("field1"), equalTo("value1")); + assertThat(indexRequest.sourceAsMap().get("field2"), equalTo("value2")); } - assertThat(assertedRequests, equalTo(numRequest)); + assertedRequests++; } + assertThat(assertedRequests, equalTo(numRequest)); }); } @@ -279,7 +259,7 @@ public class IngestActionFilterTests extends ESTestCase { } } - private final static class CaptureActionListener implements ActionListener { + private static class CaptureActionListener implements ActionListener { private BulkResponse response; diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 7e1911dff08..3767aac85f5 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; import org.elasticsearch.action.ingest.GetPipelineAction; import org.elasticsearch.action.ingest.GetPipelineRequestBuilder; import org.elasticsearch.action.ingest.GetPipelineResponse; -import org.elasticsearch.action.ingest.IngestActionFilter; import org.elasticsearch.action.ingest.PutPipelineAction; import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; @@ -128,9 +127,8 @@ public class IngestClientIT extends ESIntegTestCase { int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id"); for 
(int i = 0; i < numRequests; i++) { - IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)); + IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)).pipeline("_id"); indexRequest.source("field", "value", "fail", i % 2 == 0); bulkRequest.add(indexRequest); } @@ -170,9 +168,7 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); - client().prepareIndex("test", "type", "1").setSource("field", "value", "fail", false) - .putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id") - .get(); + client().prepareIndex("test", "type", "1").setPipeline("_id").setSource("field", "value", "fail", false).get(); Map doc = client().prepareGet("test", "type", "1") .get().getSourceAsMap(); @@ -180,8 +176,7 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(doc.get("processed"), equalTo(true)); client().prepareBulk().add( - client().prepareIndex("test", "type", "2").setSource("field", "value2", "fail", false) - ).putHeader(IngestActionFilter.PIPELINE_ID_PARAM, "_id").get(); + client().prepareIndex("test", "type", "2").setSource("field", "value2", "fail", false).setPipeline("_id")).get(); doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); assertThat(doc.get("field"), equalTo("value2")); assertThat(doc.get("processed"), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 5f31dd24621..a0fc2b8b4ba 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import 
org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.core.CompoundProcessor; import org.elasticsearch.ingest.core.IngestDocument; @@ -32,6 +33,7 @@ import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.hamcrest.CustomTypeSafeMatcher; import org.junit.Before; import org.mockito.ArgumentMatcher; import org.mockito.invocation.InvocationOnMock; @@ -67,15 +69,14 @@ public class PipelineExecutionServiceTests extends ESTestCase { executionService = new PipelineExecutionService(store, threadPool); } - public void testExecutePipelineDoesNotExist() { - when(store.get("_id")).thenReturn(null); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + public void testExecuteIndexPipelineDoesNotExist() { + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); try { - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); fail("IllegalArgumentException expected"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist")); @@ -84,16 +85,47 @@ public class PipelineExecutionServiceTests extends ESTestCase { verify(completionHandler, never()).accept(anyBoolean()); } + public void testExecuteBulkPipelineDoesNotExist() { + CompoundProcessor processor = mock(CompoundProcessor.class); + when(store.get("_id")).thenReturn(new Pipeline("_id", 
"_description", processor)); + BulkRequest bulkRequest = new BulkRequest(); + + IndexRequest indexRequest1 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + bulkRequest.add(indexRequest1); + IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("does_not_exist"); + bulkRequest.add(indexRequest2); + @SuppressWarnings("unchecked") + Consumer> failureHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") + Consumer completionHandler = mock(Consumer.class); + executionService.execute(bulkRequest.requests(), failureHandler, completionHandler); + verify(failureHandler, times(1)).accept(argThat(new CustomTypeSafeMatcher>("failure handler was not called with the expected arguments") { + @Override + protected boolean matchesSafely(Tuple item) { + if( item.v1() != indexRequest2) { + return false; + } + if (item.v2() instanceof IllegalArgumentException == false) { + return false; + } + IllegalArgumentException iae = (IllegalArgumentException) item.v2(); + return "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage()); + } + + })); + verify(completionHandler, times(1)).accept(anyBoolean()); + } + public void testExecuteSuccess() throws Exception { CompoundProcessor processor = mock(CompoundProcessor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); 
verify(failureHandler, never()).accept(any()); verify(completionHandler, times(1)).accept(true); } @@ -114,12 +146,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { }).when(processor).execute(any()); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); verify(processor).execute(any()); verify(failureHandler, never()).accept(any()); verify(completionHandler, times(1)).accept(true); @@ -136,13 +168,13 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecuteFailure() throws Exception { CompoundProcessor processor = mock(CompoundProcessor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", 
Collections.emptyMap())); verify(failureHandler, times(1)).accept(any(RuntimeException.class)); verify(completionHandler, never()).accept(anyBoolean()); @@ -153,13 +185,13 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor onFailureProcessor = mock(Processor.class); CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); verify(failureHandler, never()).accept(any(RuntimeException.class)); verify(completionHandler, times(1)).accept(true); } @@ -169,14 +201,14 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor onFailureProcessor = mock(Processor.class); CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); doThrow(new 
RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); verify(failureHandler, times(1)).accept(any(RuntimeException.class)); verify(completionHandler, never()).accept(anyBoolean()); @@ -189,7 +221,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(Collections.singletonList(onFailureProcessor), Collections.singletonList(onFailureOnFailureProcessor)))); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); doThrow(new RuntimeException()).when(onFailureOnFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @@ -197,7 +229,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - 
executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); verify(failureHandler, times(1)).accept(any(RuntimeException.class)); verify(completionHandler, never()).accept(anyBoolean()); @@ -207,12 +239,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "5d")); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); verify(failureHandler, never()).accept(any()); @@ -223,12 +255,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "abc")); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer 
completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); verify(failureHandler, times(1)).accept(any(ElasticsearchParseException.class)); verify(completionHandler, never()).accept(anyBoolean()); } @@ -236,12 +268,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecuteProvidedTTL() throws Exception { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", mock(CompoundProcessor.class))); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id") + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id") .source(Collections.emptyMap()) .ttl(1000L); Consumer failureHandler = mock(Consumer.class); Consumer completionHandler = mock(Consumer.class); - executionService.execute(indexRequest, "_id", failureHandler, completionHandler); + executionService.execute(indexRequest, failureHandler, completionHandler); assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L))); verify(failureHandler, never()).accept(any()); @@ -250,6 +282,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testBulkRequestExecutionWithFailures() throws Exception { BulkRequest bulkRequest = new BulkRequest(); + String pipelineId = "_id"; int numRequest = scaledRandomIntBetween(8, 64); int numIndexRequests = 0; @@ -262,7 +295,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { request = new UpdateRequest("_index", "_type", "_id"); } } else { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline(pipelineId); indexRequest.source("field1", "value1"); request = indexRequest; numIndexRequests++; @@ -270,39 +303,37 @@ public class PipelineExecutionServiceTests extends ESTestCase { bulkRequest.add(request); } - String 
pipelineId = "_id"; - CompoundProcessor processor = mock(CompoundProcessor.class); Exception error = new RuntimeException(); doThrow(error).when(processor).execute(any()); when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor)); - Consumer requestItemErrorHandler = mock(Consumer.class); + Consumer> requestItemErrorHandler = mock(Consumer.class); Consumer completionHandler = mock(Consumer.class); - executionService.execute(bulkRequest.requests(), pipelineId, requestItemErrorHandler, completionHandler); + executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler); - verify(requestItemErrorHandler, times(numIndexRequests)).accept(error); + verify(requestItemErrorHandler, times(numIndexRequests)).accept(new Tuple<>(any(IndexRequest.class), error)); verify(completionHandler, times(1)).accept(true); } public void testBulkRequestExecution() throws Exception { BulkRequest bulkRequest = new BulkRequest(); + String pipelineId = "_id"; int numRequest = scaledRandomIntBetween(8, 64); for (int i = 0; i < numRequest; i++) { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline(pipelineId); indexRequest.source("field1", "value1"); bulkRequest.add(indexRequest); } - String pipelineId = "_id"; when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, new CompoundProcessor())); @SuppressWarnings("unchecked") - Consumer requestItemErrorHandler = mock(Consumer.class); + Consumer> requestItemErrorHandler = mock(Consumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); - executionService.execute(bulkRequest.requests(), pipelineId, requestItemErrorHandler, completionHandler); + executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler); verify(requestItemErrorHandler, never()).accept(any()); verify(completionHandler, times(1)).accept(true); 
diff --git a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml index 957fbd29d2c..b2b983e4fa9 100644 --- a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml +++ b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml @@ -79,7 +79,7 @@ --- "Test index api with pipeline id fails when node.ingest is set to false": - do: - catch: /node.ingest is set to false, cannot execute pipeline with id \[my_pipeline_1\]/ + catch: /node.ingest is set to false, cannot execute pipeline/ index: index: test type: test @@ -94,7 +94,7 @@ --- "Test bulk api with pipeline id fails when node.ingest is set to false": - do: - catch: /node.ingest is set to false, cannot execute pipeline with id \[my_pipeline_1\]/ + catch: /node.ingest is set to false, cannot execute pipeline/ bulk: pipeline: "my_pipeline_1" body: @@ -109,3 +109,21 @@ _id: test_id2 - f1: v2 +--- +"Test bulk api that contains a single index call with pipeline id fails when node.ingest is set to false": + - do: + catch: /node.ingest is set to false, cannot execute pipeline/ + bulk: + body: + - index: + _index: test_index + _type: test_type + _id: test_id + - f1: v1 + - index: + _index: test_index + _type: test_type + _id: test_id2 + pipeline: my_pipeline_1 + - f1: v2 + From 21c89e6c5b30576af30cea9955b0d73ddb13fa4a Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 11 Jan 2016 14:44:39 +0100 Subject: [PATCH 196/347] make sure we don't go ahead if no index request holds a pipeline id --- .../action/ingest/IngestActionFilter.java | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 2a0bc85782a..81ae2ccc9c4 100644 
--- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -58,7 +58,6 @@ public final class IngestActionFilter extends AbstractComponent implements Actio @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - if (IndexAction.NAME.equals(action)) { assert request instanceof IndexRequest; IndexRequest indexRequest = (IndexRequest) request; @@ -70,10 +69,22 @@ public final class IngestActionFilter extends AbstractComponent implements Actio if (BulkAction.NAME.equals(action)) { assert request instanceof BulkRequest; BulkRequest bulkRequest = (BulkRequest) request; - @SuppressWarnings("unchecked") - ActionListener actionListener = (ActionListener) listener; - processBulkIndexRequest(task, bulkRequest, action, chain, actionListener); - return; + boolean isIngestRequest = false; + for (ActionRequest actionRequest : bulkRequest.requests()) { + if (actionRequest instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + if (Strings.hasText(indexRequest.pipeline())) { + isIngestRequest = true; + break; + } + } + } + if (isIngestRequest) { + @SuppressWarnings("unchecked") + ActionListener actionListener = (ActionListener) listener; + processBulkIndexRequest(task, bulkRequest, action, chain, actionListener); + return; + } } chain.proceed(task, action, request, listener); From fad2571ba527faab1f06f5d014cc2beac9ce76f5 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 11 Jan 2016 14:56:02 +0100 Subject: [PATCH 197/347] add test for bulk without any request holiding a pipeline id --- .../action/ingest/IngestActionFilterTests.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index 
040782968e7..d8663ade60b 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -85,6 +85,19 @@ public class IngestActionFilterTests extends ESTestCase { verifyZeroInteractions(executionService, actionFilterChain); } + public void testApplyBulkNoPipelineId() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest()); + Task task = mock(Task.class); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + filter.apply(task, BulkAction.NAME, bulkRequest, actionListener, actionFilterChain); + + verify(actionFilterChain).proceed(task, BulkAction.NAME, bulkRequest, actionListener); + verifyZeroInteractions(executionService, actionFilterChain); + } + @SuppressWarnings("unchecked") public void testApplyIngestIdViaRequestParam() throws Exception { Task task = mock(Task.class); From 362deb4579605123b038d1f2dfff4349de6a9974 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 11 Jan 2016 16:42:52 +0100 Subject: [PATCH 198/347] [TEST] move testBulkRequestModifier to existing BulkRequestModifierTests class --- .../ingest/BulkRequestModifierTests.java | 72 ++++++++++++++++- .../ingest/IngestActionFilterTests.java | 78 ------------------- 2 files changed, 68 insertions(+), 82 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java index a799b66678e..aa30c89ef59 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java @@ -29,14 +29,60 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import 
org.hamcrest.Matchers; -import org.mockito.Mockito; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; public class BulkRequestModifierTests extends ESTestCase { + public void testBulkRequestModifier() { + int numRequests = scaledRandomIntBetween(8, 64); + BulkRequest bulkRequest = new BulkRequest(); + for (int i = 0; i < numRequests; i++) { + bulkRequest.add(new IndexRequest("_index", "_type", String.valueOf(i)).source("{}")); + } + CaptureActionListener actionListener = new CaptureActionListener(); + IngestActionFilter.BulkRequestModifier bulkRequestModifier = new IngestActionFilter.BulkRequestModifier(bulkRequest); + + int i = 0; + Set failedSlots = new HashSet<>(); + while (bulkRequestModifier.hasNext()) { + bulkRequestModifier.next(); + if (randomBoolean()) { + bulkRequestModifier.markCurrentItemAsFailed(new RuntimeException()); + failedSlots.add(i); + } + i++; + } + + assertThat(bulkRequestModifier.getBulkRequest().requests().size(), equalTo(numRequests - failedSlots.size())); + // simulate that we actually executed the modified bulk request: + ActionListener result = bulkRequestModifier.wrapActionListenerIfNeeded(actionListener); + result.onResponse(new BulkResponse(new BulkItemResponse[numRequests - failedSlots.size()], 0)); + + BulkResponse bulkResponse = actionListener.getResponse(); + for (int j = 0; j < bulkResponse.getItems().length; j++) { + if (failedSlots.contains(j)) { + BulkItemResponse item = bulkResponse.getItems()[j]; + assertThat(item.isFailed(), is(true)); + assertThat(item.getFailure().getIndex(), equalTo("_index")); + assertThat(item.getFailure().getType(), equalTo("_type")); + assertThat(item.getFailure().getId(), equalTo(String.valueOf(j))); + assertThat(item.getFailure().getMessage(), 
equalTo("java.lang.RuntimeException")); + } else { + assertThat(bulkResponse.getItems()[j], nullValue()); + } + } + } + public void testPipelineFailures() { BulkRequest originalBulkRequest = new BulkRequest(); for (int i = 0; i < 32; i++) { @@ -73,7 +119,7 @@ public class BulkRequestModifierTests extends ESTestCase { IndexResponse indexResponse = new IndexResponse(new ShardId("index", 0), indexRequest.type(), indexRequest.id(), 1, true); originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType().lowercase(), indexResponse)); } - bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new BulkItemResponse[0]), 0)); + bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new BulkItemResponse[originalResponses.size()]), 0)); assertThat(responses.size(), Matchers.equalTo(32)); for (int i = 0; i < 32; i++) { @@ -88,14 +134,32 @@ public class BulkRequestModifierTests extends ESTestCase { } IngestActionFilter.BulkRequestModifier modifier = new IngestActionFilter.BulkRequestModifier(originalBulkRequest); - for (int i = 0; modifier.hasNext(); i++) { + while (modifier.hasNext()) { modifier.next(); } BulkRequest bulkRequest = modifier.getBulkRequest(); assertThat(bulkRequest, Matchers.sameInstance(originalBulkRequest)); - ActionListener actionListener = Mockito.mock(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener actionListener = mock(ActionListener.class); assertThat(modifier.wrapActionListenerIfNeeded(actionListener), Matchers.sameInstance(actionListener)); } + private static class CaptureActionListener implements ActionListener { + + private BulkResponse response; + + @Override + public void onResponse(BulkResponse bulkItemResponses) { + this.response = bulkItemResponses ; + } + + @Override + public void onFailure(Throwable e) { + } + + public BulkResponse getResponse() { + return response; + } + } } diff --git 
a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index d8663ade60b..344770b2bba 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -22,9 +22,7 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; @@ -44,14 +42,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; import org.mockito.stubbing.Answer; -import java.util.HashSet; -import java.util.Set; import java.util.function.Consumer; -import static org.elasticsearch.action.ingest.IngestActionFilter.BulkRequestModifier; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.same; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; @@ -112,18 +105,6 @@ public class IngestActionFilterTests extends ESTestCase { verifyZeroInteractions(actionFilterChain); } - public void testApplyAlreadyProcessed() throws Exception { - Task task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id"); - indexRequest.source("field", "value"); - indexRequest.putHeader(IngestActionFilter.PIPELINE_ALREADY_PROCESSED, true); - ActionListener actionListener = mock(ActionListener.class); - ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); - 
filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); - verify(actionFilterChain).proceed(task, IndexAction.NAME, indexRequest, actionListener); - verifyZeroInteractions(executionService, actionListener); - } - @SuppressWarnings("unchecked") public void testApplyExecuted() throws Exception { Task task = mock(Task.class); @@ -231,63 +212,4 @@ public class IngestActionFilterTests extends ESTestCase { assertThat(assertedRequests, equalTo(numRequest)); }); } - - public void testBulkRequestModifier() { - int numRequests = scaledRandomIntBetween(8, 64); - BulkRequest bulkRequest = new BulkRequest(); - for (int i = 0; i < numRequests; i++) { - bulkRequest.add(new IndexRequest("_index", "_type", String.valueOf(i)).source("{}")); - } - CaptureActionListener actionListener = new CaptureActionListener(); - BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(bulkRequest); - - int i = 0; - Set failedSlots = new HashSet<>(); - while (bulkRequestModifier.hasNext()) { - bulkRequestModifier.next(); - if (randomBoolean()) { - bulkRequestModifier.markCurrentItemAsFailed(new RuntimeException()); - failedSlots.add(i); - } - i++; - } - - assertThat(bulkRequestModifier.getBulkRequest().requests().size(), equalTo(numRequests - failedSlots.size())); - // simulate that we actually executed the modified bulk request: - ActionListener result = bulkRequestModifier.wrapActionListenerIfNeeded(actionListener); - result.onResponse(new BulkResponse(new BulkItemResponse[numRequests - failedSlots.size()], 0)); - - BulkResponse bulkResponse = actionListener.getResponse(); - for (int j = 0; j < bulkResponse.getItems().length; j++) { - if (failedSlots.contains(j)) { - BulkItemResponse item = bulkResponse.getItems()[j]; - assertThat(item.isFailed(), is(true)); - assertThat(item.getFailure().getIndex(), equalTo("_index")); - assertThat(item.getFailure().getType(), equalTo("_type")); - assertThat(item.getFailure().getId(), equalTo(String.valueOf(j))); - 
assertThat(item.getFailure().getMessage(), equalTo("java.lang.RuntimeException")); - } else { - assertThat(bulkResponse.getItems()[j], nullValue()); - } - } - } - - private static class CaptureActionListener implements ActionListener { - - private BulkResponse response; - - @Override - public void onResponse(BulkResponse bulkItemResponses) { - this.response = bulkItemResponses ; - } - - @Override - public void onFailure(Throwable e) { - } - - public BulkResponse getResponse() { - return response; - } - } - } From b4baa6c7ab5cc993c5f78ebb24e77c84b3d31055 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 11 Jan 2016 17:30:58 +0100 Subject: [PATCH 199/347] remove use of already processed header in favour of resetting the pipeline id to null --- .../action/ingest/IngestActionFilter.java | 15 ++++-------- .../ingest/PipelineExecutionService.java | 2 ++ .../ingest/IngestActionFilterTests.java | 24 +++++++++++++++++++ 3 files changed, 31 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 81ae2ccc9c4..c9467b07a05 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -46,8 +46,6 @@ import java.util.Set; public final class IngestActionFilter extends AbstractComponent implements ActionFilter { - static final String PIPELINE_ALREADY_PROCESSED = "ingest_already_processed"; - private final PipelineExecutionService executionService; @Inject @@ -96,18 +94,15 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } void processIndexRequest(Task task, String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest) { - // The IndexRequest has the same type on the node that receives the request and the node that - // processes the primary action. 
This could lead to a pipeline being executed twice for the same - // index request, hence this check - if (indexRequest.hasHeader(PIPELINE_ALREADY_PROCESSED)) { - chain.proceed(task, action, indexRequest, listener); - return; - } + executionService.execute(indexRequest, t -> { logger.error("failed to execute pipeline [{}]", t, indexRequest.pipeline()); listener.onFailure(t); }, success -> { - indexRequest.putHeader(PIPELINE_ALREADY_PROCESSED, true); + // TransportIndexAction uses IndexRequest and same action name on the node that receives the request and the node that + // processes the primary action. This could lead to a pipeline being executed twice for the same + // index request, hence we set the pipeline to null once its execution completed. + indexRequest.pipeline(null); chain.proceed(task, action, indexRequest, listener); }); } diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 29435769e6b..5553374880e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -61,6 +61,8 @@ public class PipelineExecutionService { if (Strings.hasText(indexRequest.pipeline())) { try { innerExecute(indexRequest, getPipeline(indexRequest.pipeline())); + //this shouldn't be needed here but we do it for consistency with index api which requires it to prevent double execution + indexRequest.pipeline(null); } catch (Throwable e) { itemFailureHandler.accept(new Tuple<>(indexRequest, e)); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index 344770b2bba..91c6765520c 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -45,10 +45,12 @@ import org.mockito.stubbing.Answer; import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.same; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; @@ -212,4 +214,26 @@ public class IngestActionFilterTests extends ESTestCase { assertThat(assertedRequests, equalTo(numRequest)); }); } + + @SuppressWarnings("unchecked") + public void testIndexApiSinglePipelineExecution() { + Answer answer = invocationOnMock -> { + @SuppressWarnings("unchecked") + Consumer listener = (Consumer) invocationOnMock.getArguments()[2]; + listener.accept(true); + return null; + }; + doAnswer(answer).when(executionService).execute(any(IndexRequest.class), any(Consumer.class), any(Consumer.class)); + + Task task = mock(Task.class); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id").source("field", "value"); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); + assertThat(indexRequest.pipeline(), nullValue()); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); + verify(executionService, times(1)).execute(same(indexRequest), any(Consumer.class), any(Consumer.class)); + verify(actionFilterChain, times(2)).proceed(task, IndexAction.NAME, indexRequest, actionListener); + } } From 90743d8db0d3b486590a4ac4f9a7346d8f68b685 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 11 Jan 2016 19:04:34 +0100 
Subject: [PATCH 200/347] add REST test for bulk api integration with ingest --- .../rest-api-spec/test/ingest/70_bulk.yaml | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml new file mode 100644 index 00000000000..b70f05af67e --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml @@ -0,0 +1,105 @@ +setup: + - do: + ingest.put_pipeline: + id: "pipeline1" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field1", + "value": "value1" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "pipeline2" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field2", + "value": "value2" + } + } + ] + } + +--- +"Test bulk request without default pipeline": + + - do: + bulk: + body: + - index: + _index: test_index + _type: test_type + _id: test_id1 + pipeline: pipeline1 + - f1: v1 + - index: + _index: test_index + _type: test_type + _id: test_id2 + - f1: v2 + + - do: + get: + index: test_index + type: test_type + id: test_id1 + + - match: {_source.field1: value1} + - is_false: _source.field2 + + - do: + get: + index: test_index + type: test_type + id: test_id2 + + - is_false: _source.field1 + - is_false: _source.field2 + +--- +"Test bulk request with default pipeline": + + - do: + bulk: + pipeline: pipeline1 + body: + - index: + _index: test_index + _type: test_type + _id: test_id1 + - f1: v1 + - index: + _index: test_index + _type: test_type + _id: test_id2 + pipeline: pipeline2 + - f1: v2 + - do: + get: + index: test_index + type: test_type + id: test_id1 + + - match: {_source.field1: value1} + - is_false: _source.field2 + + - do: + get: + index: test_index + type: test_type + id: test_id2 + + - 
is_false: _source.field1 + - match: {_source.field2: value2} + From cc07e5b9e02851a6e02d05575ba2765497a3b159 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 6 Jan 2016 12:57:27 -0800 Subject: [PATCH 201/347] throw exception when invalid locale is provided to the date processor --- .../ingest/processor/DateProcessor.java | 10 +++++- .../processor/DateProcessorFactoryTests.java | 31 ++++++++++++++++++- 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 1c047382a03..46a6e92fedf 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -27,6 +27,7 @@ import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; import java.util.ArrayList; +import java.util.IllformedLocaleException; import java.util.List; import java.util.Locale; import java.util.Map; @@ -121,7 +122,14 @@ public final class DateProcessor implements Processor { String timezoneString = ConfigurationUtils.readOptionalStringProperty(config, "timezone"); DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); String localeString = ConfigurationUtils.readOptionalStringProperty(config, "locale"); - Locale locale = localeString == null ? 
Locale.ENGLISH : Locale.forLanguageTag(localeString); + Locale locale = Locale.ENGLISH; + if (localeString != null) { + try { + locale = (new Locale.Builder()).setLanguageTag(localeString).build(); + } catch (IllformedLocaleException e) { + throw new IllegalArgumentException("Invalid language tag specified: " + localeString); + } + } List matchFormats = ConfigurationUtils.readList(config, "match_formats"); return new DateProcessor(timezone, locale, matchField, matchFormats, targetField); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java index 4f62461ce46..708b164ebd6 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.processor.DateProcessor; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; @@ -95,6 +94,21 @@ public class DateProcessorFactoryTests extends ESTestCase { assertThat(processor.getLocale().toLanguageTag(), equalTo(locale.toLanguageTag())); } + public void testParseInvalidLocale() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); + config.put("locale", "invalid_locale"); + try { + factory.create(config); + fail("should fail with invalid locale"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Invalid language tag specified: invalid_locale")); + } + } + public void testParseTimezone() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); @@ -108,6 
+122,21 @@ public class DateProcessorFactoryTests extends ESTestCase { assertThat(processor.getTimezone(), equalTo(timezone)); } + public void testParseInvalidTimezone() throws Exception { + DateProcessor.Factory factory = new DateProcessor.Factory(); + Map config = new HashMap<>(); + String sourceField = randomAsciiOfLengthBetween(1, 10); + config.put("match_field", sourceField); + config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); + config.put("timezone", "invalid_timezone"); + try { + factory.create(config); + fail("invalid timezone should fail"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("The datetime zone id 'invalid_timezone' is not recognised")); + } + } + //we generate a timezone out of the available ones in joda, some available in the jdk are not available in joda by default private static DateTimeZone randomTimezone() { List ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); From 4d38a47eb50435030807bfdf980bae2a12f64633 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 12 Jan 2016 12:06:14 +0100 Subject: [PATCH 202/347] Review feedback and several cleanups --- .../ingest/SimulateDocumentVerboseResult.java | 8 +--- .../ingest/SimulateExecutionService.java | 14 ++++--- .../SimulatePipelineTransportAction.java | 4 +- .../ingest/SimulateProcessorResult.java | 42 ++++++++----------- .../ingest/WriteableIngestDocument.java | 29 +++++-------- .../ingest/IngestBootstrapper.java | 4 ++ .../ingest/core/CompoundProcessor.java | 4 +- .../elasticsearch/ingest/core/Pipeline.java | 2 +- .../ingest/core/ValueSource.java | 2 +- .../elasticsearch/threadpool/ThreadPool.java | 2 +- .../ingest/SimulateProcessorResultTests.java | 2 +- .../ingest/grok/GrokProcessor.java | 3 ++ .../ingest/geoip/GeoIpProcessor.java | 2 + 13 files changed, 56 insertions(+), 62 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java 
b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java index 2b119afb9d5..8a8a1b00325 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java @@ -51,7 +51,7 @@ public class SimulateDocumentVerboseResult implements SimulateDocumentResult processorResults = new ArrayList<>(); for (int i = 0; i < size; i++) { - processorResults.add(SimulateProcessorResult.readSimulateProcessorResultFrom(in)); + processorResults.add(new SimulateProcessorResult(in)); } return new SimulateDocumentVerboseResult(processorResults); } @@ -67,7 +67,7 @@ public class SimulateDocumentVerboseResult implements SimulateDocumentResult listener) { - threadPool.executor(THREAD_POOL_NAME).execute(() -> { - List responses = new ArrayList<>(); - for (IngestDocument ingestDocument : request.getDocuments()) { - responses.add(executeDocument(request.getPipeline(), ingestDocument, request.isVerbose())); + threadPool.executor(THREAD_POOL_NAME).execute(new ActionRunnable(listener) { + @Override + protected void doRun() throws Exception { + List responses = new ArrayList<>(); + for (IngestDocument ingestDocument : request.getDocuments()) { + responses.add(executeDocument(request.getPipeline(), ingestDocument, request.isVerbose())); + } + listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); } - listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); }); } } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index 3d5e02a9332..89764b0ff7f 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -47,9 +47,9 @@ public class SimulatePipelineTransportAction extends HandledTransportAction listener) { - Map source = XContentHelper.convertToMap(request.getSource(), false).v2(); + final Map source = XContentHelper.convertToMap(request.getSource(), false).v2(); - SimulatePipelineRequest.Parsed simulateRequest; + final SimulatePipelineRequest.Parsed simulateRequest; try { if (request.getId() != null) { simulateRequest = SimulatePipelineRequest.parseWithPipelineId(request.getId(), source, request.isVerbose(), pipelineStore); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index afa85b4c219..25680152049 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -31,26 +31,31 @@ import java.io.IOException; import java.util.Collections; public class SimulateProcessorResult implements Writeable, ToXContent { + private final String processorId; + private final WriteableIngestDocument ingestDocument; + private final Exception failure; - private static final SimulateProcessorResult PROTOTYPE = new SimulateProcessorResult("_na", new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap()))); - - private String processorId; - private WriteableIngestDocument ingestDocument; - private Exception failure; + public SimulateProcessorResult(StreamInput in) throws IOException { + this.processorId = in.readString(); + if (in.readBoolean()) { + this.failure = in.readThrowable(); + this.ingestDocument = null; + } else { + this.ingestDocument = new WriteableIngestDocument(in); + this.failure = null; + } + } public SimulateProcessorResult(String processorId, IngestDocument ingestDocument) { this.processorId = 
processorId; this.ingestDocument = new WriteableIngestDocument(ingestDocument); - } - - private SimulateProcessorResult(String processorId, WriteableIngestDocument ingestDocument) { - this.processorId = processorId; - this.ingestDocument = ingestDocument; + this.failure = null; } public SimulateProcessorResult(String processorId, Exception failure) { this.processorId = processorId; this.failure = failure; + this.ingestDocument = null; } public IngestDocument getIngestDocument() { @@ -68,18 +73,9 @@ public class SimulateProcessorResult implements Writeable, ToXContent { - private static final WriteableIngestDocument PROTOTYPE = new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap())); - private final IngestDocument ingestDocument; WriteableIngestDocument(IngestDocument ingestDocument) { @@ -43,20 +41,21 @@ final class WriteableIngestDocument implements Writeable sourceAndMetadata = in.readMap(); + @SuppressWarnings("unchecked") + Map ingestMetadata = (Map) in.readGenericValue(); + this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata); + } + IngestDocument getIngestDocument() { return ingestDocument; } - static WriteableIngestDocument readWriteableIngestDocumentFrom(StreamInput in) throws IOException { - return PROTOTYPE.readFrom(in); - } @Override public WriteableIngestDocument readFrom(StreamInput in) throws IOException { - Map sourceAndMetadata = in.readMap(); - @SuppressWarnings("unchecked") - Map ingestMetadata = (Map) in.readGenericValue(); - return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); + return new WriteableIngestDocument(in); } @Override @@ -67,13 +66,13 @@ final class WriteableIngestDocument implements Writeable metadataMap = ingestDocument.extractMetadata(); for (Map.Entry metadata : metadataMap.entrySet()) { builder.field(metadata.getKey().getFieldName(), metadata.getValue()); } - builder.field(Fields.SOURCE, ingestDocument.getSourceAndMetadata()); - 
builder.startObject(Fields.INGEST); + builder.field("_source", ingestDocument.getSourceAndMetadata()); + builder.startObject("_ingest"); for (Map.Entry ingestMetadata : ingestDocument.getIngestMetadata().entrySet()) { builder.field(ingestMetadata.getKey(), ingestMetadata.getValue()); } @@ -103,10 +102,4 @@ final class WriteableIngestDocument implements Writeable config, Map processorRegistry) throws Exception { - String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); + String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); // TODO(simonw): can we make these strings constants? List processors = readProcessors("processors", processorRegistry, config); List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.unmodifiableList(processors), Collections.unmodifiableList(onFailureProcessors)); diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java index 987002f0354..5280b3e6702 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java @@ -57,7 +57,7 @@ public interface ValueSource { valueSourceList.add(wrap(item, templateService)); } return new ListValue(valueSourceList); - } else if (value == null || value instanceof Integer || + } else if (value == null || value instanceof Integer || // TODO(simonw): maybe we just check for Number? 
value instanceof Long || value instanceof Float || value instanceof Double || value instanceof Boolean) { return new ObjectValue(value); diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index c73605c5547..bf4f75a569f 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -88,7 +88,7 @@ public class ThreadPool extends AbstractComponent { public static final String FORCE_MERGE = "force_merge"; public static final String FETCH_SHARD_STARTED = "fetch_shard_started"; public static final String FETCH_SHARD_STORE = "fetch_shard_store"; - public static final String INGEST = "ingest"; + public static final String INGEST = "ingest"; //TODO(simonw): wow what is the reason for having yet another threadpool? I really think we should just use index for this. } public enum ThreadPoolType { diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index 208d2534a4c..999e7ee6650 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -47,7 +47,7 @@ public class SimulateProcessorResultTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); simulateProcessorResult.writeTo(out); StreamInput streamInput = StreamInput.wrap(out.bytes()); - SimulateProcessorResult otherSimulateProcessorResult = SimulateProcessorResult.readSimulateProcessorResultFrom(streamInput); + SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(streamInput); assertThat(otherSimulateProcessorResult.getProcessorId(), equalTo(simulateProcessorResult.getProcessorId())); 
assertThat(otherSimulateProcessorResult.getIngestDocument(), equalTo(simulateProcessorResult.getIngestDocument())); if (isFailure) { diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java index d38ea96131f..2c63646ff81 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java @@ -78,6 +78,9 @@ public final class GrokProcessor implements Processor { private final Map builtinPatternBank; public Factory() throws IOException { + // TODO(simonw): we should have a static helper method to load these patterns and make this + // factory only accept a String->String map instead. That way we can load + // the patterns in the IngestGrokPlugin ctor or even in a static context and this ctor doesn't need to throw any exception. Map builtinPatterns = new HashMap<>(); for (String pattern : PATTERN_NAMES) { try(InputStream is = getClass().getResourceAsStream("/patterns/" + pattern)) { diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 9f65c76c215..ab87d51318b 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -231,6 +231,8 @@ public final class GeoIpProcessor implements Processor { private final Map databaseReaders; public Factory(Path configDirectory) { + + // TODO(simonw): same as fro grok we should load this outside of the factory in a static method and hass the map to the ctor Path geoIpConfigDirectory = configDirectory.resolve("ingest-geoip"); if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) { throw new 
IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist"); From b111c7bbd5213e8def976c8d8a2ce083fd546211 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 12:28:12 +0100 Subject: [PATCH 203/347] redirect ingest requests to an ingest node Rather than failing the request, when a node with node.ingest set to false receives an index or bulk request with a pipeline id, it should try to redirect the request to another node with node.ingest set to true. If there are no node with ingest set to true based on the current cluster state, an exception will be returned and the request will fail. Note that in case there are no ingest nodes and bulk has a pipeline id specified only for a subset of index requests, the whole bulk will fail. --- .../elasticsearch/action/ActionModule.java | 4 +- .../ingest/IngestDisabledActionFilter.java | 70 ----- .../ingest/IngestProxyActionFilter.java | 153 +++++++++++ .../elasticsearch/ingest/IngestModule.java | 8 + .../ingest/IngestProxyActionFilterTests.java | 251 ++++++++++++++++++ .../ingest_mustache/10_ingest_disabled.yaml | 6 +- 6 files changed, 417 insertions(+), 75 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java create mode 100644 core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java create mode 100644 core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 51a5498fd63..c784eba55dc 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -150,7 +150,7 @@ import org.elasticsearch.action.indexedscripts.get.TransportGetIndexedScriptActi import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import 
org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction; import org.elasticsearch.action.ingest.IngestActionFilter; -import org.elasticsearch.action.ingest.IngestDisabledActionFilter; +import org.elasticsearch.action.ingest.IngestProxyActionFilter; import org.elasticsearch.action.ingest.DeletePipelineAction; import org.elasticsearch.action.ingest.DeletePipelineTransportAction; import org.elasticsearch.action.ingest.GetPipelineAction; @@ -256,7 +256,7 @@ public class ActionModule extends AbstractModule { if (ingestEnabled) { registerFilter(IngestActionFilter.class); } else { - registerFilter(IngestDisabledActionFilter.class); + registerFilter(IngestProxyActionFilter.class); } } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java deleted file mode 100644 index 14abf4ebbbe..00000000000 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestDisabledActionFilter.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.action.ingest; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.ActionFilter; -import org.elasticsearch.action.support.ActionFilterChain; -import org.elasticsearch.common.Strings; -import org.elasticsearch.tasks.Task; - -public final class IngestDisabledActionFilter implements ActionFilter { - - @Override - public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - boolean isIngestRequest = false; - if (IndexAction.NAME.equals(action)) { - assert request instanceof IndexRequest; - IndexRequest indexRequest = (IndexRequest) request; - isIngestRequest = Strings.hasText(indexRequest.pipeline()); - } else if (BulkAction.NAME.equals(action)) { - assert request instanceof BulkRequest; - BulkRequest bulkRequest = (BulkRequest) request; - for (ActionRequest actionRequest : bulkRequest.requests()) { - if (actionRequest instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) actionRequest; - if (Strings.hasText(indexRequest.pipeline())) { - isIngestRequest = true; - break; - } - } - } - } - if (isIngestRequest) { - throw new IllegalArgumentException("node.ingest is set to false, cannot execute pipeline"); - } - chain.proceed(task, action, request, listener); - } - - @Override - public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { - chain.proceed(action, response, listener); - } - - @Override - public int order() { - return Integer.MAX_VALUE; - } -} diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java 
b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java new file mode 100644 index 00000000000..54d01bab36c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -0,0 +1,153 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.ingest; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.ingest.IngestModule; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +public final class IngestProxyActionFilter implements ActionFilter { + + private final ClusterService clusterService; + private final TransportService transportService; + private final AtomicInteger randomNodeGenerator = new AtomicInteger(); + + @Inject + public IngestProxyActionFilter(ClusterService clusterService, TransportService transportService) { + assert IngestModule.isIngestEnabled(clusterService.state().nodes().localNode().attributes()) == false; + this.clusterService = clusterService; + this.transportService = transportService; + } + + @Override + public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + Action ingestAction = null; + boolean 
isIngestRequest = false; + if (IndexAction.NAME.equals(action)) { + ingestAction = IndexAction.INSTANCE; + assert request instanceof IndexRequest; + IndexRequest indexRequest = (IndexRequest) request; + isIngestRequest = Strings.hasText(indexRequest.pipeline()); + } else if (BulkAction.NAME.equals(action)) { + ingestAction = BulkAction.INSTANCE; + assert request instanceof BulkRequest; + BulkRequest bulkRequest = (BulkRequest) request; + for (ActionRequest actionRequest : bulkRequest.requests()) { + if (actionRequest instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + if (Strings.hasText(indexRequest.pipeline())) { + isIngestRequest = true; + break; + } + } + } + } + + if (isIngestRequest) { + assert ingestAction != null; + forwardIngestRequest(ingestAction, request, listener); + return; + } + chain.proceed(task, action, request, listener); + } + + private void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { + transportService.sendRequest(randomIngestNode(), action.name(), request, new TransportResponseHandler() { + @Override + public TransportResponse newInstance() { + return action.newResponse(); + } + + @Override + @SuppressWarnings("unchecked") + public void handleResponse(TransportResponse response) { + listener.onResponse(response); + } + + @Override + public void handleException(TransportException exp) { + listener.onFailure(exp); + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); + } + + @Override + public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + chain.proceed(action, response, listener); + } + + @Override + public int order() { + return Integer.MAX_VALUE; + } + + private DiscoveryNode randomIngestNode() { + ClusterState state = clusterService.state(); + List ingestNodes = new ArrayList<>(); + for (DiscoveryNode node : state.nodes()) { + if 
(IngestModule.isIngestEnabled(node.getAttributes())) { + ingestNodes.add(node); + } + } + + if (ingestNodes.isEmpty()) { + throw new IllegalStateException("There are no ingest nodes in this cluster, unable to forward request to an ingest node."); + } + + int index = getNodeNumber(); + return ingestNodes.get((index) % ingestNodes.size()); + } + + private int getNodeNumber() { + int index = randomNodeGenerator.incrementAndGet(); + if (index < 0) { + index = 0; + randomNodeGenerator.set(0); + } + return index; + } +} diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java index 9dddbf6016b..e74e89a6c00 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestModule.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestModule.java @@ -19,6 +19,8 @@ package org.elasticsearch.ingest; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -81,4 +83,10 @@ public class IngestModule extends AbstractModule { public static boolean isIngestEnabled(Settings settings) { return settings.getAsBoolean("node.ingest", true); } + + public static boolean isIngestEnabled(ImmutableOpenMap nodeAttributes) { + String ingestEnabled = nodeAttributes.get("ingest"); + //reproduces same logic used in settings.getAsBoolean used above + return Booleans.parseBoolean(ingestEnabled, true); + } } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java new file mode 100644 index 00000000000..9f6e5b13b04 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -0,0 +1,251 @@ +/* + * Licensed to Elasticsearch under one or more contributor 
+ * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.ingest; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.ingest.IngestModule; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; +import 
org.hamcrest.CustomTypeSafeMatcher; +import org.mockito.stubbing.Answer; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.argThat; +import static org.mockito.Matchers.eq; +import static org.mockito.Matchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +public class IngestProxyActionFilterTests extends ESTestCase { + + private TransportService transportService; + + @SuppressWarnings("unchecked") + private IngestProxyActionFilter buildFilter(int ingestNodes, int totalNodes) { + ClusterState clusterState = mock(ClusterState.class); + DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder(); + for (int i = 0; i < totalNodes; i++) { + String nodeId = "node" + i; + Map attributes = new HashMap<>(); + if (i >= ingestNodes) { + attributes.put("ingest", "false"); + } else if (randomBoolean()) { + attributes.put("ingest", "true"); + } + builder.put(new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, VersionUtils.randomVersion(random()))); + } + //at least one node must not have ingest enabled, and that will always be the last one + builder.localNodeId("node" + (totalNodes - 1)); + when(clusterState.nodes()).thenReturn(builder.build()); + ClusterService clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(clusterState); + transportService = mock(TransportService.class); + return new IngestProxyActionFilter(clusterService, transportService); + } + + public void testApplyNoIngestNodes() { + Task task = mock(Task.class); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + 
int totalNodes = randomIntBetween(1, 5); + IngestProxyActionFilter filter = buildFilter(0, totalNodes); + + String action; + ActionRequest request; + if (randomBoolean()) { + action = IndexAction.NAME; + request = new IndexRequest().pipeline("_id"); + } else { + action = BulkAction.NAME; + request = new BulkRequest().add(new IndexRequest().pipeline("_id")); + } + try { + filter.apply(task, action, request, actionListener, actionFilterChain); + fail("should have failed because there are no ingest nodes"); + } catch(IllegalStateException e) { + assertThat(e.getMessage(), equalTo("There are no ingest nodes in this cluster, unable to forward request to an ingest node.")); + } + verifyZeroInteractions(transportService); + verifyZeroInteractions(actionFilterChain); + verifyZeroInteractions(actionListener); + } + + public void testApplyNoPipelineId() { + Task task = mock(Task.class); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + int totalNodes = randomIntBetween(1, 5); + IngestProxyActionFilter filter = buildFilter(randomIntBetween(0, totalNodes - 1), totalNodes); + + String action; + ActionRequest request; + if (randomBoolean()) { + action = IndexAction.NAME; + request = new IndexRequest(); + } else { + action = BulkAction.NAME; + request = new BulkRequest().add(new IndexRequest()); + } + filter.apply(task, action, request, actionListener, actionFilterChain); + verifyZeroInteractions(transportService); + verify(actionFilterChain).proceed(any(Task.class), eq(action), same(request), same(actionListener)); + verifyZeroInteractions(actionListener); + } + + public void testApplyAnyAction() { + Task task = mock(Task.class); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + ActionRequest request = mock(ActionRequest.class); + int totalNodes = randomIntBetween(1, 5); + IngestProxyActionFilter filter = 
buildFilter(randomIntBetween(0, totalNodes - 1), totalNodes); + + String action = randomAsciiOfLengthBetween(1, 20); + filter.apply(task, action, request, actionListener, actionFilterChain); + verifyZeroInteractions(transportService); + verify(actionFilterChain).proceed(any(Task.class), eq(action), same(request), same(actionListener)); + verifyZeroInteractions(actionListener); + } + + @SuppressWarnings("unchecked") + public void testApplyIndexRedirect() { + Task task = mock(Task.class); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + int totalNodes = randomIntBetween(2, 5); + IngestProxyActionFilter filter = buildFilter(randomIntBetween(1, totalNodes - 1), totalNodes); + Answer answer = invocationOnMock -> { + TransportResponseHandler transportResponseHandler = (TransportResponseHandler) invocationOnMock.getArguments()[3]; + transportResponseHandler.handleResponse(new IndexResponse()); + return null; + }; + doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class)); + + IndexRequest indexRequest = new IndexRequest().pipeline("_id"); + filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); + + verify(transportService).sendRequest(argThat(new CustomTypeSafeMatcher("discovery node should be an ingest node") { + @Override + protected boolean matchesSafely(DiscoveryNode node) { + return IngestModule.isIngestEnabled(node.getAttributes()); + } + }), eq(IndexAction.NAME), same(indexRequest), any(TransportResponseHandler.class)); + verifyZeroInteractions(actionFilterChain); + verify(actionListener).onResponse(any(IndexResponse.class)); + verify(actionListener, never()).onFailure(any(TransportException.class)); + } + + @SuppressWarnings("unchecked") + public void testApplyBulkRedirect() { + Task task = mock(Task.class); + ActionListener actionListener = 
mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + int totalNodes = randomIntBetween(2, 5); + IngestProxyActionFilter filter = buildFilter(randomIntBetween(1, totalNodes - 1), totalNodes); + Answer answer = invocationOnMock -> { + TransportResponseHandler transportResponseHandler = (TransportResponseHandler) invocationOnMock.getArguments()[3]; + transportResponseHandler.handleResponse(new BulkResponse(null, -1)); + return null; + }; + doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class)); + + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest().pipeline("_id")); + int numNoPipelineRequests = randomIntBetween(0, 10); + for (int i = 0; i < numNoPipelineRequests; i++) { + bulkRequest.add(new IndexRequest()); + } + filter.apply(task, BulkAction.NAME, bulkRequest, actionListener, actionFilterChain); + + verify(transportService).sendRequest(argThat(new CustomTypeSafeMatcher("discovery node should be an ingest node") { + @Override + protected boolean matchesSafely(DiscoveryNode node) { + return IngestModule.isIngestEnabled(node.getAttributes()); + } + }), eq(BulkAction.NAME), same(bulkRequest), any(TransportResponseHandler.class)); + verifyZeroInteractions(actionFilterChain); + verify(actionListener).onResponse(any(BulkResponse.class)); + verify(actionListener, never()).onFailure(any(TransportException.class)); + } + + @SuppressWarnings("unchecked") + public void testApplyFailures() { + Task task = mock(Task.class); + ActionListener actionListener = mock(ActionListener.class); + ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); + int totalNodes = randomIntBetween(2, 5); + IngestProxyActionFilter filter = buildFilter(randomIntBetween(1, totalNodes - 1), totalNodes); + Answer answer = invocationOnMock -> { + TransportResponseHandler transportResponseHandler = 
(TransportResponseHandler) invocationOnMock.getArguments()[3]; + transportResponseHandler.handleException(new TransportException(new IllegalArgumentException())); + return null; + }; + doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class)); + + String action; + ActionRequest request; + if (randomBoolean()) { + action = IndexAction.NAME; + request = new IndexRequest().pipeline("_id"); + } else { + action = BulkAction.NAME; + request = new BulkRequest().add(new IndexRequest().pipeline("_id")); + } + + filter.apply(task, action, request, actionListener, actionFilterChain); + + verify(transportService).sendRequest(argThat(new CustomTypeSafeMatcher("discovery node should be an ingest node") { + @Override + protected boolean matchesSafely(DiscoveryNode node) { + return IngestModule.isIngestEnabled(node.getAttributes()); + } + }), eq(action), same(request), any(TransportResponseHandler.class)); + verifyZeroInteractions(actionFilterChain); + verify(actionListener).onFailure(any(TransportException.class)); + verify(actionListener, never()).onResponse(any(TransportResponse.class)); + } +} diff --git a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml index b2b983e4fa9..24ac604989f 100644 --- a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml +++ b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml @@ -79,7 +79,7 @@ --- "Test index api with pipeline id fails when node.ingest is set to false": - do: - catch: /node.ingest is set to false, cannot execute pipeline/ + catch: /There are no ingest nodes in this cluster, unable to forward request to an ingest node./ index: index: test type: test @@ -94,7 +94,7 @@ --- "Test bulk api with 
pipeline id fails when node.ingest is set to false": - do: - catch: /node.ingest is set to false, cannot execute pipeline/ + catch: /There are no ingest nodes in this cluster, unable to forward request to an ingest node./ bulk: pipeline: "my_pipeline_1" body: @@ -112,7 +112,7 @@ --- "Test bulk api that contains a single index call with pipeline id fails when node.ingest is set to false": - do: - catch: /node.ingest is set to false, cannot execute pipeline/ + catch: /There are no ingest nodes in this cluster, unable to forward request to an ingest node./ bulk: body: - index: From d67106af4e280706f35ada4c2659368e0d89ab1a Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 14:51:05 +0100 Subject: [PATCH 204/347] added specific tests for isIngestEnabled methods --- .../ingest/IngestModuleTests.java | 80 +++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 core/src/test/java/org/elasticsearch/ingest/IngestModuleTests.java diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestModuleTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestModuleTests.java new file mode 100644 index 00000000000..c5abb491161 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/IngestModuleTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class IngestModuleTests extends ESTestCase { + + public void testIsIngestEnabledSettings() { + assertThat(IngestModule.isIngestEnabled(Settings.EMPTY), equalTo(true)); + assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", true).build()), equalTo(true)); + assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "true").build()), equalTo(true)); + assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", false).build()), equalTo(false)); + + assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "false").build()), equalTo(false)); + assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "off").build()), equalTo(false)); + assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "no").build()), equalTo(false)); + assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "0").build()), equalTo(false)); + } + + public void testIsIngestEnabledAttributes() { + assertThat(IngestModule.isIngestEnabled(ImmutableOpenMap.builder().build()), equalTo(true)); + + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); + builder.put("ingest", "true"); + assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(true)); + + builder = ImmutableOpenMap.builder(); + builder.put("ingest", "false"); + assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + + builder = ImmutableOpenMap.builder(); + builder.put("ingest", "off"); + assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + + builder = ImmutableOpenMap.builder(); 
+ builder.put("ingest", "no"); + assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + + builder = ImmutableOpenMap.builder(); + builder.put("ingest", "0"); + assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + } + + public void testIsIngestEnabledMethodsReturnTheSameValue() { + String randomString; + if (randomBoolean()) { + randomString = randomFrom("true", "false", "on", "off", "yes", "no", "0", "1"); + } else { + randomString = randomAsciiOfLengthBetween(1, 5); + } + Settings settings = Settings.builder().put("node.ingest", randomString).build(); + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); + builder.put("ingest", randomString); + ImmutableOpenMap attributes = builder.build(); + + assertThat(IngestModule.isIngestEnabled(settings), equalTo(IngestModule.isIngestEnabled(attributes))); + } +} From 7dac1b31c3855842e1a33de2d47127d9fcf5c8f0 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 15:15:56 +0100 Subject: [PATCH 205/347] extract IngestNodeMatcher to its own private class --- .../ingest/IngestProxyActionFilterTests.java | 32 ++++++++----------- 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index 9f6e5b13b04..44a4c492f7f 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -168,12 +168,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { IndexRequest indexRequest = new IndexRequest().pipeline("_id"); filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); - verify(transportService).sendRequest(argThat(new CustomTypeSafeMatcher("discovery node should be an ingest node") { - @Override - protected boolean 
matchesSafely(DiscoveryNode node) { - return IngestModule.isIngestEnabled(node.getAttributes()); - } - }), eq(IndexAction.NAME), same(indexRequest), any(TransportResponseHandler.class)); + verify(transportService).sendRequest(argThat(new IngestNodeMatcher()), eq(IndexAction.NAME), same(indexRequest), any(TransportResponseHandler.class)); verifyZeroInteractions(actionFilterChain); verify(actionListener).onResponse(any(IndexResponse.class)); verify(actionListener, never()).onFailure(any(TransportException.class)); @@ -201,12 +196,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { } filter.apply(task, BulkAction.NAME, bulkRequest, actionListener, actionFilterChain); - verify(transportService).sendRequest(argThat(new CustomTypeSafeMatcher("discovery node should be an ingest node") { - @Override - protected boolean matchesSafely(DiscoveryNode node) { - return IngestModule.isIngestEnabled(node.getAttributes()); - } - }), eq(BulkAction.NAME), same(bulkRequest), any(TransportResponseHandler.class)); + verify(transportService).sendRequest(argThat(new IngestNodeMatcher()), eq(BulkAction.NAME), same(bulkRequest), any(TransportResponseHandler.class)); verifyZeroInteractions(actionFilterChain); verify(actionListener).onResponse(any(BulkResponse.class)); verify(actionListener, never()).onFailure(any(TransportException.class)); @@ -238,14 +228,20 @@ public class IngestProxyActionFilterTests extends ESTestCase { filter.apply(task, action, request, actionListener, actionFilterChain); - verify(transportService).sendRequest(argThat(new CustomTypeSafeMatcher("discovery node should be an ingest node") { - @Override - protected boolean matchesSafely(DiscoveryNode node) { - return IngestModule.isIngestEnabled(node.getAttributes()); - } - }), eq(action), same(request), any(TransportResponseHandler.class)); + verify(transportService).sendRequest(argThat(new IngestNodeMatcher()), eq(action), same(request), any(TransportResponseHandler.class)); 
verifyZeroInteractions(actionFilterChain); verify(actionListener).onFailure(any(TransportException.class)); verify(actionListener, never()).onResponse(any(TransportResponse.class)); } + + private static class IngestNodeMatcher extends CustomTypeSafeMatcher { + private IngestNodeMatcher() { + super("discovery node should be an ingest node"); + } + + @Override + protected boolean matchesSafely(DiscoveryNode node) { + return IngestModule.isIngestEnabled(node.getAttributes()); + } + } } From da0f934c52cb906dc054a94095c0d6bc47bcf2b3 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 15:17:50 +0100 Subject: [PATCH 206/347] add randomness init for random node generator --- .../elasticsearch/action/ingest/IngestProxyActionFilter.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 54d01bab36c..0602fbbed20 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.ingest.IngestModule; @@ -49,7 +50,7 @@ public final class IngestProxyActionFilter implements ActionFilter { private final ClusterService clusterService; private final TransportService transportService; - private final AtomicInteger randomNodeGenerator = new AtomicInteger(); + private final AtomicInteger randomNodeGenerator = new AtomicInteger(Randomness.get().nextInt()); @Inject public IngestProxyActionFilter(ClusterService 
clusterService, TransportService transportService) { From 1dc5a6f3cf81bd2f04910b8d3a8da02a1628add2 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 15:48:28 +0100 Subject: [PATCH 207/347] move assert that proxy filter should be installed on non ingest nodes only --- .../action/ingest/IngestProxyActionFilter.java | 6 ++++-- .../action/ingest/IngestProxyActionFilterTests.java | 2 -- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 0602fbbed20..98c256f8a72 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; @@ -54,7 +55,6 @@ public final class IngestProxyActionFilter implements ActionFilter { @Inject public IngestProxyActionFilter(ClusterService clusterService, TransportService transportService) { - assert IngestModule.isIngestEnabled(clusterService.state().nodes().localNode().attributes()) == false; this.clusterService = clusterService; this.transportService = transportService; } @@ -128,8 +128,10 @@ public final class IngestProxyActionFilter implements ActionFilter { private DiscoveryNode randomIngestNode() { ClusterState state = clusterService.state(); + DiscoveryNodes nodes = state.nodes(); + assert IngestModule.isIngestEnabled(nodes.localNode().attributes()) == false; List ingestNodes = new ArrayList<>(); - for (DiscoveryNode node : 
state.nodes()) { + for (DiscoveryNode node : nodes) { if (IngestModule.isIngestEnabled(node.getAttributes())) { ingestNodes.add(node); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index 44a4c492f7f..186c806b8e0 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -78,8 +78,6 @@ public class IngestProxyActionFilterTests extends ESTestCase { } builder.put(new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, VersionUtils.randomVersion(random()))); } - //at least one node must not have ingest enabled, and that will always be the last one - builder.localNodeId("node" + (totalNodes - 1)); when(clusterState.nodes()).thenReturn(builder.build()); ClusterService clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(clusterState); From 0769636ea293d0f070226445f13546553f0fc564 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 15:49:15 +0100 Subject: [PATCH 208/347] add some nodes with ingest set to false to test redirect --- .../java/org/elasticsearch/ingest/IngestClientIT.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 3767aac85f5..43c12255b17 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -36,6 +36,7 @@ import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; import org.elasticsearch.action.ingest.SimulatePipelineAction; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -51,8 +52,17 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; +@ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class IngestClientIT extends ESIntegTestCase { + @Override + protected Settings nodeSettings(int nodeOrdinal) { + if (nodeOrdinal % 2 == 0) { + return Settings.builder().put("node.ingest", false).put(super.nodeSettings(nodeOrdinal)).build(); + } + return super.nodeSettings(nodeOrdinal); + } + @Override protected Collection> nodePlugins() { return pluginList(IngestPlugin.class); From 5d94f5d35f128145d9780b1dd39000a2c5511f3a Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 16:06:56 +0100 Subject: [PATCH 209/347] make IngestProxyActionFilterTests work again --- .../action/ingest/IngestProxyActionFilter.java | 8 ++------ .../action/ingest/IngestProxyActionFilterTests.java | 8 +++++++- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 98c256f8a72..be9051bc9ca 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -29,9 +29,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Randomness; import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; @@ -127,11 +125,9 @@ public final class IngestProxyActionFilter implements ActionFilter { } private DiscoveryNode randomIngestNode() { - ClusterState state = clusterService.state(); - DiscoveryNodes nodes = state.nodes(); - assert IngestModule.isIngestEnabled(nodes.localNode().attributes()) == false; + assert IngestModule.isIngestEnabled(clusterService.localNode().attributes()) == false; List ingestNodes = new ArrayList<>(); - for (DiscoveryNode node : nodes) { + for (DiscoveryNode node : clusterService.state().nodes()) { if (IngestModule.isIngestEnabled(node.getAttributes())) { ingestNodes.add(node); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index 186c806b8e0..ed1bc7ee0ce 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -68,6 +68,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { private IngestProxyActionFilter buildFilter(int ingestNodes, int totalNodes) { ClusterState clusterState = mock(ClusterState.class); DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder(); + DiscoveryNode localNode = null; for (int i = 0; i < totalNodes; i++) { String nodeId = "node" + i; Map attributes = new HashMap<>(); @@ -76,10 +77,15 @@ public class IngestProxyActionFilterTests extends ESTestCase { } else if (randomBoolean()) { attributes.put("ingest", "true"); } - builder.put(new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, VersionUtils.randomVersion(random()))); + DiscoveryNode node = new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, VersionUtils.randomVersion(random())); + builder.put(node); + if (i == totalNodes - 1) { + localNode = node; 
+ } } when(clusterState.nodes()).thenReturn(builder.build()); ClusterService clusterService = mock(ClusterService.class); + when(clusterService.localNode()).thenReturn(localNode); when(clusterService.state()).thenReturn(clusterState); transportService = mock(TransportService.class); return new IngestProxyActionFilter(clusterService, transportService); From f8019926b3ee944bf39cdd3c9d5140726c346322 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 19:44:04 +0100 Subject: [PATCH 210/347] remove declared serialVersionUid --- .../elasticsearch/ingest/processor/RenameProcessorTests.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java index c42ca825652..ae354e893d7 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java @@ -125,7 +125,6 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameAtomicOperationSetFails() throws Exception { Map source = new HashMap() { - private static final long serialVersionUID = 362498820763181265L; @Override public Object put(String key, Object value) { if (key.equals("new_field")) { @@ -150,7 +149,6 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameAtomicOperationRemoveFails() throws Exception { Map source = new HashMap() { - private static final long serialVersionUID = 362498820763181265L; @Override public Object remove(Object key) { if (key.equals("list")) { From 11a6622e46c65e0e94244f791146f957f8b1b63d Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jan 2016 19:10:04 +0100 Subject: [PATCH 211/347] add proper ingest methods to Client Now that ingest is part of core we can add specific put/get/delete/simualtePipeline methods to the Client interface which is nice for java api users --- 
.../java/org/elasticsearch/client/Client.java | 70 +++++++++++ .../client/support/AbstractClient.java | 74 ++++++++++++ .../ingest/RestDeletePipelineAction.java | 5 +- .../action/ingest/RestGetPipelineAction.java | 5 +- .../action/ingest/RestPutPipelineAction.java | 5 +- .../ingest/RestSimulatePipelineAction.java | 5 +- .../elasticsearch/ingest/IngestClientIT.java | 109 +++++++++--------- 7 files changed, 209 insertions(+), 64 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index e7461dabfe1..2d7e8bde0a6 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -51,6 +51,16 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptResponse; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; +import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineRequestBuilder; +import org.elasticsearch.action.ingest.GetPipelineResponse; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.percolate.MultiPercolateRequest; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; import org.elasticsearch.action.percolate.MultiPercolateResponse; @@ -592,6 +602,66 @@ public interface Client extends ElasticsearchClient, 
Releasable { void fieldStats(FieldStatsRequest request, ActionListener listener); + /** + * Stores an ingest pipeline + */ + void putPipeline(PutPipelineRequest request, ActionListener listener); + + /** + * Stores an ingest pipeline + */ + ActionFuture putPipeline(PutPipelineRequest request); + + /** + * Stores an ingest pipeline + */ + PutPipelineRequestBuilder preparePutPipeline(); + + /** + * Deletes a stored ingest pipeline + */ + void deletePipeline(DeletePipelineRequest request, ActionListener listener); + + /** + * Deletes a stored ingest pipeline + */ + ActionFuture deletePipeline(DeletePipelineRequest request); + + /** + * Deletes a stored ingest pipeline + */ + DeletePipelineRequestBuilder prepareDeletePipeline(); + + /** + * Returns a stored ingest pipeline + */ + void getPipeline(GetPipelineRequest request, ActionListener listener); + + /** + * Returns a stored ingest pipeline + */ + ActionFuture getPipeline(GetPipelineRequest request); + + /** + * Returns a stored ingest pipeline + */ + GetPipelineRequestBuilder prepareGetPipeline(); + + /** + * Simulates an ingest pipeline + */ + void simulatePipeline(SimulatePipelineRequest request, ActionListener listener); + + /** + * Simulates an ingest pipeline + */ + ActionFuture simulatePipeline(SimulatePipelineRequest request); + + /** + * Simulates an ingest pipeline + */ + SimulatePipelineRequestBuilder prepareSimulatePipeline(); + /** * Returns this clients settings */ diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index e5a465442bb..bc5ed9410d1 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -272,6 +272,20 @@ import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import 
org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; +import org.elasticsearch.action.ingest.DeletePipelineAction; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; +import org.elasticsearch.action.ingest.GetPipelineAction; +import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineRequestBuilder; +import org.elasticsearch.action.ingest.GetPipelineResponse; +import org.elasticsearch.action.ingest.PutPipelineAction; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; +import org.elasticsearch.action.ingest.SimulatePipelineAction; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.MultiPercolateRequest; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; @@ -791,6 +805,66 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new FieldStatsRequestBuilder(this, FieldStatsAction.INSTANCE); } + @Override + public void putPipeline(PutPipelineRequest request, ActionListener listener) { + execute(PutPipelineAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture putPipeline(PutPipelineRequest request) { + return execute(PutPipelineAction.INSTANCE, request); + } + + @Override + public PutPipelineRequestBuilder preparePutPipeline() { + return new PutPipelineRequestBuilder(this, PutPipelineAction.INSTANCE); + } + + @Override + public void deletePipeline(DeletePipelineRequest request, ActionListener listener) { + 
execute(DeletePipelineAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture deletePipeline(DeletePipelineRequest request) { + return execute(DeletePipelineAction.INSTANCE, request); + } + + @Override + public DeletePipelineRequestBuilder prepareDeletePipeline() { + return new DeletePipelineRequestBuilder(this, DeletePipelineAction.INSTANCE); + } + + @Override + public void getPipeline(GetPipelineRequest request, ActionListener listener) { + execute(GetPipelineAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture getPipeline(GetPipelineRequest request) { + return execute(GetPipelineAction.INSTANCE, request); + } + + @Override + public GetPipelineRequestBuilder prepareGetPipeline() { + return new GetPipelineRequestBuilder(this, GetPipelineAction.INSTANCE); + } + + @Override + public void simulatePipeline(SimulatePipelineRequest request, ActionListener listener) { + execute(SimulatePipelineAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture simulatePipeline(SimulatePipelineRequest request) { + return execute(SimulatePipelineAction.INSTANCE, request); + } + + @Override + public SimulatePipelineRequestBuilder prepareSimulatePipeline() { + return new SimulatePipelineRequestBuilder(this, SimulatePipelineAction.INSTANCE); + } + static class Admin implements AdminClient { private final ClusterAdmin clusterAdmin; diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index 994e0300407..cb70b5a79c8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -19,11 +19,10 @@ package org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.client.Client; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.DeletePipelineAction; -import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; @@ -42,6 +41,6 @@ public class RestDeletePipelineAction extends BaseRestHandler { protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { DeletePipelineRequest request = new DeletePipelineRequest(); request.id(restRequest.param("id")); - client.execute(DeletePipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + client.deletePipeline(request, new RestStatusToXContentListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index 47f41fc437b..7fae61eaed5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -19,12 +19,11 @@ package org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.GetPipelineAction; -import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; @@ -43,6 +42,6 @@ public class RestGetPipelineAction extends BaseRestHandler { protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { GetPipelineRequest request 
= new GetPipelineRequest(); request.ids(Strings.splitStringByCommaToArray(restRequest.param("id"))); - client.execute(GetPipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + client.getPipeline(request, new RestStatusToXContentListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index b63b2eb44a7..98ec67782d5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -19,11 +19,10 @@ package org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.PutPipelineAction; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; @@ -45,6 +44,6 @@ public class RestPutPipelineAction extends BaseRestHandler { if (restRequest.hasContent()) { request.source(restRequest.content()); } - client.execute(PutPipelineAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + client.putPipeline(request, new RestStatusToXContentListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index ed859e2a442..da902bdaa42 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -19,11 +19,10 @@ package 
org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.ingest.SimulatePipelineAction; -import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; @@ -52,6 +51,6 @@ public class RestSimulatePipelineAction extends BaseRestHandler { request.setSource(RestActions.getRestContent(restRequest)); } - client.execute(SimulatePipelineAction.INSTANCE, request, new RestToXContentListener<>(channel)); + client.simulatePipeline(request, new RestToXContentListener<>(channel)); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 43c12255b17..1c50bc38b6a 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -25,17 +25,14 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.ingest.DeletePipelineAction; -import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; -import org.elasticsearch.action.ingest.GetPipelineAction; -import org.elasticsearch.action.ingest.GetPipelineRequestBuilder; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; -import org.elasticsearch.action.ingest.PutPipelineAction; -import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; +import 
org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; -import org.elasticsearch.action.ingest.SimulatePipelineAction; -import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.plugins.Plugin; @@ -69,7 +66,7 @@ public class IngestClientIT extends ESIntegTestCase { } public void testSimulate() throws Exception { - new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) + client().preparePutPipeline() .setId("_id") .setSource(jsonBuilder().startObject() .field("description", "my_pipeline") @@ -81,30 +78,37 @@ public class IngestClientIT extends ESIntegTestCase { .endArray() .endObject().bytes()) .get(); - GetPipelineResponse getResponse = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) + GetPipelineResponse getResponse = client().prepareGetPipeline() .setIds("_id") .get(); assertThat(getResponse.isFound(), is(true)); assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); - SimulatePipelineResponse response = new SimulatePipelineRequestBuilder(client(), SimulatePipelineAction.INSTANCE) + BytesReference bytes = jsonBuilder().startObject() + .startArray("docs") + .startObject() + .field("_index", "index") + .field("_type", "type") + .field("_id", "id") + .startObject("_source") + .field("foo", "bar") + .field("fail", false) + .endObject() + .endObject() + .endArray() + .endObject().bytes(); + SimulatePipelineResponse response; + if (randomBoolean()) { + response = client().prepareSimulatePipeline() .setId("_id") - .setSource(jsonBuilder().startObject() - .startArray("docs") - 
.startObject() - .field("_index", "index") - .field("_type", "type") - .field("_id", "id") - .startObject("_source") - .field("foo", "bar") - .field("fail", false) - .endObject() - .endObject() - .endArray() - .endObject().bytes()) - .get(); - + .setSource(bytes).get(); + } else { + SimulatePipelineRequest request = new SimulatePipelineRequest(); + request.setId("_id"); + request.setSource(bytes); + response = client().simulatePipeline(request).get(); + } assertThat(response.isVerbose(), equalTo(false)); assertThat(response.getPipelineId(), equalTo("_id")); assertThat(response.getResults().size(), equalTo(1)); @@ -122,18 +126,19 @@ public class IngestClientIT extends ESIntegTestCase { public void testBulkWithIngestFailures() throws Exception { createIndex("index"); - new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) - .setId("_id") - .setSource(jsonBuilder().startObject() - .field("description", "my_pipeline") - .startArray("processors") - .startObject() - .startObject("test") - .endObject() - .endObject() - .endArray() - .endObject().bytes()) - .get(); + PutPipelineRequest putPipelineRequest = new PutPipelineRequest(); + putPipelineRequest.id("_id"); + putPipelineRequest.source(jsonBuilder().startObject() + .field("description", "my_pipeline") + .startArray("processors") + .startObject() + .startObject("test") + .endObject() + .endObject() + .endArray() + .endObject().bytes()); + + client().putPipeline(putPipelineRequest).get(); int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); @@ -159,9 +164,10 @@ public class IngestClientIT extends ESIntegTestCase { } public void test() throws Exception { - new PutPipelineRequestBuilder(client(), PutPipelineAction.INSTANCE) - .setId("_id") - .setSource(jsonBuilder().startObject() + + PutPipelineRequest putPipelineRequest = new PutPipelineRequest(); + putPipelineRequest.id("_id"); + putPipelineRequest.source(jsonBuilder().startObject() .field("description", 
"my_pipeline") .startArray("processors") .startObject() @@ -169,11 +175,12 @@ public class IngestClientIT extends ESIntegTestCase { .endObject() .endObject() .endArray() - .endObject().bytes()) - .get(); - GetPipelineResponse getResponse = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); + .endObject().bytes()); + client().putPipeline(putPipelineRequest).get(); + + GetPipelineRequest getPipelineRequest = new GetPipelineRequest(); + getPipelineRequest.ids("_id"); + GetPipelineResponse getResponse = client().getPipeline(getPipelineRequest).get(); assertThat(getResponse.isFound(), is(true)); assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); @@ -191,15 +198,13 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(doc.get("field"), equalTo("value2")); assertThat(doc.get("processed"), equalTo(true)); - DeleteResponse response = new DeletePipelineRequestBuilder(client(), DeletePipelineAction.INSTANCE) - .setId("_id") - .get(); + DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest(); + deletePipelineRequest.id("_id"); + DeleteResponse response = client().deletePipeline(deletePipelineRequest).get(); assertThat(response.isFound(), is(true)); assertThat(response.getId(), equalTo("_id")); - getResponse = new GetPipelineRequestBuilder(client(), GetPipelineAction.INSTANCE) - .setIds("_id") - .get(); + getResponse = client().prepareGetPipeline().setIds("_id").get(); assertThat(getResponse.isFound(), is(false)); assertThat(getResponse.pipelines().size(), equalTo(0)); } From b39a64e49f8fd2b6732282c294c5f584edfc74d0 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 11 Jan 2016 18:02:29 -0800 Subject: [PATCH 212/347] accept grok matches with no captures instead of NPE --- .../main/java/org/elasticsearch/ingest/grok/Grok.java | 10 +++++----- .../elasticsearch/ingest/grok/GrokProcessorTests.java | 11 +++++++++++ 
.../java/org/elasticsearch/ingest/grok/GrokTests.java | 7 +++++++ 3 files changed, 23 insertions(+), 5 deletions(-) diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/Grok.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/Grok.java index 228a2cbab57..abed8413287 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/Grok.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/Grok.java @@ -134,7 +134,7 @@ final class Grok { Map fields = new HashMap<>(); Matcher matcher = compiledExpression.matcher(textAsBytes); int result = matcher.search(0, textAsBytes.length, Option.DEFAULT); - if (result != -1) { + if (result != -1 && compiledExpression.numberOfNames() > 0) { Region region = matcher.getEagerRegion(); for (Iterator entry = compiledExpression.namedBackrefIterator(); entry.hasNext();) { NameEntry e = entry.next(); @@ -148,11 +148,11 @@ final class Grok { GrokMatchGroup match = new GrokMatchGroup(groupName, matchValue); fields.put(match.getName(), match.getValue()); } - } else { - return null; + return fields; + } else if (result != -1) { + return fields; } - - return fields; + return null; } } diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java index bb2de7e2983..9fe203ca396 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java @@ -57,6 +57,17 @@ public class GrokProcessorTests extends ESTestCase { } } + public void testMatchWithoutCaptures() throws Exception { + String fieldName = "value"; + IngestDocument originalDoc = new IngestDocument(new HashMap<>(), new HashMap<>()); + originalDoc.setFieldValue(fieldName, fieldName); + IngestDocument doc = new IngestDocument(originalDoc); + Grok grok = new 
Grok(Collections.emptyMap(), fieldName); + GrokProcessor processor = new GrokProcessor(grok, fieldName); + processor.execute(doc); + assertThat(doc, equalTo(originalDoc)); + } + public void testNotStringField() { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java index df9a7695d87..89e44fc7239 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java @@ -57,6 +57,13 @@ public class GrokTests extends ESTestCase { ); } + public void testMatchWithoutCaptures() { + String line = "value"; + Grok grok = new Grok(basePatterns, "value"); + Map matches = grok.captures(line); + assertEquals(0, matches.size()); + } + public void testSimpleSyslogLine() { String line = "Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]"; Grok grok = new Grok(basePatterns, "%{SYSLOGLINE}"); From 45f192b7ccc12f7946f9bbf9f80be5e738f189fb Mon Sep 17 00:00:00 2001 From: Jason Bryan Date: Tue, 12 Jan 2016 15:52:32 -0500 Subject: [PATCH 213/347] Minor documentation updates. --- docs/plugins/ingest.asciidoc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index e41f9bbd431..ea42932e561 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -21,7 +21,7 @@ on an index or bulk request to tell the ingest plugin what pipeline is going to [source,js] -------------------------------------------------- -PUT /my-index/my-type/my-id?ingest=my_pipeline_id +PUT /my-index/my-type/my-id?pipeline=my_pipeline_id { ... } @@ -80,7 +80,8 @@ name must not exist. 
-------------------------------------------------- { "rename": { - "field": "foo" + "field": "foo", + "to": "foobar" } } -------------------------------------------------- From aa464778b111c07530414aa47b85eede8ce58036 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 13 Jan 2016 12:16:26 +0100 Subject: [PATCH 214/347] fix compile errors --- .../action/ingest/SimulateDocumentSimpleResult.java | 2 +- .../action/ingest/WriteableIngestDocumentTests.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java index 3249775a8e4..74173acff1f 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java @@ -67,7 +67,7 @@ public class SimulateDocumentSimpleResult implements SimulateDocumentResult Date: Wed, 13 Jan 2016 13:23:12 +0100 Subject: [PATCH 215/347] remove final keyword --- .../java/org/elasticsearch/ingest/core/CompoundProcessor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java index b412fb5d6e2..f25e13637c4 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java @@ -30,7 +30,7 @@ import java.util.stream.Collectors; * A Processor that executes a list of other "processors". It executes a separate list of * "onFailureProcessors" when any of the processors throw an {@link Exception}. 
*/ -public final class CompoundProcessor implements Processor { +public class CompoundProcessor implements Processor { static final String ON_FAILURE_MESSAGE_FIELD = "on_failure_message"; static final String ON_FAILURE_PROCESSOR_FIELD = "on_failure_processor"; From f3883343cb20d7144496080c62b3a2b4fc1fa46a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 11 Jan 2016 23:49:56 +0100 Subject: [PATCH 216/347] Move the pipeline configuration from the dedicated index to the cluster state. Closes #15842 --- .../elasticsearch/ElasticsearchException.java | 3 +- .../action/ingest/DeletePipelineAction.java | 7 +- .../action/ingest/DeletePipelineRequest.java | 3 +- .../ingest/DeletePipelineRequestBuilder.java | 3 +- .../ingest/DeletePipelineTransportAction.java | 33 +- .../action/ingest/GetPipelineRequest.java | 4 +- .../ingest/GetPipelineRequestBuilder.java | 4 +- .../action/ingest/GetPipelineResponse.java | 18 +- .../ingest/GetPipelineTransportAction.java | 38 +- .../action/ingest/PutPipelineAction.java | 6 +- .../action/ingest/PutPipelineRequest.java | 4 +- .../ingest/PutPipelineRequestBuilder.java | 2 +- .../ingest/PutPipelineTransportAction.java | 31 +- .../action/ingest/ReloadPipelinesAction.java | 117 ---- .../action/ingest/WritePipelineResponse.java | 48 ++ .../java/org/elasticsearch/client/Client.java | 9 +- .../client/support/AbstractClient.java | 9 +- .../cluster/metadata/MetaData.java | 2 + .../common/SearchScrollIterator.java | 93 ---- .../ingest/IngestBootstrapper.java | 95 +--- .../elasticsearch/ingest/IngestMetadata.java | 124 +++++ .../ingest/PipelineConfiguration.java | 112 ++++ .../ingest/PipelineDefinition.java | 114 ---- .../ingest/PipelineMissingException.java | 42 ++ .../elasticsearch/ingest/PipelineStore.java | 435 ++++----------- .../elasticsearch/ingest/core/Pipeline.java | 45 +- .../ingest/RestDeletePipelineAction.java | 6 +- .../action/ingest/RestGetPipelineAction.java | 1 + .../action/ingest/RestPutPipelineAction.java | 6 +- 
.../ExceptionSerializationTests.java | 1 + .../ingest/ReloadPipelinesActionTests.java | 105 ---- .../common/SearchScrollIteratorTests.java | 59 -- .../ingest/IngestBootstrapperTests.java | 276 ---------- .../elasticsearch/ingest/IngestClientIT.java | 8 +- .../ingest/PipelineStoreTests.java | 519 +++++------------- .../test/ingest_grok/20_grok.yaml | 6 +- .../test/ingest_geoip/20_geoip_processor.yaml | 6 +- .../ingest_mustache/10_ingest_disabled.yaml | 17 +- .../10_pipeline_with_mustache_templates.yaml | 10 +- .../api/ingest.delete_pipeline.json | 8 + .../api/ingest.get_pipeline.json | 4 + .../api/ingest.put_pipeline.json | 8 + .../rest-api-spec/test/ingest/10_crud.yaml | 30 +- .../test/ingest/20_date_processor.yaml | 2 +- .../rest-api-spec/test/ingest/30_mutate.yaml | 4 +- .../test/ingest/40_simulate.yaml | 2 +- .../test/ingest/50_on_failure.yaml | 4 +- .../rest-api-spec/test/ingest/60_fail.yaml | 4 +- 48 files changed, 781 insertions(+), 1706 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/action/ingest/ReloadPipelinesAction.java create mode 100644 core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java delete mode 100644 core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java create mode 100644 core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java create mode 100644 core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java delete mode 100644 core/src/main/java/org/elasticsearch/ingest/PipelineDefinition.java create mode 100644 core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java delete mode 100644 core/src/test/java/org/elasticsearch/action/ingest/ReloadPipelinesActionTests.java delete mode 100644 core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java delete mode 100644 core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java 
b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 4a35bcbcfb0..98c992cf2bc 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -610,7 +610,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte RETRY_ON_REPLICA_EXCEPTION(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class, org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException::new, 136), TYPE_MISSING_EXCEPTION(org.elasticsearch.indices.TypeMissingException.class, org.elasticsearch.indices.TypeMissingException::new, 137), FAILED_TO_COMMIT_CLUSTER_STATE_EXCEPTION(org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class, org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException::new, 140), - QUERY_SHARD_EXCEPTION(org.elasticsearch.index.query.QueryShardException.class, org.elasticsearch.index.query.QueryShardException::new, 141); + QUERY_SHARD_EXCEPTION(org.elasticsearch.index.query.QueryShardException.class, org.elasticsearch.index.query.QueryShardException::new, 141), + PIPELINE_MISSING_EXCEPTION(org.elasticsearch.ingest.PipelineMissingException.class, org.elasticsearch.ingest.PipelineMissingException::new, 142); final Class exceptionClass; final FunctionThatThrowsIOException constructor; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java index 8456d7e0e6a..ba1dd5d385f 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java @@ -20,10 +20,9 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; -import org.elasticsearch.action.delete.DeleteResponse; import 
org.elasticsearch.client.ElasticsearchClient; -public class DeletePipelineAction extends Action { +public class DeletePipelineAction extends Action { public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/delete"; @@ -38,7 +37,7 @@ public class DeletePipelineAction extends Action { +public class DeletePipelineRequestBuilder extends ActionRequestBuilder { public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action) { super(client, action, new DeletePipelineRequest()); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 4f25a9d330c..03f63ff26b7 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -20,9 +20,12 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -31,18 +34,36 @@ import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class DeletePipelineTransportAction extends HandledTransportAction { 
+public class DeletePipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; @Inject - public DeletePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { - super(settings, DeletePipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, DeletePipelineRequest::new); + public DeletePipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { + super(settings, DeletePipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, DeletePipelineRequest::new); this.pipelineStore = bootstrapper.getPipelineStore(); } @Override - protected void doExecute(DeletePipelineRequest request, ActionListener listener) { + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected WritePipelineResponse newResponse() { + return new WritePipelineResponse(); + } + + @Override + protected void masterOperation(DeletePipelineRequest request, ClusterState state, ActionListener listener) throws Exception { pipelineStore.delete(request, listener); } + + @Override + protected ClusterBlockException checkBlock(DeletePipelineRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java index e0bfca6cac4..afde709d699 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java +++ 
b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java @@ -19,8 +19,8 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetPipelineRequest extends ActionRequest { +public class GetPipelineRequest extends MasterNodeReadRequest { private String[] ids; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java index c339603104e..21fa974cd3c 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java @@ -19,10 +19,10 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; -public class GetPipelineRequestBuilder extends ActionRequestBuilder { +public class GetPipelineRequestBuilder extends MasterNodeReadOperationRequestBuilder { public GetPipelineRequestBuilder(ElasticsearchClient client, GetPipelineAction action) { super(client, action, new GetPipelineRequest()); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java index 9a12f4b1d03..9f0b229d322 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java +++ 
b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.ingest.PipelineDefinition; +import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -33,16 +33,16 @@ import java.util.List; public class GetPipelineResponse extends ActionResponse implements StatusToXContent { - private List pipelines; + private List pipelines; public GetPipelineResponse() { } - public GetPipelineResponse(List pipelines) { + public GetPipelineResponse(List pipelines) { this.pipelines = pipelines; } - public List pipelines() { + public List pipelines() { return pipelines; } @@ -52,7 +52,7 @@ public class GetPipelineResponse extends ActionResponse implements StatusToXCont int size = in.readVInt(); pipelines = new ArrayList<>(size); for (int i = 0; i < size; i++) { - pipelines.add(PipelineDefinition.readPipelineDefinitionFrom(in)); + pipelines.add(PipelineConfiguration.readPipelineConfiguration(in)); } } @@ -60,7 +60,7 @@ public class GetPipelineResponse extends ActionResponse implements StatusToXCont public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(pipelines.size()); - for (PipelineDefinition pipeline : pipelines) { + for (PipelineConfiguration pipeline : pipelines) { pipeline.writeTo(out); } } @@ -76,9 +76,11 @@ public class GetPipelineResponse extends ActionResponse implements StatusToXCont @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - for (PipelineDefinition definition : pipelines) { - definition.toXContent(builder, params); + builder.startArray("pipelines"); + for (PipelineConfiguration pipeline : pipelines) { + 
pipeline.toXContent(builder, params); } + builder.endArray(); return builder; } } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java index 471238e0587..cb962550255 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java @@ -21,31 +21,49 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestBootstrapper; -import org.elasticsearch.ingest.PipelineDefinition; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.List; - -public class GetPipelineTransportAction extends HandledTransportAction { +public class GetPipelineTransportAction extends TransportMasterNodeReadAction { private final PipelineStore pipelineStore; @Inject - public GetPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { - super(settings, GetPipelineAction.NAME, threadPool, transportService, actionFilters, 
indexNameExpressionResolver, GetPipelineRequest::new); + public GetPipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { + super(settings, GetPipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, GetPipelineRequest::new); this.pipelineStore = bootstrapper.getPipelineStore(); } @Override - protected void doExecute(GetPipelineRequest request, ActionListener listener) { - List references = pipelineStore.getReference(request.ids()); - listener.onResponse(new GetPipelineResponse(references)); + protected String executor() { + return ThreadPool.Names.SAME; } + + @Override + protected GetPipelineResponse newResponse() { + return new GetPipelineResponse(); + } + + @Override + protected void masterOperation(GetPipelineRequest request, ClusterState state, ActionListener listener) throws Exception { + listener.onResponse(new GetPipelineResponse(pipelineStore.getPipelines(request.ids()))); + } + + @Override + protected ClusterBlockException checkBlock(GetPipelineRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } + } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java index 7f37009577e..8f4b4170f51 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.ElasticsearchClient; -public class PutPipelineAction extends Action { +public class PutPipelineAction extends Action { public static final 
PutPipelineAction INSTANCE = new PutPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/put"; @@ -38,7 +38,7 @@ public class PutPipelineAction extends Action { +public class PutPipelineRequestBuilder extends ActionRequestBuilder { public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action) { super(client, action, new PutPipelineRequest()); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 8f7da7eff07..3b9e738f69b 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -23,6 +23,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -31,18 +36,36 @@ import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class PutPipelineTransportAction extends HandledTransportAction { +public class PutPipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; @Inject - public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters 
actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { - super(settings, PutPipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); + public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IngestBootstrapper bootstrapper) { + super(settings, PutPipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); this.pipelineStore = bootstrapper.getPipelineStore(); } @Override - protected void doExecute(PutPipelineRequest request, ActionListener listener) { + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected WritePipelineResponse newResponse() { + return new WritePipelineResponse(); + } + + @Override + protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener listener) throws Exception { pipelineStore.put(request, listener); } + + @Override + protected ClusterBlockException checkBlock(PutPipelineRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/ReloadPipelinesAction.java b/core/src/main/java/org/elasticsearch/action/ingest/ReloadPipelinesAction.java deleted file mode 100644 index 452f3a3341f..00000000000 --- a/core/src/main/java/org/elasticsearch/action/ingest/ReloadPipelinesAction.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.PipelineStore; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportChannel; -import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.transport.TransportRequestHandler; -import org.elasticsearch.transport.TransportResponse; -import org.elasticsearch.transport.TransportResponseHandler; -import org.elasticsearch.transport.TransportService; - -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; - -/** - * An internal api that refreshes the in-memory representation of all the pipelines on all ingest nodes. 
- */ -public class ReloadPipelinesAction extends AbstractComponent implements TransportRequestHandler { - - public static final String ACTION_NAME = "internal:admin/ingest/reload/pipelines"; - - private final ClusterService clusterService; - private final TransportService transportService; - private final PipelineStore pipelineStore; - - public ReloadPipelinesAction(Settings settings, PipelineStore pipelineStore, ClusterService clusterService, TransportService transportService) { - super(settings); - this.pipelineStore = pipelineStore; - this.clusterService = clusterService; - this.transportService = transportService; - transportService.registerRequestHandler(ACTION_NAME, ReloadPipelinesRequest::new, ThreadPool.Names.MANAGEMENT, this); - } - - public void reloadPipelinesOnAllNodes(Consumer listener) { - AtomicBoolean failed = new AtomicBoolean(); - DiscoveryNodes nodes = clusterService.state().getNodes(); - AtomicInteger expectedResponses = new AtomicInteger(nodes.size()); - for (DiscoveryNode node : nodes) { - ReloadPipelinesRequest nodeRequest = new ReloadPipelinesRequest(); - transportService.sendRequest(node, ACTION_NAME, nodeRequest, new TransportResponseHandler() { - @Override - public ReloadPipelinesResponse newInstance() { - return new ReloadPipelinesResponse(); - } - - @Override - public void handleResponse(ReloadPipelinesResponse response) { - decrementAndReturn(); - } - - @Override - public void handleException(TransportException exp) { - logger.warn("failed to update pipelines on remote node [{}]", exp, node); - failed.set(true); - decrementAndReturn(); - } - - void decrementAndReturn() { - if (expectedResponses.decrementAndGet() == 0) { - listener.accept(!failed.get()); - } - } - - @Override - public String executor() { - return ThreadPool.Names.SAME; - } - }); - } - } - - @Override - public void messageReceived(ReloadPipelinesRequest request, TransportChannel channel) throws Exception { - try { - pipelineStore.updatePipelines(); - 
channel.sendResponse(new ReloadPipelinesResponse()); - } catch (Throwable e) { - logger.warn("failed to update pipelines", e); - channel.sendResponse(e); - } - } - - final static class ReloadPipelinesRequest extends TransportRequest { - - } - - final static class ReloadPipelinesResponse extends TransportResponse { - - } - -} diff --git a/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java new file mode 100644 index 00000000000..885fd9f35d6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.ingest; + +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class WritePipelineResponse extends AcknowledgedResponse { + + WritePipelineResponse() { + } + + public WritePipelineResponse(boolean acknowledge) { + super(acknowledge); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } +} diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index 2d7e8bde0a6..984f79308a8 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -61,6 +61,7 @@ import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.action.percolate.MultiPercolateRequest; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; import org.elasticsearch.action.percolate.MultiPercolateResponse; @@ -605,12 +606,12 @@ public interface Client extends ElasticsearchClient, Releasable { /** * Stores an ingest pipeline */ - void putPipeline(PutPipelineRequest request, ActionListener listener); + void putPipeline(PutPipelineRequest request, ActionListener listener); /** * Stores an ingest pipeline */ - ActionFuture putPipeline(PutPipelineRequest request); + ActionFuture putPipeline(PutPipelineRequest request); /** * Stores an ingest pipeline @@ 
-620,12 +621,12 @@ public interface Client extends ElasticsearchClient, Releasable { /** * Deletes a stored ingest pipeline */ - void deletePipeline(DeletePipelineRequest request, ActionListener listener); + void deletePipeline(DeletePipelineRequest request, ActionListener listener); /** * Deletes a stored ingest pipeline */ - ActionFuture deletePipeline(DeletePipelineRequest request); + ActionFuture deletePipeline(DeletePipelineRequest request); /** * Deletes a stored ingest pipeline diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index bc5ed9410d1..69806aeab84 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -286,6 +286,7 @@ import org.elasticsearch.action.ingest.SimulatePipelineAction; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.MultiPercolateRequest; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; @@ -806,12 +807,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client } @Override - public void putPipeline(PutPipelineRequest request, ActionListener listener) { + public void putPipeline(PutPipelineRequest request, ActionListener listener) { execute(PutPipelineAction.INSTANCE, request, listener); } @Override - public ActionFuture putPipeline(PutPipelineRequest request) { + public ActionFuture putPipeline(PutPipelineRequest request) { return execute(PutPipelineAction.INSTANCE, request); } @@ -821,12 +822,12 @@ public abstract class AbstractClient extends 
AbstractComponent implements Client } @Override - public void deletePipeline(DeletePipelineRequest request, ActionListener listener) { + public void deletePipeline(DeletePipelineRequest request, ActionListener listener) { execute(DeletePipelineAction.INSTANCE, request, listener); } @Override - public ActionFuture deletePipeline(DeletePipelineRequest request) { + public ActionFuture deletePipeline(DeletePipelineRequest request) { return execute(DeletePipelineAction.INSTANCE, request); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 002d1a51107..0e41dda1888 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -111,6 +112,7 @@ public class MetaData implements Iterable, Diffable, Fr static { // register non plugin custom metadata registerPrototype(RepositoriesMetaData.TYPE, RepositoriesMetaData.PROTO); + registerPrototype(IngestMetadata.TYPE, IngestMetadata.PROTO); } /** diff --git a/core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java b/core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java deleted file mode 100644 index 18535d1626e..00000000000 --- a/core/src/main/java/org/elasticsearch/common/SearchScrollIterator.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common; - -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.search.SearchHit; - -import java.util.Collections; -import java.util.Iterator; - -/** - * An iterator that easily helps to consume all hits from a scroll search. - */ -public final class SearchScrollIterator implements Iterator { - - /** - * Creates an iterator that returns all matching hits of a scroll search via an iterator. - * The iterator will return all hits per scroll search and execute additional scroll searches - * to get more hits until all hits have been returned by the scroll search on the ES side. 
- */ - public static Iterable createIterator(Client client, TimeValue scrollTimeout, SearchRequest searchRequest) { - searchRequest.scroll(scrollTimeout); - SearchResponse searchResponse = client.search(searchRequest).actionGet(scrollTimeout); - if (searchResponse.getHits().getTotalHits() == 0) { - return Collections.emptyList(); - } else { - return () -> new SearchScrollIterator(client, scrollTimeout, searchResponse); - } - } - - private final Client client; - private final TimeValue scrollTimeout; - - private int currentIndex; - private SearchHit[] currentHits; - private SearchResponse searchResponse; - - private SearchScrollIterator(Client client, TimeValue scrollTimeout, SearchResponse searchResponse) { - this.client = client; - this.scrollTimeout = scrollTimeout; - this.searchResponse = searchResponse; - this.currentHits = searchResponse.getHits().getHits(); - } - - @Override - public boolean hasNext() { - if (currentIndex < currentHits.length) { - return true; - } else { - if (searchResponse == null) { - return false; - } - - SearchScrollRequest request = new SearchScrollRequest(searchResponse.getScrollId()); - request.scroll(scrollTimeout); - searchResponse = client.searchScroll(request).actionGet(scrollTimeout); - if (searchResponse.getHits().getHits().length == 0) { - searchResponse = null; - return false; - } else { - currentHits = searchResponse.getHits().getHits(); - currentIndex = 0; - return true; - } - } - } - - @Override - public SearchHit next() { - return currentHits[currentIndex++]; - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java index e824786bd82..cf145821859 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java @@ -48,11 +48,8 @@ import java.io.InputStream; * Instantiates and wires all the services that the ingest plugin will be needing. 
* Also the bootstrapper is in charge of starting and stopping the ingest plugin based on the cluster state. */ -public class IngestBootstrapper extends AbstractLifecycleComponent implements ClusterStateListener { +public class IngestBootstrapper extends AbstractLifecycleComponent { - static final String INGEST_INDEX_TEMPLATE_NAME = "ingest-template"; - - private final ThreadPool threadPool; private final Environment environment; private final PipelineStore pipelineStore; private final PipelineExecutionService pipelineExecutionService; @@ -64,31 +61,12 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl // pipelines into NodeModule? I'd really like to prevent adding yet another module. @Inject public IngestBootstrapper(Settings settings, ThreadPool threadPool, Environment environment, - ClusterService clusterService, TransportService transportService, - ProcessorsRegistry processorsRegistry) { + ClusterService clusterService, ProcessorsRegistry processorsRegistry) { super(settings); - this.threadPool = threadPool; this.environment = environment; this.processorsRegistry = processorsRegistry; - this.pipelineStore = new PipelineStore(settings, clusterService, transportService); + this.pipelineStore = new PipelineStore(settings, clusterService); this.pipelineExecutionService = new PipelineExecutionService(pipelineStore, threadPool); - - boolean isNoTribeNode = settings.getByPrefix("tribe.").getAsMap().isEmpty(); - if (isNoTribeNode) { - clusterService.add(this); - } - } - - // for testing: - IngestBootstrapper(Settings settings, ThreadPool threadPool, ClusterService clusterService, - PipelineStore pipelineStore, PipelineExecutionService pipelineExecutionService) { - super(settings); - this.threadPool = threadPool; - this.environment = null; - clusterService.add(this); - this.pipelineStore = pipelineStore; - this.pipelineExecutionService = pipelineExecutionService; - this.processorsRegistry = null; } public PipelineStore getPipelineStore() 
{ @@ -99,49 +77,11 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl return pipelineExecutionService; } - @Inject - public void setClient(Client client) { - pipelineStore.setClient(client); - } - @Inject public void setScriptService(ScriptService scriptService) { pipelineStore.buildProcessorFactoryRegistry(processorsRegistry, environment, scriptService); } - @Override - public void clusterChanged(ClusterChangedEvent event) { - ClusterState state = event.state(); - if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { - return; - } - - if (pipelineStore.isStarted()) { - if (validClusterState(state) == false) { - stopPipelineStore("cluster state invalid [" + state + "]"); - } - } else { - if (validClusterState(state)) { - startPipelineStore(state.metaData()); - } - } - } - - boolean validClusterState(ClusterState state) { - if (state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_WRITES) || - state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ALL)) { - return false; - } - - if (state.getMetaData().hasConcreteIndex(PipelineStore.INDEX)) { - IndexRoutingTable routingTable = state.getRoutingTable().index(PipelineStore.INDEX); - return routingTable.allPrimaryShardsActive(); - } else { - // it will be ready when auto create index kicks in before the first pipeline doc gets added - return true; - } - } - @Override protected void doStart() { } @@ -159,33 +99,4 @@ public class IngestBootstrapper extends AbstractLifecycleComponent implements Cl } } - void startPipelineStore(MetaData metaData) { - try { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - try { - pipelineStore.start(); - } catch (Exception e1) { - logger.warn("pipeline store failed to start, retrying...", e1); - startPipelineStore(metaData); - } - }); - } catch (EsRejectedExecutionException e) { - logger.debug("async pipeline store start failed", e); - } - } - - void stopPipelineStore(String reason) { - try { - 
threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - try { - pipelineStore.stop(reason); - } catch (Exception e) { - logger.error("pipeline store stop failure", e); - } - }); - } catch (EsRejectedExecutionException e) { - logger.debug("async pipeline store stop failed", e); - } - } - } diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java new file mode 100644 index 00000000000..4ee41c58ad7 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java @@ -0,0 +1,124 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.cluster.AbstractDiffable; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +/** + * Holds the ingest pipelines that are available in the cluster + */ +public final class IngestMetadata extends AbstractDiffable implements MetaData.Custom { + + public final static String TYPE = "ingest"; + public final static IngestMetadata PROTO = new IngestMetadata(); + + // We can't use Pipeline class directly in cluster state, because we don't have the processor factories around when + // IngestMetadata is registered as custom metadata. + private final Map pipelines; + + private IngestMetadata() { + this.pipelines = Collections.emptyMap(); + } + + public IngestMetadata(Map pipelines) { + this.pipelines = Collections.unmodifiableMap(pipelines); + } + + @Override + public String type() { + return TYPE; + } + + public Map getPipelines() { + return pipelines; + } + + @Override + public MetaData.Custom readFrom(StreamInput in) throws IOException { + int size = in.readVInt(); + Map pipelines = new HashMap<>(size); + for (int i = 0; i < size; i++) { + PipelineConfiguration pipeline = PipelineConfiguration.readPipelineConfiguration(in); + pipelines.put(pipeline.getId(), pipeline); + } + return new IngestMetadata(pipelines); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(pipelines.size()); + for (PipelineConfiguration pipeline : pipelines.values()) { + pipeline.writeTo(out); + } + } + + @Override + public MetaData.Custom fromXContent(XContentParser parser) throws IOException { + XContentParser.Token 
token; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + switch (token) { + case FIELD_NAME: + currentFieldName = parser.currentName(); + break; + case START_ARRAY: + if ("pipelines".equals(currentFieldName)) { + Map pipelines = new HashMap<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.START_OBJECT) { + PipelineConfiguration pipeline = new PipelineConfiguration(parser); + pipelines.put(pipeline.getId(), pipeline); + } + } + return new IngestMetadata(pipelines); + } + break; + } + } + return PROTO; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("pipelines"); + for (PipelineConfiguration pipeline : pipelines.values()) { + pipeline.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public EnumSet context() { + return MetaData.API_AND_GATEWAY; + } + +} diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java new file mode 100644 index 00000000000..da0ff4c3e2a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +/** + * Encapsulates a pipeline's id and configuration as a blob + */ +public final class PipelineConfiguration implements Writeable, ToXContent { + + private final static PipelineConfiguration PROTOTYPE = new PipelineConfiguration(null, null); + + public static PipelineConfiguration readPipelineConfiguration(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + private final String id; + // Store config as bytes reference, because the config is only used when the pipeline store reads the cluster state + // and the way the map of maps config is read requires a deep copy (it removes instead of gets entries to check for unused options) + // also the get pipeline api just directly returns this to the caller + private final BytesReference config; + + PipelineConfiguration(XContentParser parser) throws IOException { + String id = null; + BytesReference config = null; + + XContentParser.Token token; + String currentFieldName = null; + while ((token = 
parser.nextToken()) != XContentParser.Token.END_OBJECT) { + switch (token) { + case FIELD_NAME: + currentFieldName = parser.currentName(); + break; + case VALUE_STRING: + if ("id".equals(currentFieldName)) { + id = parser.text(); + } + break; + case START_OBJECT: + XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent()); + XContentHelper.copyCurrentEvent(builder.generator(), parser); + config = builder.bytes(); + break; + } + } + + this.id = Objects.requireNonNull(id); + this.config = Objects.requireNonNull(config); + } + + public PipelineConfiguration(String id, BytesReference config) { + this.id = id; + this.config = config; + } + + public String getId() { + return id; + } + + public Map getConfigAsMap() { + return XContentHelper.convertToMap(config, true).v2(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("id", id); + builder.field("config", getConfigAsMap()); + builder.endObject(); + return builder; + } + + @Override + public PipelineConfiguration readFrom(StreamInput in) throws IOException { + return new PipelineConfiguration(in.readString(), in.readBytesReference()); + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeBytesReference(config); + } + +} diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineDefinition.java b/core/src/main/java/org/elasticsearch/ingest/PipelineDefinition.java deleted file mode 100644 index 94c584ad121..00000000000 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineDefinition.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.ingest.core.Pipeline; - -import java.io.IOException; - -public class PipelineDefinition implements Writeable, ToXContent { - - private static final PipelineDefinition PROTOTYPE = new PipelineDefinition((String) null, -1, null); - - public static PipelineDefinition readPipelineDefinitionFrom(StreamInput in) throws IOException { - return PROTOTYPE.readFrom(in); - } - - private final String id; - private final long version; - private final BytesReference source; - - private final Pipeline pipeline; - - PipelineDefinition(Pipeline pipeline, long version, BytesReference source) { - this.id = pipeline.getId(); - this.version = version; - this.source = source; - this.pipeline = pipeline; - } - - PipelineDefinition(String id, long version, BytesReference source) { - this.id = id; - this.version = version; - this.source = source; - this.pipeline = null; - } - - public String getId() { - return id; - } - - public long getVersion() { - 
return version; - } - - public BytesReference getSource() { - return source; - } - - Pipeline getPipeline() { - return pipeline; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - PipelineDefinition holder = (PipelineDefinition) o; - return source.equals(holder.source); - } - - @Override - public int hashCode() { - return source.hashCode(); - } - - @Override - public PipelineDefinition readFrom(StreamInput in) throws IOException { - String id = in.readString(); - long version = in.readLong(); - BytesReference source = in.readBytesReference(); - return new PipelineDefinition(id, version, source); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - out.writeLong(version); - out.writeBytesReference(source); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(id); - XContentHelper.writeRawField("_source", source, builder, params); - builder.field("_version", version); - builder.endObject(); - return builder; - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java b/core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java new file mode 100644 index 00000000000..82637ae2ded --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +public class PipelineMissingException extends ElasticsearchException { + + public PipelineMissingException(String id) { + super("pipeline [{}] is missing", id); + } + + public PipelineMissingException(StreamInput in) throws IOException { + super(in); + } + + @Override + public RestStatus status() { + return RestStatus.NOT_FOUND; + } +} diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 785eb5829bc..ab11f99246e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -21,43 +21,24 @@ package org.elasticsearch.ingest; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.SearchRequest; -import 
org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; +import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.SearchScrollIterator; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.ReloadPipelinesAction; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.transport.TransportService; import java.io.Closeable; import java.io.IOException; @@ -68,66 +49,22 @@ import java.util.List; import java.util.Map; import java.util.function.BiFunction; -public class PipelineStore extends AbstractComponent implements Closeable { 
+public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener { - public final static String INDEX = ".ingest"; - public final static String TYPE = "pipeline"; - - final static Settings INGEST_INDEX_SETTING = Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put("index.mapper.dynamic", false) - .build(); - - final static String PIPELINE_MAPPING; - - static { - try { - PIPELINE_MAPPING = XContentFactory.jsonBuilder().startObject() - .field("dynamic", "strict") - .startObject("_all") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("processors") - .field("type", "object") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("on_failure") - .field("type", "object") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("description") - .field("type", "string") - .endObject() - .endObject() - .endObject().string(); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private Client client; - private final TimeValue scrollTimeout; private final ClusterService clusterService; - private final ReloadPipelinesAction reloadPipelinesAction; private final Pipeline.Factory factory = new Pipeline.Factory(); private Map processorFactoryRegistry; - private volatile boolean started = false; - private volatile Map pipelines = new HashMap<>(); + // Ideally this should be in IngestMetadata class, but we don't have the processor factories around there. + // We know of all the processor factories when a node with all its plugin have been initialized. Also some + // processor factories rely on other node services. Custom metadata is statically registered when classes + // are loaded, so in the cluster state we just save the pipeline config and here we keep the actual pipelines around. 
+ volatile Map pipelines = new HashMap<>(); - public PipelineStore(Settings settings, ClusterService clusterService, TransportService transportService) { + public PipelineStore(Settings settings, ClusterService clusterService) { super(settings); this.clusterService = clusterService; - this.scrollTimeout = settings.getAsTime("ingest.pipeline.store.scroll.timeout", TimeValue.timeValueSeconds(30)); - this.reloadPipelinesAction = new ReloadPipelinesAction(settings, this, clusterService, transportService); - } - - public void setClient(Client client) { - this.client = client; + clusterService.add(this); } public void buildProcessorFactoryRegistry(ProcessorsRegistry processorsRegistry, Environment environment, ScriptService scriptService) { @@ -142,7 +79,6 @@ public class PipelineStore extends AbstractComponent implements Closeable { @Override public void close() throws IOException { - stop("closing"); // TODO: When org.elasticsearch.node.Node can close Closable instances we should try to remove this code, // since any wired closable should be able to close itself List closeables = new ArrayList<>(); @@ -154,18 +90,63 @@ public class PipelineStore extends AbstractComponent implements Closeable { IOUtils.close(closeables); } + @Override + public void clusterChanged(ClusterChangedEvent event) { + innerUpdatePipelines(event.state()); + } + + void innerUpdatePipelines(ClusterState state) { + IngestMetadata ingestMetadata = state.getMetaData().custom(IngestMetadata.TYPE); + if (ingestMetadata == null) { + return; + } + + Map pipelines = new HashMap<>(); + for (PipelineConfiguration pipeline : ingestMetadata.getPipelines().values()) { + try { + pipelines.put(pipeline.getId(), constructPipeline(pipeline.getId(), pipeline.getConfigAsMap())); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + this.pipelines = Collections.unmodifiableMap(pipelines); + } + /** * Deletes the pipeline specified by id in the request. 
*/ - public void delete(DeletePipelineRequest request, ActionListener listener) { - ensureReady(); + public void delete(DeletePipelineRequest request, ActionListener listener) { + clusterService.submitStateUpdateTask("delete-pipeline-" + request.id(), new AckedClusterStateUpdateTask(request, listener) { - DeleteRequest deleteRequest = new DeleteRequest(request); - deleteRequest.index(PipelineStore.INDEX); - deleteRequest.type(PipelineStore.TYPE); - deleteRequest.id(request.id()); - deleteRequest.refresh(true); - client.delete(deleteRequest, handleWriteResponseAndReloadPipelines(listener)); + @Override + protected WritePipelineResponse newResponse(boolean acknowledged) { + return new WritePipelineResponse(acknowledged); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + return innerDelete(request, currentState); + } + }); + } + + ClusterState innerDelete(DeletePipelineRequest request, ClusterState currentState) { + IngestMetadata currentIngestMetadata = currentState.metaData().custom(IngestMetadata.TYPE); + if (currentIngestMetadata == null) { + return currentState; + } + Map pipelines = currentIngestMetadata.getPipelines(); + if (pipelines.containsKey(request.id()) == false) { + throw new PipelineMissingException(request.id()); + } else { + pipelines = new HashMap<>(pipelines); + pipelines.remove(request.id()); + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)) + .build()); + return newState.build(); + } } /** @@ -173,280 +154,92 @@ public class PipelineStore extends AbstractComponent implements Closeable { * * @throws IllegalArgumentException If the pipeline holds incorrect configuration */ - public void put(PutPipelineRequest request, ActionListener listener) throws IllegalArgumentException { - ensureReady(); - + public void put(PutPipelineRequest request, ActionListener 
listener) throws IllegalArgumentException { try { - // validates the pipeline and processor configuration: + // validates the pipeline and processor configuration before submitting a cluster update task: Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); constructPipeline(request.id(), pipelineConfig); } catch (Exception e) { throw new IllegalArgumentException("Invalid pipeline configuration", e); } + clusterService.submitStateUpdateTask("put-pipeline-" + request.id(), new AckedClusterStateUpdateTask(request, listener) { - ClusterState state = clusterService.state(); - if (isIngestIndexPresent(state)) { - innerPut(request, listener); - } else { - CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX); - createIndexRequest.settings(INGEST_INDEX_SETTING); - createIndexRequest.mapping(TYPE, PIPELINE_MAPPING); - client.admin().indices().create(createIndexRequest, new ActionListener() { - @Override - public void onResponse(CreateIndexResponse createIndexResponse) { - innerPut(request, listener); - } + @Override + protected WritePipelineResponse newResponse(boolean acknowledged) { + return new WritePipelineResponse(acknowledged); + } - @Override - public void onFailure(Throwable e) { - listener.onFailure(e); - } - }); - } + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + return innerPut(request, currentState); + } + }); } - private void innerPut(PutPipelineRequest request, ActionListener listener) { - IndexRequest indexRequest = new IndexRequest(request); - indexRequest.index(PipelineStore.INDEX); - indexRequest.type(PipelineStore.TYPE); - indexRequest.id(request.id()); - indexRequest.source(request.source()); - indexRequest.refresh(true); - client.index(indexRequest, handleWriteResponseAndReloadPipelines(listener)); + ClusterState innerPut(PutPipelineRequest request, ClusterState currentState) { + IngestMetadata currentIngestMetadata = currentState.metaData().custom(IngestMetadata.TYPE); 
+ Map pipelines; + if (currentIngestMetadata != null) { + pipelines = new HashMap<>(currentIngestMetadata.getPipelines()); + } else { + pipelines = new HashMap<>(); + } + + pipelines.put(request.id(), new PipelineConfiguration(request.id(), request.source())); + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)) + .build()); + return newState.build(); } /** * Returns the pipeline by the specified id */ public Pipeline get(String id) { - ensureReady(); - - PipelineDefinition ref = pipelines.get(id); - if (ref != null) { - return ref.getPipeline(); - } else { - return null; - } + return pipelines.get(id); } public Map getProcessorFactoryRegistry() { return processorFactoryRegistry; } - public List getReference(String... ids) { - ensureReady(); + /** + * @return pipeline configuration specified by id. If multiple ids or wildcards are specified multiple pipelines + * may be returned + */ + // Returning PipelineConfiguration instead of Pipeline, because Pipeline and Processor interface don't + // know how to serialize themselves. + public List getPipelines(String... ids) { + IngestMetadata ingestMetadata = clusterService.state().getMetaData().custom(IngestMetadata.TYPE); + return innerGetPipelines(ingestMetadata, ids); + } - List result = new ArrayList<>(ids.length); + List innerGetPipelines(IngestMetadata ingestMetadata, String... 
ids) { + if (ingestMetadata == null) { + return Collections.emptyList(); + } + + List result = new ArrayList<>(ids.length); for (String id : ids) { if (Regex.isSimpleMatchPattern(id)) { - for (Map.Entry entry : pipelines.entrySet()) { + for (Map.Entry entry : ingestMetadata.getPipelines().entrySet()) { if (Regex.simpleMatch(id, entry.getKey())) { result.add(entry.getValue()); } } } else { - PipelineDefinition reference = pipelines.get(id); - if (reference != null) { - result.add(reference); + PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(id); + if (pipeline != null) { + result.add(pipeline); } } } return result; } - public synchronized void updatePipelines() throws Exception { - // note: this process isn't fast or smart, but the idea is that there will not be many pipelines, - // so for that reason the goal is to keep the update logic simple. - - int changed = 0; - Map newPipelines = new HashMap<>(pipelines); - for (SearchHit hit : readAllPipelines()) { - String pipelineId = hit.getId(); - BytesReference pipelineSource = hit.getSourceRef(); - PipelineDefinition current = newPipelines.get(pipelineId); - if (current != null) { - // If we first read from a primary shard copy and then from a replica copy, - // and a write did not yet make it into the replica shard - // then the source is not equal but we don't update because the current pipeline is the latest: - if (current.getVersion() > hit.getVersion()) { - continue; - } - if (current.getSource().equals(pipelineSource)) { - continue; - } - } - - changed++; - Pipeline pipeline = constructPipeline(hit.getId(), hit.sourceAsMap()); - newPipelines.put(pipelineId, new PipelineDefinition(pipeline, hit.getVersion(), pipelineSource)); - } - - int removed = 0; - for (String existingPipelineId : pipelines.keySet()) { - if (pipelineExists(existingPipelineId) == false) { - newPipelines.remove(existingPipelineId); - removed++; - } - } - - if (changed != 0 || removed != 0) { - logger.debug("adding or updating 
[{}] pipelines and [{}] pipelines removed", changed, removed); - pipelines = newPipelines; - } else { - logger.debug("no pipelines changes detected"); - } - } - private Pipeline constructPipeline(String id, Map config) throws Exception { return factory.create(id, config, processorFactoryRegistry); } - boolean pipelineExists(String pipelineId) { - GetRequest request = new GetRequest(PipelineStore.INDEX, PipelineStore.TYPE, pipelineId); - try { - GetResponse response = client.get(request).actionGet(); - return response.isExists(); - } catch (IndexNotFoundException e) { - // the ingest index doesn't exist, so the pipeline doesn't either: - return false; - } - } - - /** - * @param clusterState The cluster just to check whether the ingest index exists and the state of the ingest index - * @throws IllegalStateException If the ingest template exists, but is in an invalid state - * @return true when the ingest index exists and has the expected settings and mappings or returns - * false when the ingest index doesn't exists and needs to be created. 
- */ - boolean isIngestIndexPresent(ClusterState clusterState) throws IllegalStateException { - if (clusterState.getMetaData().hasIndex(INDEX)) { - IndexMetaData indexMetaData = clusterState.getMetaData().index(INDEX); - Settings indexSettings = indexMetaData.getSettings(); - int numberOfShards = indexSettings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1); - if (numberOfShards != 1) { - throw new IllegalStateException("illegal ingest index setting, [" + IndexMetaData.SETTING_NUMBER_OF_SHARDS + "] setting is [" + numberOfShards + "] while [1] is expected"); - } - int numberOfReplicas = indexSettings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, -1); - if (numberOfReplicas != 1) { - throw new IllegalStateException("illegal ingest index setting, [" + IndexMetaData.SETTING_NUMBER_OF_REPLICAS + "] setting is [" + numberOfReplicas + "] while [1] is expected"); - } - boolean dynamicMappings = indexSettings.getAsBoolean("index.mapper.dynamic", true); - if (dynamicMappings != false) { - throw new IllegalStateException("illegal ingest index setting, [index.mapper.dynamic] setting is [" + dynamicMappings + "] while [false] is expected"); - } - - if (indexMetaData.getMappings().size() != 1 && indexMetaData.getMappings().containsKey(TYPE) == false) { - throw new IllegalStateException("illegal ingest mappings, only [" + TYPE + "] mapping is allowed to exist in the " + INDEX +" index"); - } - - try { - Map pipelineMapping = indexMetaData.getMappings().get(TYPE).getSourceAsMap(); - String dynamicMapping = (String) XContentMapValues.extractValue("dynamic", pipelineMapping); - if ("strict".equals(dynamicMapping) == false) { - throw new IllegalStateException("illegal ingest mapping, pipeline mapping must be strict"); - } - Boolean allEnabled = (Boolean) XContentMapValues.extractValue("_all.enabled", pipelineMapping); - if (Boolean.FALSE.equals(allEnabled) == false) { - throw new IllegalStateException("illegal ingest mapping, _all field is enabled"); - } - - String 
processorsType = (String) XContentMapValues.extractValue("properties.processors.type", pipelineMapping); - if ("object".equals(processorsType) == false) { - throw new IllegalStateException("illegal ingest mapping, processors field's type is [" + processorsType + "] while [object] is expected"); - } - - Boolean processorsEnabled = (Boolean) XContentMapValues.extractValue("properties.processors.enabled", pipelineMapping); - if (Boolean.FALSE.equals(processorsEnabled) == false) { - throw new IllegalStateException("illegal ingest mapping, processors field enabled option is [true] while [false] is expected"); - } - - String processorsDynamic = (String) XContentMapValues.extractValue("properties.processors.dynamic", pipelineMapping); - if ("true".equals(processorsDynamic) == false) { - throw new IllegalStateException("illegal ingest mapping, processors field dynamic option is [false] while [true] is expected"); - } - - String onFailureType = (String) XContentMapValues.extractValue("properties.on_failure.type", pipelineMapping); - if ("object".equals(onFailureType) == false) { - throw new IllegalStateException("illegal ingest mapping, on_failure field type option is [" + onFailureType + "] while [object] is expected"); - } - - Boolean onFailureEnabled = (Boolean) XContentMapValues.extractValue("properties.on_failure.enabled", pipelineMapping); - if (Boolean.FALSE.equals(onFailureEnabled) == false) { - throw new IllegalStateException("illegal ingest mapping, on_failure field enabled option is [true] while [false] is expected"); - } - - String onFailureDynamic = (String) XContentMapValues.extractValue("properties.on_failure.dynamic", pipelineMapping); - if ("true".equals(onFailureDynamic) == false) { - throw new IllegalStateException("illegal ingest mapping, on_failure field dynamic option is [false] while [true] is expected"); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - return true; - } else { - return false; - } - } - - - synchronized void 
start() throws Exception { - if (started) { - logger.debug("Pipeline already started"); - } else { - updatePipelines(); - started = true; - logger.debug("Pipeline store started with [{}] pipelines", pipelines.size()); - } - } - - synchronized void stop(String reason) { - if (started) { - started = false; - pipelines = new HashMap<>(); - logger.debug("Pipeline store stopped, reason [{}]", reason); - } else { - logger.debug("Pipeline alreadt stopped"); - } - } - - public boolean isStarted() { - return started; - } - - private Iterable readAllPipelines() { - // TODO: the search should be replaced with an ingest API when it is available - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); - sourceBuilder.version(true); - sourceBuilder.sort("_doc", SortOrder.ASC); - SearchRequest searchRequest = new SearchRequest(PipelineStore.INDEX); - searchRequest.source(sourceBuilder); - searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); - return SearchScrollIterator.createIterator(client, scrollTimeout, searchRequest); - } - - private void ensureReady() { - if (started == false) { - throw new IllegalStateException("pipeline store isn't ready yet"); - } - } - - @SuppressWarnings("unchecked") - private ActionListener handleWriteResponseAndReloadPipelines(ActionListener listener) { - return new ActionListener() { - @Override - public void onResponse(T result) { - try { - reloadPipelinesAction.reloadPipelinesOnAllNodes(reloadResult -> listener.onResponse(result)); - } catch (Throwable e) { - listener.onFailure(e); - } - } - - @Override - public void onFailure(Throwable e) { - listener.onFailure(e); - } - }; - } - } diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java index 96e91737af7..84ddb6aa9e7 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java @@ -17,7 +17,6 @@ * under the 
License. */ - package org.elasticsearch.ingest.core; import java.util.ArrayList; @@ -78,20 +77,16 @@ public final class Pipeline { } public final static class Factory { - private Processor readProcessor(Map processorRegistry, String type, Map config) throws Exception { - Processor.Factory factory = processorRegistry.get(type); - if (factory != null) { - List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); - Processor processor = factory.create(config); - if (config.isEmpty() == false) { - throw new IllegalArgumentException("processor [" + type + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray())); - } - if (onFailureProcessors.isEmpty()) { - return processor; - } - return new CompoundProcessor(Collections.singletonList(processor), onFailureProcessors); + + public Pipeline create(String id, Map config, Map processorRegistry) throws Exception { + String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); // TODO(simonw): can we make these strings constants? 
+ List processors = readProcessors("processors", processorRegistry, config); + List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); + if (config.isEmpty() == false) { + throw new IllegalArgumentException("pipeline [" + id + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray())); } - throw new IllegalArgumentException("No processor type exists with name [" + type + "]"); + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.unmodifiableList(processors), Collections.unmodifiableList(onFailureProcessors)); + return new Pipeline(id, description, compoundProcessor); } private List readProcessors(String fieldName, Map processorRegistry, Map config) throws Exception { @@ -108,12 +103,22 @@ public final class Pipeline { return onFailureProcessors; } - public Pipeline create(String id, Map config, Map processorRegistry) throws Exception { - String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); // TODO(simonw): can we make these strings constants? 
- List processors = readProcessors("processors", processorRegistry, config); - List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); - CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.unmodifiableList(processors), Collections.unmodifiableList(onFailureProcessors)); - return new Pipeline(id, description, compoundProcessor); + private Processor readProcessor(Map processorRegistry, String type, Map config) throws Exception { + Processor.Factory factory = processorRegistry.get(type); + if (factory != null) { + List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); + Processor processor = factory.create(config); + if (config.isEmpty() == false) { + throw new IllegalArgumentException("processor [" + type + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray())); + } + if (onFailureProcessors.isEmpty()) { + return processor; + } + return new CompoundProcessor(Collections.singletonList(processor), onFailureProcessors); + } + throw new IllegalArgumentException("No processor type exists with name [" + type + "]"); } + + } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index cb70b5a79c8..723e3eb6840 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestStatusToXContentListener; +import org.elasticsearch.rest.action.support.AcknowledgedRestListener; public class RestDeletePipelineAction extends BaseRestHandler { @@ -41,6 
+41,8 @@ public class RestDeletePipelineAction extends BaseRestHandler { protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { DeletePipelineRequest request = new DeletePipelineRequest(); request.id(restRequest.param("id")); - client.deletePipeline(request, new RestStatusToXContentListener<>(channel)); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + client.deletePipeline(request, new AcknowledgedRestListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index 7fae61eaed5..4860f5e7931 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -42,6 +42,7 @@ public class RestGetPipelineAction extends BaseRestHandler { protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { GetPipelineRequest request = new GetPipelineRequest(); request.ids(Strings.splitStringByCommaToArray(restRequest.param("id"))); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); client.getPipeline(request, new RestStatusToXContentListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 98ec67782d5..7c2d9a717dc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestStatusToXContentListener; +import org.elasticsearch.rest.action.support.AcknowledgedRestListener; public class RestPutPipelineAction extends BaseRestHandler { @@ -44,6 +44,8 @@ public class RestPutPipelineAction extends BaseRestHandler { if (restRequest.hasContent()) { request.source(restRequest.content()); } - client.putPipeline(request, new RestStatusToXContentListener<>(channel)); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + client.putPipeline(request, new AcknowledgedRestListener<>(channel)); } } diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index db7b5df6662..be229310043 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -776,6 +776,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(139, null); ids.put(140, org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class); ids.put(141, org.elasticsearch.index.query.QueryShardException.class); + ids.put(142, org.elasticsearch.ingest.PipelineMissingException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/action/ingest/ReloadPipelinesActionTests.java b/core/src/test/java/org/elasticsearch/action/ingest/ReloadPipelinesActionTests.java deleted file mode 100644 index 8a0284d80cf..00000000000 --- a/core/src/test/java/org/elasticsearch/action/ingest/ReloadPipelinesActionTests.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more 
contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.LocalTransportAddress; -import org.elasticsearch.ingest.PipelineStore; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportResponseHandler; -import org.elasticsearch.transport.TransportService; -import org.junit.Before; -import org.mockito.Matchers; - -import java.util.Collections; - -import static org.hamcrest.CoreMatchers.is; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class ReloadPipelinesActionTests extends ESTestCase { - - private ClusterService clusterService; - private TransportService transportService; - private ReloadPipelinesAction reloadPipelinesAction; - - @Before - public void init() { - Settings settings = 
Settings.EMPTY; - PipelineStore pipelineStore = mock(PipelineStore.class); - clusterService = mock(ClusterService.class); - transportService = mock(TransportService.class); - reloadPipelinesAction = new ReloadPipelinesAction(settings, pipelineStore, clusterService, transportService); - } - - public void testSuccess() { - int numNodes = randomIntBetween(1, 10); - ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(generateDiscoNodes(numNodes)).build(); - when(clusterService.state()).thenReturn(state); - - doAnswer(mock -> { - TransportResponseHandler handler = (TransportResponseHandler) mock.getArguments()[3]; - for (int i = 0; i < numNodes; i++) { - handler.handleResponse(new ReloadPipelinesAction.ReloadPipelinesResponse()); - } - return mock; - }).when(transportService).sendRequest(Matchers.any(), Matchers.eq(ReloadPipelinesAction.ACTION_NAME), Matchers.any(), Matchers.any()); - reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> assertThat(result, is(true))); - } - - public void testWithAtLeastOneFailure() { - int numNodes = randomIntBetween(1, 10); - - ClusterState state = ClusterState.builder(new ClusterName("_name")).nodes(generateDiscoNodes(numNodes)).build(); - when(clusterService.state()).thenReturn(state); - - doAnswer(mock -> { - TransportResponseHandler handler = (TransportResponseHandler) mock.getArguments()[3]; - handler.handleException(new TransportException("test failure")); - for (int i = 1; i < numNodes; i++) { - if (randomBoolean()) { - handler.handleResponse(new ReloadPipelinesAction.ReloadPipelinesResponse()); - } else { - handler.handleException(new TransportException("test failure")); - } - } - return mock; - }).when(transportService).sendRequest(Matchers.any(), Matchers.eq(ReloadPipelinesAction.ACTION_NAME), Matchers.any(), Matchers.any()); - reloadPipelinesAction.reloadPipelinesOnAllNodes(result -> assertThat(result, is(false))); - } - - private static DiscoveryNodes.Builder generateDiscoNodes(int numNodes) { - 
DiscoveryNodes.Builder discoNodes = DiscoveryNodes.builder(); - for (int i = 0; i < numNodes; i++) { - String id = Integer.toString(i); - DiscoveryNode discoNode = new DiscoveryNode(id, id, new LocalTransportAddress(id), Collections.emptyMap(), Version.CURRENT); - discoNodes.put(discoNode); - } - return discoNodes; - } -} diff --git a/core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java b/core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java deleted file mode 100644 index 886d9b94e84..00000000000 --- a/core/src/test/java/org/elasticsearch/common/SearchScrollIteratorTests.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common; - -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.test.ESSingleNodeTestCase; - -import static org.hamcrest.Matchers.equalTo; - -// Not a real unit tests with mocks, but with a single node, because we mock the scroll -// search behaviour and it changes then this test will not catch this. 
-public class SearchScrollIteratorTests extends ESSingleNodeTestCase { - - public void testSearchScrollIterator() { - createIndex("index"); - int numDocs = scaledRandomIntBetween(0, 128); - for (int i = 0; i < numDocs; i++) { - client().prepareIndex("index", "type", Integer.toString(i)) - .setSource("field", "value" + i) - .get(); - } - client().admin().indices().prepareRefresh().get(); - - int i = 0; - SearchRequest searchRequest = new SearchRequest("index"); - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); - // randomize size, because that also controls how many actual searches will happen: - sourceBuilder.size(scaledRandomIntBetween(1, 10)); - searchRequest.source(sourceBuilder); - Iterable hits = SearchScrollIterator.createIterator(client(), TimeValue.timeValueSeconds(10), searchRequest); - for (SearchHit hit : hits) { - assertThat(hit.getId(), equalTo(Integer.toString(i))); - assertThat(hit.getSource().get("field"), equalTo("value" + i)); - i++; - } - assertThat(i, equalTo(numDocs)); - } - -} diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java deleted file mode 100644 index a352c3af723..00000000000 --- a/core/src/test/java/org/elasticsearch/ingest/IngestBootstrapperTests.java +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.Version; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; -import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.discovery.DiscoverySettings; -import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.internal.InternalSearchHit; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.junit.Before; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import static 
org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.core.Is.is; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class IngestBootstrapperTests extends ESTestCase { - - private PipelineStore store; - private IngestBootstrapper bootstrapper; - - @Before - public void init() { - ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.executor(any())).thenReturn(Runnable::run); - ClusterService clusterService = mock(ClusterService.class); - store = mock(PipelineStore.class); - when(store.isStarted()).thenReturn(false); - PipelineExecutionService pipelineExecutionService = mock(PipelineExecutionService.class); - bootstrapper = new IngestBootstrapper(Settings.EMPTY, threadPool, clusterService, store, pipelineExecutionService); - } - - public void testStartAndStopInBackground() throws Exception { - ThreadPool threadPool = new ThreadPool("test"); - Client client = mock(Client.class); - TransportService transportService = mock(TransportService.class); - - ClusterService clusterService = mock(ClusterService.class); - when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); - when(client.searchScroll(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(Collections.emptyList())); - Settings settings = Settings.EMPTY; - PipelineStore store = new PipelineStore(settings, clusterService, transportService); - IngestBootstrapper bootstrapper = new IngestBootstrapper( - settings, threadPool, clusterService, store, null - ); - bootstrapper.setClient(client); - - List hits = new ArrayList<>(); - hits.add(new InternalSearchHit(0, "1", new Text("type"), 
Collections.emptyMap()) - .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) - ); - when(client.search(any())).thenReturn(PipelineStoreTests.expectedSearchReponse(hits)); - when(client.get(any())).thenReturn(PipelineStoreTests.expectedGetResponse(true)); - - try { - store.get("1"); - fail("IllegalStateException expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("pipeline store isn't ready yet")); - } - - MetaData metadata = MetaData.builder() - .put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)) - .build(); - bootstrapper.startPipelineStore(metadata); - assertBusy(() -> { - assertThat(store.isStarted(), is(true)); - assertThat(store.get("1"), notNullValue()); - assertThat(store.get("1").getId(), equalTo("1")); - assertThat(store.get("1").getDescription(), equalTo("_description1")); - }); - - bootstrapper.stopPipelineStore("testing stop"); - assertBusy(() -> assertThat(store.isStarted(), is(false))); - - // the map internal search hit holds gets emptied after use, which is ok, but in this test we need to reset the source: - hits.get(0).sourceRef(new BytesArray("{\"description\": \"_description1\"}")); - hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) - ); - bootstrapper.startPipelineStore(metadata); - assertBusy(() -> { - assertThat(store.isStarted(), is(true)); - assertThat(store.get("1"), notNullValue()); - assertThat(store.get("1").getId(), equalTo("1")); - assertThat(store.get("1").getDescription(), equalTo("_description1")); - assertThat(store.get("2"), notNullValue()); - assertThat(store.get("2").getId(), equalTo("2")); - assertThat(store.get("2").getDescription(), equalTo("_description2")); - }); - threadPool.shutdown(); - } - - public void testPipelineStoreBootstrappingGlobalStateNotRecoveredBlock() throws Exception { - ClusterState.Builder csBuilder = new 
ClusterState.Builder(new ClusterName("_name")); - csBuilder.blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)); - ClusterState cs = csBuilder.metaData(MetaData.builder()).build(); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, never()).start(); - verify(store, never()).stop(anyString()); - } - - public void testPipelineStoreBootstrappingGlobalStateNoMasterBlock() throws Exception { - ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - csBuilder.blocks(ClusterBlocks.builder() - .addGlobalBlock(randomBoolean() ? DiscoverySettings.NO_MASTER_BLOCK_WRITES : DiscoverySettings.NO_MASTER_BLOCK_ALL)); - ClusterState cs = csBuilder.metaData( - MetaData.builder() - .put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)) - ).build(); - - // We're not started and there is a no master block, doing nothing: - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, never()).start(); - verify(store, never()).stop(anyString()); - - // We're started and there is a no master block, so we stop the store: - when(store.isStarted()).thenReturn(true); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, never()).start(); - verify(store, times(1)).stop(anyString()); - } - - public void testPipelineStoreBootstrappingNoIngestIndex() throws Exception { - ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - ClusterState cs = csBuilder.metaData(MetaData.builder() - .put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME))) - .build(); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, times(1)).start(); - } - - public void testPipelineStoreBootstrappingIngestIndexShardsNotStarted() throws Exception { - // .ingest index, but not all primary shards started: - ClusterState.Builder csBuilder = new 
ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - metaDateBuilder.put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(PipelineStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(PipelineStore.INDEX); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(PipelineStore.INDEX, 0)) - .addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.INITIALIZING, 1, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); - csBuilder.routingTable(routingTableBuilder.build()); - ClusterState cs = csBuilder.build(); - - // We're not running and the cluster state isn't ready, so we don't start. - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, never()).start(); - verify(store, never()).stop(anyString()); - - // We're running and the cluster state indicates that all our shards are unassigned, so we stop. 
- when(store.isStarted()).thenReturn(true); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, never()).start(); - verify(store, times(1)).stop(anyString()); - } - - public void testPipelineStoreBootstrappingIngestIndexShardsStarted() throws Exception { - // .ingest index, but not all primary shards started: - ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - metaDateBuilder.put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(PipelineStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(PipelineStore.INDEX); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(PipelineStore.INDEX, 0)) - .addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.STARTED, 1, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); - csBuilder.routingTable(routingTableBuilder.build()); - ClusterState cs = csBuilder.build(); - - // We're not running and the cluster state is ready, so we start. - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, times(1)).start(); - verify(store, never()).stop(anyString()); - - // We're running and the cluster state is good, so we do nothing. 
- when(store.isStarted()).thenReturn(true); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, times(1)).start(); - verify(store, never()).stop(anyString()); - } - - public void testPipelineStoreBootstrappingFailure() throws Exception { - // .ingest index, but not all primary shards started: - ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - metaDateBuilder.put(IndexTemplateMetaData.builder(IngestBootstrapper.INGEST_INDEX_TEMPLATE_NAME)); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(PipelineStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(PipelineStore.INDEX); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(PipelineStore.INDEX, 0)) - .addShard(TestShardRouting.newShardRouting(PipelineStore.INDEX, 0, "_node_id", null, null, true, ShardRoutingState.STARTED, 1, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); - csBuilder.routingTable(routingTableBuilder.build()); - ClusterState cs = csBuilder.build(); - - // fail the first call with an runtime exception and subsequent calls just return: - doThrow(new RuntimeException()).doNothing().when(store).start(); - bootstrapper.clusterChanged(new ClusterChangedEvent("test", cs, cs)); - verify(store, times(2)).start(); - verify(store, never()).stop(anyString()); - } - -} diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java 
b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 1c50bc38b6a..9742dd1b978 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -22,7 +22,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.ingest.DeletePipelineRequest; @@ -32,6 +31,7 @@ import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.core.IngestDocument; @@ -164,7 +164,6 @@ public class IngestClientIT extends ESIntegTestCase { } public void test() throws Exception { - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(); putPipelineRequest.id("_id"); putPipelineRequest.source(jsonBuilder().startObject() @@ -200,9 +199,8 @@ public class IngestClientIT extends ESIntegTestCase { DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest(); deletePipelineRequest.id("_id"); - DeleteResponse response = client().deletePipeline(deletePipelineRequest).get(); - assertThat(response.isFound(), is(true)); - assertThat(response.getId(), equalTo("_id")); + WritePipelineResponse response = client().deletePipeline(deletePipelineRequest).get(); + assertThat(response.isAcknowledged(), is(true)); getResponse = client().prepareGetPipeline().setIds("_id").get(); 
assertThat(getResponse.isFound(), is(false)); diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index 57086cc02a9..19d56858c8f 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -19,444 +19,175 @@ package org.elasticsearch.ingest; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.client.Client; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.get.GetResult; -import org.elasticsearch.search.internal.InternalSearchHit; -import org.elasticsearch.search.internal.InternalSearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.processor.SetProcessor; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportService; import org.junit.Before; -import org.mockito.ArgumentMatcher; -import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; -import java.util.Objects; -import 
java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Matchers.any; -import static org.mockito.Matchers.argThat; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class PipelineStoreTests extends ESTestCase { private PipelineStore store; - private Client client; @Before public void init() throws Exception { - Settings settings = Settings.EMPTY; ClusterService clusterService = mock(ClusterService.class); - TransportService transportService = mock(TransportService.class); - - client = mock(Client.class); - when(client.search(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); - when(client.searchScroll(any())).thenReturn(expectedSearchReponse(Collections.emptyList())); - store = new PipelineStore(settings, clusterService, transportService); - store.setClient(client); - store.start(); + store = new PipelineStore(Settings.EMPTY, clusterService); + ProcessorsRegistry registry = new ProcessorsRegistry(); + registry.registerProcessor("set", (environment, templateService) -> new SetProcessor.Factory(TestTemplateService.instance())); + store.buildProcessorFactoryRegistry(registry, null, null); } - public void testUpdatePipeline() throws Exception { - List hits = new ArrayList<>(); - hits.add(new InternalSearchHit(0, "1", new Text("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"description\": \"_description1\"}")) + public void testUpdatePipelines() { + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + store.innerUpdatePipelines(clusterState); + assertThat(store.pipelines.size(), is(0)); + + PipelineConfiguration pipeline = new PipelineConfiguration( + "_id",new 
BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}") ); + IngestMetadata ingestMetadata = new IngestMetadata(Collections.singletonMap("_id", pipeline)); + clusterState = ClusterState.builder(clusterState) + .metaData(MetaData.builder().putCustom(IngestMetadata.TYPE, ingestMetadata)) + .build(); + store.innerUpdatePipelines(clusterState); + assertThat(store.pipelines.size(), is(1)); + assertThat(store.pipelines.get("_id").getId(), equalTo("_id")); + assertThat(store.pipelines.get("_id").getDescription(), nullValue()); + assertThat(store.pipelines.get("_id").getProcessors().size(), equalTo(1)); + assertThat(store.pipelines.get("_id").getProcessors().get(0).getType(), equalTo("set")); + } - when(client.search(any())).thenReturn(expectedSearchReponse(hits)); - when(client.get(any())).thenReturn(expectedGetResponse(true)); - assertThat(store.get("1"), nullValue()); + public void testPut() { + String id = "_id"; + Pipeline pipeline = store.get(id); + assertThat(pipeline, nullValue()); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); - store.updatePipelines(); - assertThat(store.get("1").getId(), equalTo("1")); - assertThat(store.get("1").getDescription(), equalTo("_description1")); + // add a new pipeline: + PutPipelineRequest putRequest = new PutPipelineRequest(); + putRequest.id(id); + putRequest.source(new BytesArray("{\"processors\": []}")); + clusterState = store.innerPut(putRequest, clusterState); + store.innerUpdatePipelines(clusterState); + pipeline = store.get(id); + assertThat(pipeline, notNullValue()); + assertThat(pipeline.getId(), equalTo(id)); + assertThat(pipeline.getDescription(), nullValue()); + assertThat(pipeline.getProcessors().size(), equalTo(0)); - when(client.get(any())).thenReturn(expectedGetResponse(true)); - hits.add(new InternalSearchHit(0, "2", new Text("type"), Collections.emptyMap()) - .sourceRef(new BytesArray("{\"description\": \"_description2\"}")) + // overwrite 
existing pipeline: + putRequest = new PutPipelineRequest(); + putRequest.id(id); + putRequest.source(new BytesArray("{\"processors\": [], \"description\": \"_description\"}")); + clusterState = store.innerPut(putRequest, clusterState); + store.innerUpdatePipelines(clusterState); + pipeline = store.get(id); + assertThat(pipeline, notNullValue()); + assertThat(pipeline.getId(), equalTo(id)); + assertThat(pipeline.getDescription(), equalTo("_description")); + assertThat(pipeline.getProcessors().size(), equalTo(0)); + } + + public void testDelete() { + PipelineConfiguration config = new PipelineConfiguration( + "_id",new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}") ); - store.updatePipelines(); - assertThat(store.get("1").getId(), equalTo("1")); - assertThat(store.get("1").getDescription(), equalTo("_description1")); - assertThat(store.get("2").getId(), equalTo("2")); - assertThat(store.get("2").getDescription(), equalTo("_description2")); - - hits.remove(1); - when(client.get(eqGetRequest(PipelineStore.INDEX, PipelineStore.TYPE, "2"))).thenReturn(expectedGetResponse(false)); - store.updatePipelines(); - assertThat(store.get("1").getId(), equalTo("1")); - assertThat(store.get("1").getDescription(), equalTo("_description1")); - assertThat(store.get("2"), nullValue()); - } - - public void testGetReference() throws Exception { - // fill the store up for the test: - List hits = new ArrayList<>(); - hits.add(new InternalSearchHit(0, "foo", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - hits.add(new InternalSearchHit(0, "bar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - hits.add(new InternalSearchHit(0, "foobar", new Text("type"), Collections.emptyMap()).sourceRef(new BytesArray("{\"description\": \"_description\"}"))); - when(client.search(any())).thenReturn(expectedSearchReponse(hits)); - 
store.updatePipelines(); - - List result = store.getReference("foo"); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); - - result = store.getReference("foo*"); - // to make sure the order is consistent in the test: - result.sort((first, second) -> { - return first.getPipeline().getId().compareTo(second.getPipeline().getId()); - }); - assertThat(result.size(), equalTo(2)); - assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); - assertThat(result.get(1).getPipeline().getId(), equalTo("foobar")); - - result = store.getReference("bar*"); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0).getPipeline().getId(), equalTo("bar")); - - result = store.getReference("*"); - // to make sure the order is consistent in the test: - result.sort((first, second) -> { - return first.getPipeline().getId().compareTo(second.getPipeline().getId()); - }); - assertThat(result.size(), equalTo(3)); - assertThat(result.get(0).getPipeline().getId(), equalTo("bar")); - assertThat(result.get(1).getPipeline().getId(), equalTo("foo")); - assertThat(result.get(2).getPipeline().getId(), equalTo("foobar")); - - result = store.getReference("foo", "bar"); - assertThat(result.size(), equalTo(2)); - assertThat(result.get(0).getPipeline().getId(), equalTo("foo")); - assertThat(result.get(1).getPipeline().getId(), equalTo("bar")); - } - - public void testValidateIngestIndex() throws Exception { - // ingest index doesn't exist: - ClusterState state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder()) + IngestMetadata ingestMetadata = new IngestMetadata(Collections.singletonMap("_id", config)); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().putCustom(IngestMetadata.TYPE, ingestMetadata)) .build(); - assertThat(store.isIngestIndexPresent(state), equalTo(false)); + store.innerUpdatePipelines(clusterState); + assertThat(store.get("_id"), 
notNullValue()); - // ingest index does exist and is valid: - IndexMetaData.Builder indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); - assertThat(store.isIngestIndexPresent(state), equalTo(true)); + // Delete pipeline: + DeletePipelineRequest deleteRequest = new DeletePipelineRequest(); + deleteRequest.id("_id"); + clusterState = store.innerDelete(deleteRequest, clusterState); + store.innerUpdatePipelines(clusterState); + assertThat(store.get("_id"), nullValue()); - // fails, has dynamic mapping - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.mapper.dynamic", true) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); + // Delete existing pipeline: try { - store.isIngestIndexPresent(state); + store.innerDelete(deleteRequest, clusterState); fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest index setting, [index.mapper.dynamic] setting is [true] while [false] is expected")); - } - - // fails, incorrect number of primary shards - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.number_of_shards", 2) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); - state = ClusterState.builder(new ClusterName("_name")) - 
.metaData(MetaData.builder().put(indexMetaData)) - .build(); - try { - store.isIngestIndexPresent(state); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest index setting, [index.number_of_shards] setting is [2] while [1] is expected")); - } - - // fails, incorrect number of replica shards - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.number_of_replicas", 2) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, PipelineStore.PIPELINE_MAPPING); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); - try { - store.isIngestIndexPresent(state); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest index setting, [index.number_of_replicas] setting is [2] while [1] is expected")); - } - - // fails not a strict mapping: - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("_all") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("processors") - .field("type", "object") - .field("enabled", false) - .field("dynamic", true) - .endObject() - .startObject("on_failure") - .field("type", "object") - .field("enabled", false) - .field("dynamic", true) - .endObject() - .startObject("description") - .field("type", "string") - .endObject() - .endObject() - .endObject().string(); - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, mapping); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); - try { - store.isIngestIndexPresent(state); - fail("exception 
expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest mapping, pipeline mapping must be strict")); - } - - // fails _all field is enabled: - mapping = XContentFactory.jsonBuilder().startObject() - .field("dynamic", "strict") - .startObject("_all") - .field("enabled", true) - .endObject() - .startObject("properties") - .startObject("processors") - .field("type", "object") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("on_failure") - .field("type", "object") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("description") - .field("type", "string") - .endObject() - .endObject() - .endObject().string(); - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, mapping); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); - try { - store.isIngestIndexPresent(state); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest mapping, _all field is enabled")); - } - - // fails processor field not of type object: - mapping = XContentFactory.jsonBuilder().startObject() - .field("dynamic", "strict") - .startObject("_all") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("processors") - .field("type", "nested") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("on_failure") - .field("type", "object") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("description") - .field("type", "string") - .endObject() - .endObject() - .endObject().string(); - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - 
.put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, mapping); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); - try { - store.isIngestIndexPresent(state); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest mapping, processors field's type is [nested] while [object] is expected")); - } - - // fails processor field enabled option is true: - mapping = XContentFactory.jsonBuilder().startObject() - .field("dynamic", "strict") - .startObject("_all") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("processors") - .field("type", "object") - .field("enabled", true) - .field("dynamic", "true") - .endObject() - .startObject("on_failure") - .field("type", "object") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("description") - .field("type", "string") - .endObject() - .endObject() - .endObject().string(); - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, mapping); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); - try { - store.isIngestIndexPresent(state); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest mapping, processors field enabled option is [true] while [false] is expected")); - } - - // fails processor field dynamic option is false: - mapping = XContentFactory.jsonBuilder().startObject() - .field("dynamic", "strict") - .startObject("_all") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("processors") - .field("type", "object") - 
.field("enabled", false) - .field("dynamic", "false") - .endObject() - .startObject("on_failure") - .field("type", "object") - .field("enabled", false) - .field("dynamic", "true") - .endObject() - .startObject("description") - .field("type", "string") - .endObject() - .endObject() - .endObject().string(); - indexMetaData = IndexMetaData.builder(PipelineStore.INDEX) - .settings(Settings.builder() - .put(PipelineStore.INGEST_INDEX_SETTING) - .put("index.version.created", Version.CURRENT) - ) - .putMapping(PipelineStore.TYPE, mapping); - state = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().put(indexMetaData)) - .build(); - try { - store.isIngestIndexPresent(state); - fail("exception expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("illegal ingest mapping, processors field dynamic option is [false] while [true] is expected")); + } catch (PipelineMissingException e) { + assertThat(e.getMessage(), equalTo("pipeline [_id] is missing")); } } - static ActionFuture expectedSearchReponse(List hits) { - return new PlainActionFuture() { + public void testGetPipelines() { + Map configs = new HashMap<>(); + configs.put("_id1", new PipelineConfiguration( + "_id1", new BytesArray("{\"processors\": []}") + )); + configs.put("_id2", new PipelineConfiguration( + "_id2", new BytesArray("{\"processors\": []}") + )); - @Override - public SearchResponse get(long timeout, TimeUnit unit) { - InternalSearchHits hits1 = new InternalSearchHits(hits.toArray(new InternalSearchHit[0]), hits.size(), 1f); - return new SearchResponse(new InternalSearchResponse(hits1, null, null, null, false, null), "_scrollId", 1, 1, 1, null); - } - }; + assertThat(store.innerGetPipelines(null, "_id1").isEmpty(), is(true)); + + IngestMetadata ingestMetadata = new IngestMetadata(configs); + List pipelines = store.innerGetPipelines(ingestMetadata, "_id1"); + assertThat(pipelines.size(), equalTo(1)); + assertThat(pipelines.get(0).getId(), 
equalTo("_id1")); + + pipelines = store.innerGetPipelines(ingestMetadata, "_id1", "_id2"); + assertThat(pipelines.size(), equalTo(2)); + assertThat(pipelines.get(0).getId(), equalTo("_id1")); + assertThat(pipelines.get(1).getId(), equalTo("_id2")); + + pipelines = store.innerGetPipelines(ingestMetadata, "_id*"); + pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId())); + assertThat(pipelines.size(), equalTo(2)); + assertThat(pipelines.get(0).getId(), equalTo("_id1")); + assertThat(pipelines.get(1).getId(), equalTo("_id2")); } - static ActionFuture expectedGetResponse(boolean exists) { - return new PlainActionFuture() { - @Override - public GetResponse get() throws InterruptedException, ExecutionException { - return new GetResponse(new GetResult("_index", "_type", "_id", 1, exists, null, null)); - } - }; - } + public void testCrud() throws Exception { + String id = "_id"; + Pipeline pipeline = store.get(id); + assertThat(pipeline, nullValue()); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty - static GetRequest eqGetRequest(String index, String type, String id) { - return argThat(new GetRequestMatcher(index, type, id)); - } + PutPipelineRequest putRequest = new PutPipelineRequest(); + putRequest.id(id); + putRequest.source(new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")); + clusterState = store.innerPut(putRequest, clusterState); + store.innerUpdatePipelines(clusterState); + pipeline = store.get(id); + assertThat(pipeline, notNullValue()); + assertThat(pipeline.getId(), equalTo(id)); + assertThat(pipeline.getDescription(), nullValue()); + assertThat(pipeline.getProcessors().size(), equalTo(1)); + assertThat(pipeline.getProcessors().get(0).getType(), equalTo("set")); - static class GetRequestMatcher extends ArgumentMatcher { - - private final String index; - private final String type; - private final String id; - - public GetRequestMatcher(String index, String 
type, String id) { - this.index = index; - this.type = type; - this.id = id; - } - - @Override - public boolean matches(Object o) { - GetRequest getRequest = (GetRequest) o; - return Objects.equals(getRequest.index(), index) && - Objects.equals(getRequest.type(), type) && - Objects.equals(getRequest.id(), id); - } + DeletePipelineRequest deleteRequest = new DeletePipelineRequest(); + deleteRequest.id(id); + clusterState = store.innerDelete(deleteRequest, clusterState); + store.innerUpdatePipelines(clusterState); + pipeline = store.get(id); + assertThat(pipeline, nullValue()); } } diff --git a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/20_grok.yaml b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/20_grok.yaml index 70b1c8a9d40..f88136d8a79 100644 --- a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/20_grok.yaml +++ b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/20_grok.yaml @@ -15,7 +15,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: @@ -54,7 +54,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: @@ -91,7 +91,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml index 91e0c7ac844..e8da23e0edd 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml @@ -14,7 +14,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: @@ -58,7 +58,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: @@ -103,7 +103,7 @@ } ] } - - match: { 
_id: "my_pipeline" } + - match: { acknowledged: true } - do: index: diff --git a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml index 24ac604989f..01d674053ae 100644 --- a/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml +++ b/qa/ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yaml @@ -15,25 +15,18 @@ } ] } - - match: { _index: ".ingest" } - - match: { _type: "pipeline" } - - match: { _version: 1 } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: ingest.get_pipeline: id: "my_pipeline" - - match: { my_pipeline._source.description: "_description" } - - match: { my_pipeline._version: 1 } + - match: { pipelines.0.id: "my_pipeline" } + - match: { pipelines.0.config.description: "_description" } - do: ingest.delete_pipeline: id: "my_pipeline" - - match: { _index: ".ingest" } - - match: { _type: "pipeline" } - - match: { _version: 2 } - - match: { _id: "my_pipeline" } - - match: { found: true } + - match: { acknowledged: true } --- "Test ingest simulate API works fine when node.ingest is set to false": @@ -52,7 +45,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: ingest.simulate: diff --git a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml index e65e68fdd45..9e644773c6a 100644 --- a/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml +++ b/qa/ingest-with-mustache/src/test/resources/rest-api-spec/test/ingest_mustache/10_pipeline_with_mustache_templates.yaml @@ -25,7 +25,7 @@ } ] } - - match: { _id: "my_pipeline_1" } + - 
match: { acknowledged: true } - do: index: @@ -72,7 +72,7 @@ ] } - - match: { _id: "my_pipeline_1" } + - match: { acknowledged: true } - do: ingest.put_pipeline: @@ -89,7 +89,7 @@ } ] } - - match: { _id: "my_pipeline_2" } + - match: { acknowledged: true } - do: ingest.put_pipeline: @@ -105,7 +105,7 @@ } ] } - - match: { _id: "my_pipeline_3" } + - match: { acknowledged: true } - do: index: @@ -198,7 +198,7 @@ } ] } - - match: { _id: "my_handled_pipeline" } + - match: { acknowledged: true } - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json index 69b8f53d63a..1c515e45095 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json @@ -13,6 +13,14 @@ } }, "params": { + "master_timeout": { + "type" : "time", + "description" : "Explicit operation timeout for connection to master node" + }, + "timeout": { + "type" : "time", + "description" : "Explicit operation timeout" + } } }, "body": null diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json index 71772a28a76..6c50657ae1a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json @@ -13,6 +13,10 @@ } }, "params": { + "master_timeout": { + "type" : "time", + "description" : "Explicit operation timeout for connection to master node" + } } }, "body": null diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json index fd88d352731..e4c3c2eb3f9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json @@ -13,6 +13,14 @@ } }, "params": { + "master_timeout": { + "type" : "time", + "description" : "Explicit operation timeout for connection to master node" + }, + "timeout": { + "type" : "time", + "description" : "Explicit operation timeout" + } } }, "body": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml index 5a62247df41..bf0817f2da1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml @@ -15,25 +15,18 @@ } ] } - - match: { _index: ".ingest" } - - match: { _type: "pipeline" } - - match: { _version: 1 } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: ingest.get_pipeline: id: "my_pipeline" - - match: { my_pipeline._source.description: "_description" } - - match: { my_pipeline._version: 1 } + - match: { pipelines.0.id: "my_pipeline" } + - match: { pipelines.0.config.description: "_description" } - do: ingest.delete_pipeline: id: "my_pipeline" - - match: { _index: ".ingest" } - - match: { _type: "pipeline" } - - match: { _version: 2 } - - match: { _id: "my_pipeline" } - - match: { found: true } + - match: { acknowledged: true } - do: catch: missing @@ -82,25 +75,18 @@ } ] } - - match: { _index: ".ingest" } - - match: { _type: "pipeline" } - - match: { _version: 1 } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: ingest.get_pipeline: id: "my_pipeline" - - match: { my_pipeline._source.description: "_description" } - - match: { my_pipeline._version: 1 } + - match: { pipelines.0.id: "my_pipeline" } + - match: { pipelines.0.config.description: "_description" } - do: ingest.delete_pipeline: id: "my_pipeline" - - match: { _index: ".ingest" } - - match: { _type: "pipeline" } - - match: { _version: 2 } - - match: { _id: 
"my_pipeline" } - - match: { found: true } + - match: { acknowledged: true } - do: catch: missing diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml index 8852e5e5749..71c5c4069b2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/20_date_processor.yaml @@ -17,7 +17,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml index a0a29e9c050..1e7911e519a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml @@ -72,7 +72,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: @@ -130,7 +130,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml index 9947129788b..a5965adec6e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml @@ -15,7 +15,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: ingest.simulate: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml index a01b0dacac0..7bce12d2ec5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/50_on_failure.yaml @@ -30,7 +30,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: @@ -88,7 +88,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml index d491a95686e..019c229ae38 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/60_fail.yaml @@ -14,7 +14,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: catch: request @@ -49,7 +49,7 @@ } ] } - - match: { _id: "my_pipeline" } + - match: { acknowledged: true } - do: index: From a7730b05b20d83f14ce9bcd53450f15baa272073 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 13 Jan 2016 23:35:41 +0100 Subject: [PATCH 217/347] Fix some todos --- .../elasticsearch/ingest/core/Pipeline.java | 12 ++++++---- .../ingest/core/ValueSource.java | 4 +--- .../{ => core}/PipelineFactoryTests.java | 23 ++++++++++--------- 3 files changed, 21 insertions(+), 18 deletions(-) rename core/src/test/java/org/elasticsearch/ingest/{ => core}/PipelineFactoryTests.java (78%) diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java index 84ddb6aa9e7..ebf570710e5 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java @@ -30,6 +30,10 @@ import java.util.Map; */ public final class Pipeline { + final static String DESCRIPTION_KEY = "description"; + final static String PROCESSORS_KEY = "processors"; + final static String ON_FAILURE_KEY = "on_failure"; + private final String id; private final String description; private final CompoundProcessor 
compoundProcessor; @@ -79,9 +83,9 @@ public final class Pipeline { public final static class Factory { public Pipeline create(String id, Map config, Map processorRegistry) throws Exception { - String description = ConfigurationUtils.readOptionalStringProperty(config, "description"); // TODO(simonw): can we make these strings constants? - List processors = readProcessors("processors", processorRegistry, config); - List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); + String description = ConfigurationUtils.readOptionalStringProperty(config, DESCRIPTION_KEY); + List processors = readProcessors(PROCESSORS_KEY, processorRegistry, config); + List onFailureProcessors = readProcessors(ON_FAILURE_KEY, processorRegistry, config); if (config.isEmpty() == false) { throw new IllegalArgumentException("pipeline [" + id + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray())); } @@ -106,7 +110,7 @@ public final class Pipeline { private Processor readProcessor(Map processorRegistry, String type, Map config) throws Exception { Processor.Factory factory = processorRegistry.get(type); if (factory != null) { - List onFailureProcessors = readProcessors("on_failure", processorRegistry, config); + List onFailureProcessors = readProcessors(ON_FAILURE_KEY, processorRegistry, config); Processor processor = factory.create(config); if (config.isEmpty() == false) { throw new IllegalArgumentException("processor [" + type + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray())); diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java index 5280b3e6702..e9f09a1a9f8 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java @@ -57,9 +57,7 @@ public interface ValueSource { 
valueSourceList.add(wrap(item, templateService)); } return new ListValue(valueSourceList); - } else if (value == null || value instanceof Integer || // TODO(simonw): maybe we just check for Number? - value instanceof Long || value instanceof Float || - value instanceof Double || value instanceof Boolean) { + } else if (value == null || value instanceof Number || value instanceof Boolean) { return new ObjectValue(value); } else if (value instanceof String) { return new TemplatedValue(templateService.compile((String) value)); diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java similarity index 78% rename from core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java rename to core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java index e1a46e7f0d6..153dc30019e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java @@ -17,8 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest; +package org.elasticsearch.ingest.core; +import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; @@ -34,8 +35,8 @@ public class PipelineFactoryTests extends ESTestCase { public void testCreate() throws Exception { Map processorConfig = new HashMap<>(); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description"); + pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); @@ -48,9 +49,9 @@ public class PipelineFactoryTests extends ESTestCase { public void testCreateWithPipelineOnFailure() throws Exception { Map processorConfig = new HashMap<>(); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); - pipelineConfig.put("on_failure", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description"); + pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig))); + pipelineConfig.put(Pipeline.ON_FAILURE_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); Pipeline pipeline = 
factory.create("_id", pipelineConfig, processorRegistry); @@ -66,8 +67,8 @@ public class PipelineFactoryTests extends ESTestCase { Map processorConfig = new HashMap<>(); processorConfig.put("unused", "value"); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description"); + pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); try { @@ -79,11 +80,11 @@ public class PipelineFactoryTests extends ESTestCase { public void testCreateProcessorsWithOnFailureProperties() throws Exception { Map processorConfig = new HashMap<>(); - processorConfig.put("on_failure", Collections.singletonList(Collections.singletonMap("test", new HashMap<>()))); + processorConfig.put(Pipeline.ON_FAILURE_KEY, Collections.singletonList(Collections.singletonMap("test", new HashMap<>()))); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("description", "_description"); - pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("test", processorConfig))); + pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description"); + pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig))); Pipeline.Factory factory = new Pipeline.Factory(); Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); From 4d88da5ad5c9f342a6daa806b3f8a62c37c7b20d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 14 Jan 2016 14:09:42 +0100 Subject: [PATCH 218/347] ingest: Use BiConsumer instead of Cunsumer to 
pass down the failed index request with throwable --- .../action/ingest/IngestActionFilter.java | 4 +- .../ingest/PipelineExecutionService.java | 6 +-- .../ingest/PipelineExecutionServiceTests.java | 38 ++++++++++--------- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index c9467b07a05..f2d402da346 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -109,9 +109,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio void processBulkIndexRequest(Task task, BulkRequest original, String action, ActionFilterChain chain, ActionListener listener) { BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); - executionService.execute(() -> bulkRequestModifier, tuple -> { - IndexRequest indexRequest = tuple.v1(); - Throwable throwable = tuple.v2(); + executionService.execute(() -> bulkRequestModifier, (indexRequest, throwable) -> { logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.pipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable); bulkRequestModifier.markCurrentItemAsFailed(throwable); }, (success) -> { diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 5553374880e..73639ff36d1 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -22,12 +22,12 @@ package org.elasticsearch.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Strings; -import 
org.elasticsearch.common.collect.Tuple; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.threadpool.ThreadPool; import java.util.Map; +import java.util.function.BiConsumer; import java.util.function.Consumer; public class PipelineExecutionService { @@ -53,7 +53,7 @@ public class PipelineExecutionService { } public void execute(Iterable actionRequests, - Consumer> itemFailureHandler, Consumer completionHandler) { + BiConsumer itemFailureHandler, Consumer completionHandler) { threadPool.executor(ThreadPool.Names.INGEST).execute(() -> { for (ActionRequest actionRequest : actionRequests) { if ((actionRequest instanceof IndexRequest)) { @@ -64,7 +64,7 @@ public class PipelineExecutionService { //this shouldn't be needed here but we do it for consistency with index api which requires it to prevent double execution indexRequest.pipeline(null); } catch (Throwable e) { - itemFailureHandler.accept(new Tuple<>(indexRequest, e)); + itemFailureHandler.accept(indexRequest, e); } } } diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index a0fc2b8b4ba..9872eb79f0e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.core.CompoundProcessor; import org.elasticsearch.ingest.core.IngestDocument; @@ -41,9 +40,11 @@ import org.mockito.invocation.InvocationOnMock; import java.util.Collections; import java.util.Map; import 
java.util.Objects; +import java.util.function.BiConsumer; import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.eq; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyString; @@ -95,24 +96,25 @@ public class PipelineExecutionServiceTests extends ESTestCase { IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("does_not_exist"); bulkRequest.add(indexRequest2); @SuppressWarnings("unchecked") - Consumer> failureHandler = mock(Consumer.class); + BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), failureHandler, completionHandler); - verify(failureHandler, times(1)).accept(argThat(new CustomTypeSafeMatcher>("failure handler was not called with the expected arguments") { - @Override - protected boolean matchesSafely(Tuple item) { - if( item.v1() != indexRequest2) { - return false; + verify(failureHandler, times(1)).accept( + argThat(new CustomTypeSafeMatcher("failure handler was not called with the expected arguments") { + @Override + protected boolean matchesSafely(IndexRequest item) { + return item == indexRequest2; } - if (item.v2() instanceof IllegalArgumentException == false) { - return false; - } - IllegalArgumentException iae = (IllegalArgumentException) item.v2(); - return "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage()); - } - })); + }), + argThat(new CustomTypeSafeMatcher("failure handler was not called with the expected arguments") { + @Override + protected boolean matchesSafely(IllegalArgumentException iae) { + return "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage()); + } + }) + ); verify(completionHandler, times(1)).accept(anyBoolean()); } @@ -308,11 +310,11 @@ public class 
PipelineExecutionServiceTests extends ESTestCase { doThrow(error).when(processor).execute(any()); when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor)); - Consumer> requestItemErrorHandler = mock(Consumer.class); + BiConsumer requestItemErrorHandler = mock(BiConsumer.class); Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler); - verify(requestItemErrorHandler, times(numIndexRequests)).accept(new Tuple<>(any(IndexRequest.class), error)); + verify(requestItemErrorHandler, times(numIndexRequests)).accept(any(IndexRequest.class), eq(error)); verify(completionHandler, times(1)).accept(true); } @@ -330,12 +332,12 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, new CompoundProcessor())); @SuppressWarnings("unchecked") - Consumer> requestItemErrorHandler = mock(Consumer.class); + BiConsumer requestItemErrorHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler); - verify(requestItemErrorHandler, never()).accept(any()); + verify(requestItemErrorHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(true); } From 4fb9fd8020131359afb0d86c5ba4b61a391c7d9c Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 14 Jan 2016 15:50:11 +0100 Subject: [PATCH 219/347] fix alphabetical order of modules --- settings.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.gradle b/settings.gradle index 39e0b4fb5b0..63e78dea047 100644 --- a/settings.gradle +++ b/settings.gradle @@ -11,10 +11,10 @@ List projects = [ 'test:framework', 'test:fixtures:example-fixture', 'test:fixtures:hdfs-fixture', + 'modules:ingest-grok', 'modules:lang-expression', 'modules:lang-groovy', 
'modules:lang-mustache', - 'modules:ingest-grok', 'plugins:analysis-icu', 'plugins:analysis-kuromoji', 'plugins:analysis-phonetic', From 1e68ad0887a8b13ce85569f450c748005afd15a9 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 14 Jan 2016 15:50:59 +0100 Subject: [PATCH 220/347] add ingest-geoip to official plugins --- core/src/main/java/org/elasticsearch/plugins/PluginManager.java | 1 + .../main/resources/org/elasticsearch/plugins/plugin-install.help | 1 + 2 files changed, 2 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index 7cd50409fb6..56da7176e9d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -101,6 +101,7 @@ public class PluginManager { "discovery-ec2", "discovery-gce", "discovery-multicast", + "ingest-geoip", "lang-javascript", "lang-plan-a", "lang-python", diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help index 8c73e3837a4..d46f7dca29a 100644 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help +++ b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help @@ -43,6 +43,7 @@ OFFICIAL PLUGINS - discovery-ec2 - discovery-gce - discovery-multicast + - ingest-geoip - lang-javascript - lang-plan-a - lang-python From 169b3c75c5674a617c19eaf22ce8a5292fecdb5f Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 14 Jan 2016 16:24:07 +0100 Subject: [PATCH 221/347] move grok patterns loading to IngestGrokPlugin class out of the GrokProcessor.Factory --- .../ingest/grok/GrokProcessor.java | 46 ++--------------- .../ingest/grok/IngestGrokPlugin.java | 51 ++++++++++++++++--- .../grok/GrokProcessorFactoryTests.java | 5 +- .../elasticsearch/ingest/grok/GrokTests.java | 18 +------ 4 files changed, 53 insertions(+), 67 
deletions(-) diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java index 2c63646ff81..56c04f09709 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java @@ -19,16 +19,10 @@ package org.elasticsearch.ingest.grok; -import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -70,47 +64,17 @@ public final class GrokProcessor implements Processor { public final static class Factory implements Processor.Factory { - private final static String[] PATTERN_NAMES = new String[] { - "aws", "bacula", "bro", "exim", "firewalls", "grok-patterns", "haproxy", - "java", "junos", "linux-syslog", "mcollective-patterns", "mongodb", "nagios", - "postgresql", "rails", "redis", "ruby" - }; - private final Map builtinPatternBank; + private final Map builtinPatterns; - public Factory() throws IOException { - // TODO(simonw): we should have a static helper method to load these patterns and make this - // factory only accept a String->String map instead. That way we can load - // the patterns in the IngestGrokPlugin ctor or even in a static context and this ctor doesn't need to throw any exception. 
- Map builtinPatterns = new HashMap<>(); - for (String pattern : PATTERN_NAMES) { - try(InputStream is = getClass().getResourceAsStream("/patterns/" + pattern)) { - loadBankFromStream(builtinPatterns, is); - } - } - this.builtinPatternBank = Collections.unmodifiableMap(builtinPatterns); - } - - static void loadBankFromStream(Map patternBank, InputStream inputStream) throws IOException { - String line; - BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - while ((line = br.readLine()) != null) { - String trimmedLine = line.replaceAll("^\\s+", ""); - if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) { - continue; - } - - String[] parts = trimmedLine.split("\\s+", 2); - if (parts.length == 2) { - patternBank.put(parts[0], parts[1]); - } - } + public Factory(Map builtinPatterns) { + this.builtinPatterns = builtinPatterns; } public GrokProcessor create(Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "field"); String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern"); Map customPatternBank = ConfigurationUtils.readOptionalMap(config, "pattern_definitions"); - Map patternBank = new HashMap<>(builtinPatternBank); + Map patternBank = new HashMap<>(builtinPatterns); if (customPatternBank != null) { patternBank.putAll(customPatternBank); } diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java index 2b61b5e9073..f6423fe40cf 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java @@ -22,10 +22,29 @@ package org.elasticsearch.ingest.grok; import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.plugins.Plugin; +import java.io.BufferedReader; import java.io.IOException; 
+import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; public class IngestGrokPlugin extends Plugin { + private static final String[] PATTERN_NAMES = new String[] { + "aws", "bacula", "bro", "exim", "firewalls", "grok-patterns", "haproxy", + "java", "junos", "linux-syslog", "mcollective-patterns", "mongodb", "nagios", + "postgresql", "rails", "redis", "ruby" + }; + + private final Map builtinPatterns; + + public IngestGrokPlugin() throws IOException { + this.builtinPatterns = loadBuiltinPatterns(); + } + @Override public String name() { return "ingest-grok"; @@ -37,12 +56,32 @@ public class IngestGrokPlugin extends Plugin { } public void onModule(IngestModule ingestModule) { - ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> { - try { - return new GrokProcessor.Factory(); - } catch (IOException e) { - throw new RuntimeException(e); + ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(builtinPatterns)); + } + + static Map loadBuiltinPatterns() throws IOException { + Map builtinPatterns = new HashMap<>(); + for (String pattern : PATTERN_NAMES) { + try(InputStream is = IngestGrokPlugin.class.getResourceAsStream("/patterns/" + pattern)) { + loadPatterns(builtinPatterns, is); } - }); + } + return Collections.unmodifiableMap(builtinPatterns); + } + + private static void loadPatterns(Map patternBank, InputStream inputStream) throws IOException { + String line; + BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); + while ((line = br.readLine()) != null) { + String trimmedLine = line.replaceAll("^\\s+", ""); + if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) { + continue; + } + + String[] parts = trimmedLine.split("\\s+", 2); + if (parts.length == 2) { + patternBank.put(parts[0], 
parts[1]); + } + } } } diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java index 11c1024eb82..419e48874cd 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest.grok; -import org.elasticsearch.ingest.grok.GrokProcessor; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -32,7 +31,7 @@ import static org.hamcrest.Matchers.notNullValue; public class GrokProcessorFactoryTests extends ESTestCase { public void testBuild() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(); + GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap()); Map config = new HashMap<>(); config.put("field", "_field"); @@ -43,7 +42,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { } public void testCreateWithCustomPatterns() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(); + GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap()); Map config = new HashMap<>(); config.put("field", "_field"); diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java index 89e44fc7239..21ca17ad713 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokTests.java @@ -19,13 +19,10 @@ package org.elasticsearch.ingest.grok; -import org.elasticsearch.ingest.grok.Grok; -import org.elasticsearch.ingest.grok.GrokProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import 
java.io.IOException; -import java.io.InputStream; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -39,22 +36,9 @@ import static org.hamcrest.Matchers.nullValue; public class GrokTests extends ESTestCase { private Map basePatterns; - private Map newBankFromStreams(InputStream... inputStreams) throws IOException { - Map patternBank = new HashMap<>(); - - for (InputStream is : inputStreams) { - GrokProcessor.Factory.loadBankFromStream(patternBank, is); - } - - return patternBank; - } - @Before public void setup() throws IOException { - basePatterns = newBankFromStreams( - getClass().getResourceAsStream("/patterns/grok-patterns"), - getClass().getResourceAsStream("/patterns/linux-syslog") - ); + basePatterns = IngestGrokPlugin.loadBuiltinPatterns(); } public void testMatchWithoutCaptures() { From 63ee2224f771d23874bbc6e081b606618bcf2b73 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 14 Jan 2016 13:49:50 +0100 Subject: [PATCH 222/347] ingest: remove ingest threadpool and use index threadpool instead. To main concern with the dedicated ingest TP is that there are already many TPs and in the case with beefy nodes we would many more threads. In the case ingest isn't used the all these threads are just idle. 
--- .../org/elasticsearch/ingest/PipelineExecutionService.java | 4 ++-- .../main/java/org/elasticsearch/threadpool/ThreadPool.java | 5 ----- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 73639ff36d1..7a382ca1ce7 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -42,7 +42,7 @@ public class PipelineExecutionService { public void execute(IndexRequest request, Consumer failureHandler, Consumer completionHandler) { Pipeline pipeline = getPipeline(request.pipeline()); - threadPool.executor(ThreadPool.Names.INGEST).execute(() -> { + threadPool.executor(ThreadPool.Names.INDEX).execute(() -> { try { innerExecute(request, pipeline); completionHandler.accept(true); @@ -54,7 +54,7 @@ public class PipelineExecutionService { public void execute(Iterable actionRequests, BiConsumer itemFailureHandler, Consumer completionHandler) { - threadPool.executor(ThreadPool.Names.INGEST).execute(() -> { + threadPool.executor(ThreadPool.Names.INDEX).execute(() -> { for (ActionRequest actionRequest : actionRequests) { if ((actionRequest instanceof IndexRequest)) { IndexRequest indexRequest = (IndexRequest) actionRequest; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index bf4f75a569f..6eca00f2aab 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -88,7 +88,6 @@ public class ThreadPool extends AbstractComponent { public static final String FORCE_MERGE = "force_merge"; public static final String FETCH_SHARD_STARTED = "fetch_shard_started"; public static final String FETCH_SHARD_STORE = "fetch_shard_store"; - public 
static final String INGEST = "ingest"; //TODO(simonw): wow what is the reason for having yet another threadpool? I really think we should just use index for this. } public enum ThreadPoolType { @@ -147,7 +146,6 @@ public class ThreadPool extends AbstractComponent { map.put(Names.FORCE_MERGE, ThreadPoolType.FIXED); map.put(Names.FETCH_SHARD_STARTED, ThreadPoolType.SCALING); map.put(Names.FETCH_SHARD_STORE, ThreadPoolType.SCALING); - map.put(Names.INGEST, ThreadPoolType.FIXED); THREAD_POOL_TYPES = Collections.unmodifiableMap(map); } @@ -237,9 +235,6 @@ public class ThreadPool extends AbstractComponent { add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FORCE_MERGE).size(1)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STARTED).size(availableProcessors * 2).keepAlive("5m")); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STORE).size(availableProcessors * 2).keepAlive("5m")); - if (IngestModule.isIngestEnabled(settings)) { - add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INGEST).size(availableProcessors).queueSize(200)); - } this.defaultExecutorTypeSettings = unmodifiableMap(defaultExecutorTypeSettings); From 57b88076f7ae8d69912959ea6873040086f2800b Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 14 Jan 2016 17:32:09 +0100 Subject: [PATCH 223/347] [TEST] remove thread pool tests that check whether the ingest tp was started The ingest thread pool has been removed, no reason to test it ;) --- .../threadpool/ThreadPoolTests.java | 64 ------------------- 1 file changed, 64 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java deleted file mode 100644 index 7488de3fada..00000000000 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ /dev/null @@ 
-1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.threadpool; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.not; - -public class ThreadPoolTests extends ESTestCase { - - public void testIngestThreadPoolNotStartedWithIngestDisabled() throws Exception { - Settings settings = Settings.builder().put("name", "test").put("node.ingest", false).build(); - ThreadPool threadPool = null; - try { - threadPool = new ThreadPool(settings); - for (ThreadPool.Info info : threadPool.info()) { - assertThat(info.getName(), not(equalTo("ingest"))); - } - } finally { - if (threadPool != null) { - terminate(threadPool); - } - } - } - - public void testIngestThreadPoolStartedWithIngestEnabled() throws Exception { - Settings settings = Settings.builder().put("name", "test").put("node.ingest", true).build(); - ThreadPool threadPool = null; - try { - threadPool = new ThreadPool(settings); - boolean ingestFound = false; - for (ThreadPool.Info info : threadPool.info()) { - if (info.getName().equals("ingest")) { - ingestFound = true; - break; - } - } - 
assertThat(ingestFound, equalTo(true)); - } finally { - if (threadPool != null) { - terminate(threadPool); - } - } - } -} From 21cc0b231669b973956c7fd19fe269061e2cf657 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 14 Jan 2016 16:30:44 +0100 Subject: [PATCH 224/347] Cleanup ingest initialization code. * Folded IngestModule into NodeModule * Renamed IngestBootstrapper to IngestService * Let NodeService construct IngestService and removed the Guice annotations * Let IngestService implement Closable --- .../elasticsearch/action/ActionModule.java | 4 +- .../ingest/DeletePipelineTransportAction.java | 6 +- .../ingest/GetPipelineTransportAction.java | 6 +- .../action/ingest/IngestActionFilter.java | 6 +- .../ingest/IngestProxyActionFilter.java | 6 +- .../ingest/PutPipelineTransportAction.java | 8 +- .../SimulatePipelineTransportAction.java | 6 +- .../elasticsearch/ingest/IngestModule.java | 92 ------------------- ...stBootstrapper.java => IngestService.java} | 47 ++-------- .../java/org/elasticsearch/node/Node.java | 9 +- .../org/elasticsearch/node/NodeModule.java | 56 +++++++++++ .../node/service/NodeService.java | 22 ++++- .../elasticsearch/threadpool/ThreadPool.java | 1 - .../ingest/IngestActionFilterTests.java | 15 ++- .../ingest/IngestProxyActionFilterTests.java | 4 +- .../elasticsearch/ingest/IngestClientIT.java | 3 +- .../NodeModuleTests.java} | 36 ++++---- .../ingest/grok/IngestGrokPlugin.java | 6 +- .../ingest/geoip/IngestGeoIpPlugin.java | 6 +- 19 files changed, 145 insertions(+), 194 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/ingest/IngestModule.java rename core/src/main/java/org/elasticsearch/ingest/{IngestBootstrapper.java => IngestService.java} (52%) rename core/src/test/java/org/elasticsearch/{ingest/IngestModuleTests.java => node/NodeModuleTests.java} (56%) diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 
c784eba55dc..b6acfe8dd84 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -197,7 +197,7 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.IngestModule; +import org.elasticsearch.node.NodeModule; import java.util.ArrayList; import java.util.HashMap; @@ -228,7 +228,7 @@ public class ActionModule extends AbstractModule { private final boolean proxy; public ActionModule(Settings settings, boolean proxy) { - this.ingestEnabled = IngestModule.isIngestEnabled(settings); + this.ingestEnabled = NodeModule.isNodeIngestEnabled(settings); this.proxy = proxy; } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 03f63ff26b7..4f270572df4 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -29,8 +29,8 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.IngestBootstrapper; import org.elasticsearch.ingest.PipelineStore; +import org.elasticsearch.node.service.NodeService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -41,9 +41,9 @@ public class DeletePipelineTransportAction extends TransportMasterNodeAction ingestNodes = new ArrayList<>(); for (DiscoveryNode node : clusterService.state().nodes()) { - if 
(IngestModule.isIngestEnabled(node.getAttributes())) { + if (NodeModule.isNodeIngestEnabled(node.getAttributes())) { ingestNodes.add(node); } } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 3b9e738f69b..123a5c59038 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -20,9 +20,7 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -31,8 +29,8 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.IngestBootstrapper; import org.elasticsearch.ingest.PipelineStore; +import org.elasticsearch.node.service.NodeService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -43,9 +41,9 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction new DateProcessor.Factory()); - registerProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - registerProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - registerProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - 
registerProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - registerProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - registerProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - registerProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - registerProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); - registerProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - registerProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - registerProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - registerProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); - } - - @Override - protected void configure() { - bind(ProcessorsRegistry.class).toInstance(processorsRegistry); - binder().bind(IngestBootstrapper.class).asEagerSingleton(); - } - - /** - * Adds a processor factory under a specific type name. 
- */ - public void registerProcessor(String type, BiFunction> processorFactoryProvider) { - processorsRegistry.registerProcessor(type, processorFactoryProvider); - } - - public static boolean isIngestEnabled(Settings settings) { - return settings.getAsBoolean("node.ingest", true); - } - - public static boolean isIngestEnabled(ImmutableOpenMap nodeAttributes) { - String ingestEnabled = nodeAttributes.get("ingest"); - //reproduces same logic used in settings.getAsBoolean used above - return Booleans.parseBoolean(ingestEnabled, true); - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java similarity index 52% rename from core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java rename to core/src/main/java/org/elasticsearch/ingest/IngestService.java index cf145821859..da26903a056 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestBootstrapper.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -19,50 +19,28 @@ package org.elasticsearch.ingest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import 
org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; -import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; +import java.io.Closeable; import java.io.IOException; -import java.io.InputStream; /** * Instantiates and wires all the services that the ingest plugin will be needing. * Also the bootstrapper is in charge of starting and stopping the ingest plugin based on the cluster state. */ -public class IngestBootstrapper extends AbstractLifecycleComponent { +public class IngestService implements Closeable { private final Environment environment; private final PipelineStore pipelineStore; private final PipelineExecutionService pipelineExecutionService; private final ProcessorsRegistry processorsRegistry; - // TODO(simonw): I would like to stress this abstraction a little more and move it's construction into - // NodeService and instead of making it AbstractLifecycleComponent just impl Closeable. - // that way we can start the effort of making NodeModule the central point of required service and also move the registration of the - // pipelines into NodeModule? I'd really like to prevent adding yet another module. 
- @Inject - public IngestBootstrapper(Settings settings, ThreadPool threadPool, Environment environment, - ClusterService clusterService, ProcessorsRegistry processorsRegistry) { - super(settings); + public IngestService(Settings settings, ThreadPool threadPool, Environment environment, + ClusterService clusterService, ProcessorsRegistry processorsRegistry) { this.environment = environment; this.processorsRegistry = processorsRegistry; this.pipelineStore = new PipelineStore(settings, clusterService); @@ -77,26 +55,13 @@ public class IngestBootstrapper extends AbstractLifecycleComponent { return pipelineExecutionService; } - @Inject public void setScriptService(ScriptService scriptService) { pipelineStore.buildProcessorFactoryRegistry(processorsRegistry, environment, scriptService); } @Override - protected void doStart() { - } - - @Override - protected void doStop() { - } - - @Override - protected void doClose() { - try { - pipelineStore.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } + public void close() throws IOException { + pipelineStore.close(); } } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 2c4bcbc84a8..ce9a3742876 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -71,10 +71,10 @@ import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; -import org.elasticsearch.ingest.IngestModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import org.elasticsearch.node.service.NodeService; import org.elasticsearch.percolator.PercolatorModule; import 
org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.plugins.Plugin; @@ -196,7 +196,6 @@ public class Node implements Releasable { modules.add(new RepositoriesModule()); modules.add(new TribeModule()); modules.add(new AnalysisModule(environment)); - modules.add(new IngestModule()); pluginsService.processModules(modules); @@ -346,6 +345,12 @@ public class Node implements Releasable { StopWatch stopWatch = new StopWatch("node_close"); stopWatch.start("tribe"); injector.getInstance(TribeService.class).close(); + stopWatch.stop().start("ingest_service"); + try { + injector.getInstance(NodeService.class).getIngestService().close(); + } catch (IOException e) { + logger.warn("IngestService close failed", e); + } stopWatch.stop().start("http"); if (settings.getAsBoolean("http.enabled", true)) { injector.getInstance(HttpServer.class).close(); diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index aa52d389340..c1b707bfd0f 100644 --- a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -20,11 +20,33 @@ package org.elasticsearch.node; import org.elasticsearch.cache.recycler.PageCacheRecycler; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.env.Environment; +import org.elasticsearch.ingest.ProcessorsRegistry; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.TemplateService; +import org.elasticsearch.ingest.processor.AppendProcessor; +import org.elasticsearch.ingest.processor.ConvertProcessor; +import org.elasticsearch.ingest.processor.DateProcessor; +import org.elasticsearch.ingest.processor.FailProcessor; +import 
org.elasticsearch.ingest.processor.GsubProcessor; +import org.elasticsearch.ingest.processor.JoinProcessor; +import org.elasticsearch.ingest.processor.LowercaseProcessor; +import org.elasticsearch.ingest.processor.RemoveProcessor; +import org.elasticsearch.ingest.processor.RenameProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; +import org.elasticsearch.ingest.processor.SplitProcessor; +import org.elasticsearch.ingest.processor.TrimProcessor; +import org.elasticsearch.ingest.processor.UppercaseProcessor; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.node.service.NodeService; +import java.util.function.BiFunction; + /** * */ @@ -32,6 +54,7 @@ public class NodeModule extends AbstractModule { private final Node node; private final MonitorService monitorService; + private final ProcessorsRegistry processorsRegistry; // pkg private so tests can mock Class pageCacheRecyclerImpl = PageCacheRecycler.class; @@ -40,6 +63,21 @@ public class NodeModule extends AbstractModule { public NodeModule(Node node, MonitorService monitorService) { this.node = node; this.monitorService = monitorService; + this.processorsRegistry = new ProcessorsRegistry(); + + registerProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); + registerProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); + registerProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); + registerProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); + registerProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); + registerProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); + registerProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); + 
registerProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); + registerProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); + registerProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); + registerProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); + registerProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); + registerProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); } @Override @@ -58,5 +96,23 @@ public class NodeModule extends AbstractModule { bind(Node.class).toInstance(node); bind(MonitorService.class).toInstance(monitorService); bind(NodeService.class).asEagerSingleton(); + bind(ProcessorsRegistry.class).toInstance(processorsRegistry); + } + + /** + * Adds a processor factory under a specific type name. 
+ */ + public void registerProcessor(String type, BiFunction> processorFactoryProvider) { + processorsRegistry.registerProcessor(type, processorFactoryProvider); + } + + public static boolean isNodeIngestEnabled(Settings settings) { + return settings.getAsBoolean("node.ingest", true); + } + + public static boolean isNodeIngestEnabled(ImmutableOpenMap nodeAttributes) { + String ingestEnabled = nodeAttributes.get("ingest"); + //reproduces same logic used in settings.getAsBoolean used above + return Booleans.parseBoolean(ingestEnabled, true); } } diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index b4fe59e3473..ecf5cd07e11 100644 --- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -24,20 +24,27 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.env.Environment; import org.elasticsearch.http.HttpServer; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.ingest.IngestService; +import org.elasticsearch.ingest.PipelineExecutionService; +import org.elasticsearch.ingest.PipelineStore; +import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.script.ScriptService; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.io.Closeable; import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -55,6 +62,7 @@ public class NodeService extends AbstractComponent { private final IndicesService indicesService; private final PluginsService pluginService; private final CircuitBreakerService circuitBreakerService; + private final IngestService ingestService; private ScriptService scriptService; @Nullable @@ -67,10 +75,10 @@ public class NodeService extends AbstractComponent { private final Discovery discovery; @Inject - public NodeService(Settings settings, ThreadPool threadPool, MonitorService monitorService, Discovery discovery, - TransportService transportService, IndicesService indicesService, - PluginsService pluginService, CircuitBreakerService circuitBreakerService, - Version version) { + public NodeService(Settings settings, Environment environment, ThreadPool threadPool, MonitorService monitorService, + Discovery discovery, TransportService transportService, IndicesService indicesService, + PluginsService pluginService, CircuitBreakerService circuitBreakerService, Version version, + ProcessorsRegistry processorsRegistry, ClusterService clusterService) { super(settings); this.threadPool = threadPool; this.monitorService = monitorService; @@ -81,12 +89,14 @@ public class NodeService extends AbstractComponent { this.version = version; this.pluginService = pluginService; this.circuitBreakerService = circuitBreakerService; + this.ingestService = new IngestService(settings, threadPool, environment, clusterService, processorsRegistry); } // can not use constructor injection or there will be a circular dependency @Inject(optional = true) public void setScriptService(ScriptService scriptService) { this.scriptService = scriptService; + this.ingestService.setScriptService(scriptService); } public void setHttpServer(@Nullable HttpServer httpServer) { @@ -176,4 +186,8 @@ 
public class NodeService extends AbstractComponent { discoveryStats ? discovery.stats() : null ); } + + public IngestService getIngestService() { + return ingestService; + } } diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 6eca00f2aab..0e6204ddd10 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.ingest.IngestModule; import java.io.IOException; import java.util.ArrayList; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index 91c6765520c..8f613aeb258 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -29,13 +29,14 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.IngestBootstrapper; +import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.PipelineExecutionService; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.ingest.core.CompoundProcessor; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.node.service.NodeService; import org.elasticsearch.tasks.Task; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -63,8 +64,10 @@ public class IngestActionFilterTests extends ESTestCase { @Before public void setup() { executionService = mock(PipelineExecutionService.class); - IngestBootstrapper bootstrapper = mock(IngestBootstrapper.class); - when(bootstrapper.getPipelineExecutionService()).thenReturn(executionService); + IngestService ingestService = mock(IngestService.class); + when(ingestService.getPipelineExecutionService()).thenReturn(executionService); + NodeService bootstrapper = mock(NodeService.class); + when(bootstrapper.getIngestService()).thenReturn(ingestService); filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); } @@ -170,8 +173,10 @@ public class IngestActionFilterTests extends ESTestCase { }; when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); executionService = new PipelineExecutionService(store, threadPool); - IngestBootstrapper bootstrapper = mock(IngestBootstrapper.class); - when(bootstrapper.getPipelineExecutionService()).thenReturn(executionService); + IngestService ingestService = mock(IngestService.class); + when(ingestService.getPipelineExecutionService()).thenReturn(executionService); + NodeService bootstrapper = mock(NodeService.class); + when(bootstrapper.getIngestService()).thenReturn(ingestService); filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); BulkRequest bulkRequest = new BulkRequest(); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index ed1bc7ee0ce..042dacce223 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.transport.DummyTransportAddress; -import org.elasticsearch.ingest.IngestModule; +import org.elasticsearch.node.NodeModule; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -245,7 +245,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { @Override protected boolean matchesSafely(DiscoveryNode node) { - return IngestModule.isIngestEnabled(node.getAttributes()); + return NodeModule.isNodeIngestEnabled(node.getAttributes()); } } } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 9742dd1b978..251a070409d 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -35,6 +35,7 @@ import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.node.NodeModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -224,7 +225,7 @@ public class IngestClientIT extends ESIntegTestCase { return "ingest mock"; } - public void onModule(IngestModule ingestModule) { + public void onModule(NodeModule ingestModule) { ingestModule.registerProcessor("test", (environment, templateService) -> config -> new TestProcessor("test", ingestDocument -> { ingestDocument.setFieldValue("processed", true); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestModuleTests.java b/core/src/test/java/org/elasticsearch/node/NodeModuleTests.java similarity index 56% rename from core/src/test/java/org/elasticsearch/ingest/IngestModuleTests.java rename to 
core/src/test/java/org/elasticsearch/node/NodeModuleTests.java index c5abb491161..ad8005d2901 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestModuleTests.java +++ b/core/src/test/java/org/elasticsearch/node/NodeModuleTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.ingest; +package org.elasticsearch.node; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; @@ -25,42 +25,42 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; -public class IngestModuleTests extends ESTestCase { +public class NodeModuleTests extends ESTestCase { - public void testIsIngestEnabledSettings() { - assertThat(IngestModule.isIngestEnabled(Settings.EMPTY), equalTo(true)); - assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", true).build()), equalTo(true)); - assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "true").build()), equalTo(true)); - assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", false).build()), equalTo(false)); + public void testIsNodeIngestEnabledSettings() { + assertThat(NodeModule.isNodeIngestEnabled(Settings.EMPTY), equalTo(true)); + assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", true).build()), equalTo(true)); + assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "true").build()), equalTo(true)); + assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", false).build()), equalTo(false)); - assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "false").build()), equalTo(false)); - assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "off").build()), equalTo(false)); - assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "no").build()), equalTo(false)); - 
assertThat(IngestModule.isIngestEnabled(Settings.builder().put("node.ingest", "0").build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "false").build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "off").build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "no").build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "0").build()), equalTo(false)); } public void testIsIngestEnabledAttributes() { - assertThat(IngestModule.isIngestEnabled(ImmutableOpenMap.builder().build()), equalTo(true)); + assertThat(NodeModule.isNodeIngestEnabled(ImmutableOpenMap.builder().build()), equalTo(true)); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); builder.put("ingest", "true"); - assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(true)); + assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(true)); builder = ImmutableOpenMap.builder(); builder.put("ingest", "false"); - assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); builder = ImmutableOpenMap.builder(); builder.put("ingest", "off"); - assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); builder = ImmutableOpenMap.builder(); builder.put("ingest", "no"); - assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); builder = ImmutableOpenMap.builder(); builder.put("ingest", "0"); - assertThat(IngestModule.isIngestEnabled(builder.build()), equalTo(false)); + assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); } public void 
testIsIngestEnabledMethodsReturnTheSameValue() { @@ -75,6 +75,6 @@ public class IngestModuleTests extends ESTestCase { builder.put("ingest", randomString); ImmutableOpenMap attributes = builder.build(); - assertThat(IngestModule.isIngestEnabled(settings), equalTo(IngestModule.isIngestEnabled(attributes))); + assertThat(NodeModule.isNodeIngestEnabled(settings), equalTo(NodeModule.isNodeIngestEnabled(attributes))); } } diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java index f6423fe40cf..9ca5bc24c9a 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.grok; -import org.elasticsearch.ingest.IngestModule; +import org.elasticsearch.node.NodeModule; import org.elasticsearch.plugins.Plugin; import java.io.BufferedReader; @@ -55,8 +55,8 @@ public class IngestGrokPlugin extends Plugin { return "Ingest processor that uses grok patterns to split text"; } - public void onModule(IngestModule ingestModule) { - ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(builtinPatterns)); + public void onModule(NodeModule nodeModule) { + nodeModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(builtinPatterns)); } static Map loadBuiltinPatterns() throws IOException { diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 81cecb76f5d..4b6a60902ea 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ 
b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.geoip; -import org.elasticsearch.ingest.IngestModule; +import org.elasticsearch.node.NodeModule; import org.elasticsearch.plugins.Plugin; public class IngestGeoIpPlugin extends Plugin { @@ -34,7 +34,7 @@ public class IngestGeoIpPlugin extends Plugin { return "Plugin that allows to plug in ingest processors"; } - public void onModule(IngestModule ingestModule) { - ingestModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + public void onModule(NodeModule nodeModule) { + nodeModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); } } From f8bb06a6640c2afbc159775e04d67c109dc3adf6 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 15 Jan 2016 10:27:42 +0100 Subject: [PATCH 225/347] renamed variables --- .../action/ingest/IngestActionFilterTests.java | 12 ++++++------ .../org/elasticsearch/ingest/IngestClientIT.java | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index 8f613aeb258..33c8a4b565f 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -66,9 +66,9 @@ public class IngestActionFilterTests extends ESTestCase { executionService = mock(PipelineExecutionService.class); IngestService ingestService = mock(IngestService.class); when(ingestService.getPipelineExecutionService()).thenReturn(executionService); - NodeService bootstrapper = mock(NodeService.class); - when(bootstrapper.getIngestService()).thenReturn(ingestService); - filter = new 
IngestActionFilter(Settings.EMPTY, bootstrapper); + NodeService nodeService = mock(NodeService.class); + when(nodeService.getIngestService()).thenReturn(ingestService); + filter = new IngestActionFilter(Settings.EMPTY, nodeService); } public void testApplyNoPipelineId() throws Exception { @@ -175,9 +175,9 @@ public class IngestActionFilterTests extends ESTestCase { executionService = new PipelineExecutionService(store, threadPool); IngestService ingestService = mock(IngestService.class); when(ingestService.getPipelineExecutionService()).thenReturn(executionService); - NodeService bootstrapper = mock(NodeService.class); - when(bootstrapper.getIngestService()).thenReturn(ingestService); - filter = new IngestActionFilter(Settings.EMPTY, bootstrapper); + NodeService nodeService = mock(NodeService.class); + when(nodeService.getIngestService()).thenReturn(ingestService); + filter = new IngestActionFilter(Settings.EMPTY, nodeService); BulkRequest bulkRequest = new BulkRequest(); int numRequest = scaledRandomIntBetween(8, 64); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 251a070409d..fe60c9e04ba 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -225,8 +225,8 @@ public class IngestClientIT extends ESIntegTestCase { return "ingest mock"; } - public void onModule(NodeModule ingestModule) { - ingestModule.registerProcessor("test", (environment, templateService) -> config -> + public void onModule(NodeModule nodeModule) { + nodeModule.registerProcessor("test", (environment, templateService) -> config -> new TestProcessor("test", ingestDocument -> { ingestDocument.setFieldValue("processed", true); if (ingestDocument.getFieldValue("fail", Boolean.class)) { From d2eda422cfce74b24eb98236474220d9b1ec2371 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 15 Jan 2016 14:25:43 +0100 
Subject: [PATCH 226/347] expose node in NodeModule and Environment in Node --- core/src/main/java/org/elasticsearch/node/Node.java | 7 +++++++ core/src/main/java/org/elasticsearch/node/NodeModule.java | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index ce9a3742876..1778867f5a1 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -230,6 +230,13 @@ public class Node implements Releasable { return client; } + /** + * Returns the environment of the node + */ + public Environment getEnvironment() { + return environment; + } + /** * Start the node. If the node is already started, this method is no-op. */ diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index c1b707bfd0f..8622b14e925 100644 --- a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -99,6 +99,13 @@ public class NodeModule extends AbstractModule { bind(ProcessorsRegistry.class).toInstance(processorsRegistry); } + /** + * Returns the node + */ + public Node getNode() { + return node; + } + /** * Adds a processor factory under a specific type name. 
*/ From dd7cae7c19717429099c98536c0186a0677ed593 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 15 Jan 2016 15:02:35 +0100 Subject: [PATCH 227/347] move DatabaseReaders initialization to IngestGeoIpPlugin#onModule --- .../ingest/geoip/GeoIpProcessor.java | 34 +-------------- .../ingest/geoip/IngestGeoIpPlugin.java | 41 ++++++++++++++++++- .../geoip/GeoIpProcessorFactoryTests.java | 37 +++++++++-------- 3 files changed, 61 insertions(+), 51 deletions(-) diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index ab87d51318b..0ffc3cf3a59 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -37,24 +37,17 @@ import org.elasticsearch.ingest.core.Processor; import java.io.Closeable; import java.io.IOException; -import java.io.InputStream; import java.net.InetAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.PathMatcher; -import java.nio.file.StandardOpenOption; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; -import java.util.stream.Stream; import static org.elasticsearch.ingest.core.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.core.ConfigurationUtils.readStringProperty; @@ -230,31 +223,8 @@ public final class GeoIpProcessor implements Processor { private final Map databaseReaders; - public Factory(Path configDirectory) { - - // TODO(simonw): same as fro grok we should load this outside of the factory in a static method and hass the map to the ctor - Path geoIpConfigDirectory = 
configDirectory.resolve("ingest-geoip"); - if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) { - throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist"); - } - - try (Stream databaseFiles = Files.list(geoIpConfigDirectory)) { - Map databaseReaders = new HashMap<>(); - PathMatcher pathMatcher = geoIpConfigDirectory.getFileSystem().getPathMatcher("glob:**.mmdb"); - // Use iterator instead of forEach otherwise IOException needs to be caught twice... - Iterator iterator = databaseFiles.iterator(); - while (iterator.hasNext()) { - Path databasePath = iterator.next(); - if (Files.isRegularFile(databasePath) && pathMatcher.matches(databasePath)) { - try (InputStream inputStream = Files.newInputStream(databasePath, StandardOpenOption.READ)) { - databaseReaders.put(databasePath.getFileName().toString(), new DatabaseReader.Builder(inputStream).build()); - } - } - } - this.databaseReaders = Collections.unmodifiableMap(databaseReaders); - } catch (IOException e) { - throw new RuntimeException(e); - } + public Factory(Map databaseReaders) { + this.databaseReaders = databaseReaders; } public GeoIpProcessor create(Map config) throws Exception { diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 4b6a60902ea..6fdb8703342 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -19,9 +19,22 @@ package org.elasticsearch.ingest.geoip; +import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.node.NodeModule; import org.elasticsearch.plugins.Plugin; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import 
java.nio.file.PathMatcher; +import java.nio.file.StandardOpenOption; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.stream.Stream; + public class IngestGeoIpPlugin extends Plugin { @Override @@ -34,7 +47,31 @@ public class IngestGeoIpPlugin extends Plugin { return "Plugin that allows to plug in ingest processors"; } - public void onModule(NodeModule nodeModule) { - nodeModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(environment.configFile())); + public void onModule(NodeModule nodeModule) throws IOException { + Path geoIpConfigDirectory = nodeModule.getNode().getEnvironment().configFile().resolve("ingest-geoip"); + Map databaseReaders = loadDatabaseReaders(geoIpConfigDirectory); + nodeModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(databaseReaders)); + } + + static Map loadDatabaseReaders(Path geoIpConfigDirectory) throws IOException { + if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) { + throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist"); + } + + Map databaseReaders = new HashMap<>(); + try (Stream databaseFiles = Files.list(geoIpConfigDirectory)) { + PathMatcher pathMatcher = geoIpConfigDirectory.getFileSystem().getPathMatcher("glob:**.mmdb"); + // Use iterator instead of forEach otherwise IOException needs to be caught twice... 
+ Iterator iterator = databaseFiles.iterator(); + while (iterator.hasNext()) { + Path databasePath = iterator.next(); + if (Files.isRegularFile(databasePath) && pathMatcher.matches(databasePath)) { + try (InputStream inputStream = Files.newInputStream(databasePath, StandardOpenOption.READ)) { + databaseReaders.put(databasePath.getFileName().toString(), new DatabaseReader.Builder(inputStream).build()); + } + } + } + } + return Collections.unmodifiableMap(databaseReaders); } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 78dd86d4fdc..20ffe7fe43a 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -19,11 +19,13 @@ package org.elasticsearch.ingest.geoip; +import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; -import org.junit.Before; +import org.junit.BeforeClass; import java.io.ByteArrayInputStream; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -40,19 +42,20 @@ import static org.hamcrest.Matchers.sameInstance; public class GeoIpProcessorFactoryTests extends ESTestCase { - private Path configDir; + private static Map databaseReaders; - @Before - public void prepareConfigDirectory() throws Exception { - this.configDir = createTempDir(); + @BeforeClass + public static void loadDatabaseReaders() throws IOException { + Path configDir = createTempDir(); Path geoIpConfigDir = configDir.resolve("ingest-geoip"); Files.createDirectories(geoIpConfigDir); Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), geoIpConfigDir.resolve("GeoLite2-City.mmdb")); 
Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), geoIpConfigDir.resolve("GeoLite2-Country.mmdb")); + databaseReaders = IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir); } - public void testBuild_defaults() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); + public void testBuildDefaults() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("source_field", "_field"); @@ -64,8 +67,8 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getFields(), sameInstance(GeoIpProcessor.Factory.DEFAULT_FIELDS)); } - public void testBuild_targetField() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); + public void testBuildTargetField() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("source_field", "_field"); config.put("target_field", "_field"); @@ -74,8 +77,8 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getTargetField(), equalTo("_field")); } - public void testBuild_dbFile() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); + public void testBuildDbFile() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("source_field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); @@ -85,8 +88,8 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); } - public void testBuild_nonExistingDbFile() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); + public void testBuildNonExistingDbFile() throws Exception { + 
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("source_field", "_field"); @@ -99,8 +102,8 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { } } - public void testBuild_fields() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); + public void testBuildFields() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Set fields = EnumSet.noneOf(GeoIpProcessor.Field.class); List fieldNames = new ArrayList<>(); @@ -118,8 +121,8 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getFields(), equalTo(fields)); } - public void testBuild_illegalFieldOption() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(configDir); + public void testBuildIllegalFieldOption() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("source_field", "_field"); From 050585e89f8a510dcd9bf2749918d84297c839c3 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 15 Jan 2016 15:12:53 +0100 Subject: [PATCH 228/347] remove BiFunction in favour of Function the environment is now available through NodeModule#getNode#getEnvironment and can be retrieved during onModule(NodeModule), no need for this indirection anymore using the BiFunction --- .../elasticsearch/ingest/IngestService.java | 8 ++--- .../elasticsearch/ingest/PipelineStore.java | 13 ++++---- .../ingest/ProcessorsRegistry.java | 11 +++---- .../org/elasticsearch/node/NodeModule.java | 31 +++++++++---------- .../node/service/NodeService.java | 5 +-- .../elasticsearch/ingest/IngestClientIT.java | 2 +- .../ingest/PipelineStoreTests.java | 6 ++-- .../ingest/ProcessorsRegistryTests.java | 17 +++++----- .../ingest/grok/IngestGrokPlugin.java | 2 +- .../ingest/geoip/IngestGeoIpPlugin.java | 2 +- 10 files changed, 42 insertions(+), 55 
deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java index da26903a056..40bc803f08a 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; @@ -34,14 +33,11 @@ import java.io.IOException; */ public class IngestService implements Closeable { - private final Environment environment; private final PipelineStore pipelineStore; private final PipelineExecutionService pipelineExecutionService; private final ProcessorsRegistry processorsRegistry; - public IngestService(Settings settings, ThreadPool threadPool, Environment environment, - ClusterService clusterService, ProcessorsRegistry processorsRegistry) { - this.environment = environment; + public IngestService(Settings settings, ThreadPool threadPool, ClusterService clusterService, ProcessorsRegistry processorsRegistry) { this.processorsRegistry = processorsRegistry; this.pipelineStore = new PipelineStore(settings, clusterService); this.pipelineExecutionService = new PipelineExecutionService(pipelineStore, threadPool); @@ -56,7 +52,7 @@ public class IngestService implements Closeable { } public void setScriptService(ScriptService scriptService) { - pipelineStore.buildProcessorFactoryRegistry(processorsRegistry, environment, scriptService); + pipelineStore.buildProcessorFactoryRegistry(processorsRegistry, scriptService); } @Override diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index ab11f99246e..6db1d6c0681 100644 --- 
a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -21,6 +21,8 @@ package org.elasticsearch.ingest; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -32,9 +34,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.env.Environment; -import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; @@ -47,7 +46,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.BiFunction; +import java.util.function.Function; public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener { @@ -67,11 +66,11 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust clusterService.add(this); } - public void buildProcessorFactoryRegistry(ProcessorsRegistry processorsRegistry, Environment environment, ScriptService scriptService) { + public void buildProcessorFactoryRegistry(ProcessorsRegistry processorsRegistry, ScriptService scriptService) { Map processorFactories = new HashMap<>(); TemplateService templateService = new InternalTemplateService(scriptService); - for (Map.Entry>> entry : processorsRegistry.entrySet()) { - Processor.Factory 
processorFactory = entry.getValue().apply(environment, templateService); + for (Map.Entry>> entry : processorsRegistry.entrySet()) { + Processor.Factory processorFactory = entry.getValue().apply(templateService); processorFactories.put(entry.getKey(), processorFactory); } this.processorFactoryRegistry = Collections.unmodifiableMap(processorFactories); diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java index 3561d8079c9..766ba772932 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java @@ -19,30 +19,29 @@ package org.elasticsearch.ingest; -import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; import java.util.HashMap; import java.util.Map; import java.util.Set; -import java.util.function.BiFunction; +import java.util.function.Function; public class ProcessorsRegistry { - private final Map>> processorFactoryProviders = new HashMap<>(); + private final Map>> processorFactoryProviders = new HashMap<>(); /** * Adds a processor factory under a specific name. 
*/ - public void registerProcessor(String name, BiFunction> processorFactoryProvider) { - BiFunction> provider = processorFactoryProviders.putIfAbsent(name, processorFactoryProvider); + public void registerProcessor(String name, Function> processorFactoryProvider) { + Function> provider = processorFactoryProviders.putIfAbsent(name, processorFactoryProvider); if (provider != null) { throw new IllegalArgumentException("Processor factory already registered for name [" + name + "]"); } } - public Set>>> entrySet() { + public Set>>> entrySet() { return processorFactoryProviders.entrySet(); } } diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index 8622b14e925..8ef26296fe2 100644 --- a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; @@ -45,7 +44,7 @@ import org.elasticsearch.ingest.processor.UppercaseProcessor; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.node.service.NodeService; -import java.util.function.BiFunction; +import java.util.function.Function; /** * @@ -65,19 +64,19 @@ public class NodeModule extends AbstractModule { this.monitorService = monitorService; this.processorsRegistry = new ProcessorsRegistry(); - registerProcessor(DateProcessor.TYPE, (environment, templateService) -> new DateProcessor.Factory()); - registerProcessor(SetProcessor.TYPE, (environment, templateService) -> new SetProcessor.Factory(templateService)); - 
registerProcessor(AppendProcessor.TYPE, (environment, templateService) -> new AppendProcessor.Factory(templateService)); - registerProcessor(RenameProcessor.TYPE, (environment, templateService) -> new RenameProcessor.Factory()); - registerProcessor(RemoveProcessor.TYPE, (environment, templateService) -> new RemoveProcessor.Factory(templateService)); - registerProcessor(SplitProcessor.TYPE, (environment, templateService) -> new SplitProcessor.Factory()); - registerProcessor(JoinProcessor.TYPE, (environment, templateService) -> new JoinProcessor.Factory()); - registerProcessor(UppercaseProcessor.TYPE, (environment, templateService) -> new UppercaseProcessor.Factory()); - registerProcessor(LowercaseProcessor.TYPE, (environment, templateService) -> new LowercaseProcessor.Factory()); - registerProcessor(TrimProcessor.TYPE, (environment, templateService) -> new TrimProcessor.Factory()); - registerProcessor(ConvertProcessor.TYPE, (environment, templateService) -> new ConvertProcessor.Factory()); - registerProcessor(GsubProcessor.TYPE, (environment, templateService) -> new GsubProcessor.Factory()); - registerProcessor(FailProcessor.TYPE, (environment, templateService) -> new FailProcessor.Factory(templateService)); + registerProcessor(DateProcessor.TYPE, (templateService) -> new DateProcessor.Factory()); + registerProcessor(SetProcessor.TYPE, SetProcessor.Factory::new); + registerProcessor(AppendProcessor.TYPE, AppendProcessor.Factory::new); + registerProcessor(RenameProcessor.TYPE, (templateService) -> new RenameProcessor.Factory()); + registerProcessor(RemoveProcessor.TYPE, RemoveProcessor.Factory::new); + registerProcessor(SplitProcessor.TYPE, (templateService) -> new SplitProcessor.Factory()); + registerProcessor(JoinProcessor.TYPE, (templateService) -> new JoinProcessor.Factory()); + registerProcessor(UppercaseProcessor.TYPE, (templateService) -> new UppercaseProcessor.Factory()); + registerProcessor(LowercaseProcessor.TYPE, (templateService) -> new 
LowercaseProcessor.Factory()); + registerProcessor(TrimProcessor.TYPE, (templateService) -> new TrimProcessor.Factory()); + registerProcessor(ConvertProcessor.TYPE, (templateService) -> new ConvertProcessor.Factory()); + registerProcessor(GsubProcessor.TYPE, (templateService) -> new GsubProcessor.Factory()); + registerProcessor(FailProcessor.TYPE, FailProcessor.Factory::new); } @Override @@ -109,7 +108,7 @@ public class NodeModule extends AbstractModule { /** * Adds a processor factory under a specific type name. */ - public void registerProcessor(String type, BiFunction> processorFactoryProvider) { + public void registerProcessor(String type, Function> processorFactoryProvider) { processorsRegistry.registerProcessor(type, processorFactoryProvider); } diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index ecf5cd07e11..15352eeadd2 100644 --- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -35,8 +35,6 @@ import org.elasticsearch.http.HttpServer; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.ingest.IngestService; -import org.elasticsearch.ingest.PipelineExecutionService; -import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.plugins.PluginsService; @@ -44,7 +42,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.io.Closeable; import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -89,7 +86,7 @@ public class NodeService extends AbstractComponent { this.version = version; this.pluginService = pluginService; this.circuitBreakerService 
= circuitBreakerService; - this.ingestService = new IngestService(settings, threadPool, environment, clusterService, processorsRegistry); + this.ingestService = new IngestService(settings, threadPool, clusterService, processorsRegistry); } // can not use constructor injection or there will be a circular dependency diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index fe60c9e04ba..6227b01a30c 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -226,7 +226,7 @@ public class IngestClientIT extends ESIntegTestCase { } public void onModule(NodeModule nodeModule) { - nodeModule.registerProcessor("test", (environment, templateService) -> config -> + nodeModule.registerProcessor("test", (templateService) -> config -> new TestProcessor("test", ingestDocument -> { ingestDocument.setFieldValue("processed", true); if (ingestDocument.getFieldValue("fail", Boolean.class)) { diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index 19d56858c8f..c1f14b26eb8 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -41,8 +41,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; -import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; public class PipelineStoreTests extends ESTestCase { @@ -54,8 +52,8 @@ public class PipelineStoreTests extends ESTestCase { ClusterService clusterService = mock(ClusterService.class); store = new PipelineStore(Settings.EMPTY, clusterService); ProcessorsRegistry 
registry = new ProcessorsRegistry(); - registry.registerProcessor("set", (environment, templateService) -> new SetProcessor.Factory(TestTemplateService.instance())); - store.buildProcessorFactoryRegistry(registry, null, null); + registry.registerProcessor("set", (templateService) -> new SetProcessor.Factory(TestTemplateService.instance())); + store.buildProcessorFactoryRegistry(registry, null); } public void testUpdatePipelines() { diff --git a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java index 2869fffbafc..ad18488d990 100644 --- a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java @@ -19,14 +19,13 @@ package org.elasticsearch.ingest; -import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.test.ESTestCase; import java.util.Map; import java.util.Set; -import java.util.function.BiFunction; +import java.util.function.Function; import static org.hamcrest.CoreMatchers.equalTo; @@ -35,24 +34,24 @@ public class ProcessorsRegistryTests extends ESTestCase { public void testAddProcessor() { ProcessorsRegistry processorsRegistry = new ProcessorsRegistry(); TestProcessor.Factory factory1 = new TestProcessor.Factory(); - processorsRegistry.registerProcessor("1", (environment, templateService) -> factory1); + processorsRegistry.registerProcessor("1", (templateService) -> factory1); TestProcessor.Factory factory2 = new TestProcessor.Factory(); - processorsRegistry.registerProcessor("2", (environment, templateService) -> factory2); + processorsRegistry.registerProcessor("2", (templateService) -> factory2); TestProcessor.Factory factory3 = new TestProcessor.Factory(); try { - processorsRegistry.registerProcessor("1", (environment, templateService) -> factory3); + 
processorsRegistry.registerProcessor("1", (templateService) -> factory3); fail("addProcessor should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Processor factory already registered for name [1]")); } - Set>>> entrySet = processorsRegistry.entrySet(); + Set>>> entrySet = processorsRegistry.entrySet(); assertThat(entrySet.size(), equalTo(2)); - for (Map.Entry>> entry : entrySet) { + for (Map.Entry>> entry : entrySet) { if (entry.getKey().equals("1")) { - assertThat(entry.getValue().apply(null, null), equalTo(factory1)); + assertThat(entry.getValue().apply(null), equalTo(factory1)); } else if (entry.getKey().equals("2")) { - assertThat(entry.getValue().apply(null, null), equalTo(factory2)); + assertThat(entry.getValue().apply(null), equalTo(factory2)); } else { fail("unexpected processor id [" + entry.getKey() + "]"); } diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java index 9ca5bc24c9a..54800ac1603 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/IngestGrokPlugin.java @@ -56,7 +56,7 @@ public class IngestGrokPlugin extends Plugin { } public void onModule(NodeModule nodeModule) { - nodeModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(builtinPatterns)); + nodeModule.registerProcessor(GrokProcessor.TYPE, (templateService) -> new GrokProcessor.Factory(builtinPatterns)); } static Map loadBuiltinPatterns() throws IOException { diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 6fdb8703342..4b67ffcbd71 100644 --- 
a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -50,7 +50,7 @@ public class IngestGeoIpPlugin extends Plugin { public void onModule(NodeModule nodeModule) throws IOException { Path geoIpConfigDirectory = nodeModule.getNode().getEnvironment().configFile().resolve("ingest-geoip"); Map databaseReaders = loadDatabaseReaders(geoIpConfigDirectory); - nodeModule.registerProcessor(GeoIpProcessor.TYPE, (environment, templateService) -> new GeoIpProcessor.Factory(databaseReaders)); + nodeModule.registerProcessor(GeoIpProcessor.TYPE, (templateService) -> new GeoIpProcessor.Factory(databaseReaders)); } static Map loadDatabaseReaders(Path geoIpConfigDirectory) throws IOException { From af36e5f01ec741686c8bf13110e7be887376bae6 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 15 Jan 2016 15:14:48 +0100 Subject: [PATCH 229/347] updated ingest-geoip plugin description --- .../java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 4b67ffcbd71..f92cb7b479f 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -44,7 +44,7 @@ public class IngestGeoIpPlugin extends Plugin { @Override public String description() { - return "Plugin that allows to plug in ingest processors"; + return "Ingest processor that adds information about the geographical location of ip addresses"; } public void onModule(NodeModule nodeModule) throws IOException { From 1754eece664d4a47fb4668ce5ef728534e14fa99 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 12 Jan 2016 16:58:44 
-0800 Subject: [PATCH 230/347] introduce DeDotProcessor fixes #15944. --- .../ingest/core/IngestDocument.java | 2 +- .../ingest/processor/DeDotProcessor.java | 102 ++++++++++++++++++ .../org/elasticsearch/node/NodeModule.java | 2 + .../processor/DeDotProcessorFactoryTests.java | 52 +++++++++ .../ingest/processor/DeDotProcessorTests.java | 75 +++++++++++++ docs/plugins/ingest.asciidoc | 14 +++ .../test/ingest/80_dedot_processor.yaml | 33 ++++++ 7 files changed, 279 insertions(+), 1 deletion(-) create mode 100644 core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java create mode 100644 core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java create mode 100644 core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java index b5c40e172af..c8f87faa53e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java @@ -463,7 +463,7 @@ public final class IngestDocument { /** * Returns the document including its metadata fields, unless {@link #extractMetadata()} has been called, in which case the - * metadata fields will not be present anymore. Should be used only for reading. + * metadata fields will not be present anymore. 
* Modify the document instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} */ public Map getSourceAndMetadata() { diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java new file mode 100644 index 00000000000..81349bac1cf --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * Processor that replaces dots in document field names with a + * specified separator. 
+ */ +public class DeDotProcessor implements Processor { + + public static final String TYPE = "dedot"; + static final String DEFAULT_SEPARATOR = "_"; + + private final String separator; + + public DeDotProcessor(String separator) { + this.separator = separator; + } + + public String getSeparator() { + return separator; + } + + @Override + public void execute(IngestDocument document) { + deDot(document.getSourceAndMetadata()); + } + + @Override + public String getType() { + return TYPE; + } + + /** + * Recursively iterates through Maps and Lists in search of map entries with + * keys containing dots. The dots in these fields are replaced with {@link #separator}. + * + * @param obj The current object in context to be checked for dots in its fields. + */ + private void deDot(Object obj) { + if (obj instanceof Map) { + @SuppressWarnings("unchecked") + Map doc = (Map) obj; + Iterator> it = doc.entrySet().iterator(); + Map deDottedFields = new HashMap<>(); + while (it.hasNext()) { + Map.Entry entry = it.next(); + deDot(entry.getValue()); + String fieldName = entry.getKey(); + if (fieldName.contains(".")) { + String deDottedFieldName = fieldName.replaceAll("\\.", separator); + deDottedFields.put(deDottedFieldName, entry.getValue()); + it.remove(); + } + } + doc.putAll(deDottedFields); + } else if (obj instanceof List) { + @SuppressWarnings("unchecked") + List list = (List) obj; + list.forEach(this::deDot); + } + } + + public static class Factory implements Processor.Factory { + + @Override + public DeDotProcessor create(Map config) throws Exception { + String separator = ConfigurationUtils.readOptionalStringProperty(config, "separator"); + if (separator == null) { + separator = DEFAULT_SEPARATOR; + } + return new DeDotProcessor(separator); + } + } +} + diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index 8ef26296fe2..1844c269754 100644 --- 
a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -31,6 +31,7 @@ import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.processor.AppendProcessor; import org.elasticsearch.ingest.processor.ConvertProcessor; import org.elasticsearch.ingest.processor.DateProcessor; +import org.elasticsearch.ingest.processor.DeDotProcessor; import org.elasticsearch.ingest.processor.FailProcessor; import org.elasticsearch.ingest.processor.GsubProcessor; import org.elasticsearch.ingest.processor.JoinProcessor; @@ -77,6 +78,7 @@ public class NodeModule extends AbstractModule { registerProcessor(ConvertProcessor.TYPE, (templateService) -> new ConvertProcessor.Factory()); registerProcessor(GsubProcessor.TYPE, (templateService) -> new GsubProcessor.Factory()); registerProcessor(FailProcessor.TYPE, FailProcessor.Factory::new); + registerProcessor(DeDotProcessor.TYPE, (templateService) -> new DeDotProcessor.Factory()); } @Override diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java new file mode 100644 index 00000000000..620958b142c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class DeDotProcessorFactoryTests extends ESTestCase { + + private DeDotProcessor.Factory factory; + + @Before + public void init() { + factory = new DeDotProcessor.Factory(); + } + + public void testCreate() throws Exception { + Map config = new HashMap<>(); + config.put("separator", "_"); + DeDotProcessor deDotProcessor = factory.create(config); + assertThat(deDotProcessor.getSeparator(), equalTo("_")); + } + + public void testCreateMissingSeparatorField() throws Exception { + Map config = new HashMap<>(); + DeDotProcessor deDotProcessor = factory.create(config); + assertThat(deDotProcessor.getSeparator(), equalTo(DeDotProcessor.DEFAULT_SEPARATOR)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java new file mode 100644 index 00000000000..be6426ede36 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class DeDotProcessorTests extends ESTestCase { + + public void testSimple() throws Exception { + Map source = new HashMap<>(); + source.put("a.b", "hello world!"); + IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); + String separator = randomUnicodeOfCodepointLengthBetween(1, 10); + Processor processor = new DeDotProcessor(separator); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSourceAndMetadata().get("a" + separator + "b" ), equalTo("hello world!")); + } + + public void testSimpleMap() throws Exception { + Map source = new HashMap<>(); + Map subField = new HashMap<>(); + subField.put("b.c", "hello world!"); + source.put("a", subField); + IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); + Processor processor = new DeDotProcessor("_"); + processor.execute(ingestDocument); + + IngestDocument expectedDocument = new IngestDocument( + Collections.singletonMap("a", Collections.singletonMap("b_c", "hello world!")), + 
Collections.emptyMap()); + assertThat(ingestDocument, equalTo(expectedDocument)); + } + + public void testSimpleList() throws Exception { + Map source = new HashMap<>(); + Map subField = new HashMap<>(); + subField.put("b.c", "hello world!"); + source.put("a", Arrays.asList(subField)); + IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); + Processor processor = new DeDotProcessor("_"); + processor.execute(ingestDocument); + + IngestDocument expectedDocument = new IngestDocument( + Collections.singletonMap("a", + Collections.singletonList(Collections.singletonMap("b_c", "hello world!"))), + Collections.emptyMap()); + assertThat(ingestDocument, equalTo(expectedDocument)); + } +} diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index ea42932e561..906479412e7 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -463,6 +463,20 @@ to the requester. } -------------------------------------------------- +==== DeDot Processor +The DeDot Processor is used to remove dots (".") from field names and +replace them with a specific `separator` string. + +[source,js] +-------------------------------------------------- +{ + "dedot": { + "separator": "_" + } +} +-------------------------------------------------- + + === Accessing data in pipelines Processors in pipelines have read and write access to documents that pass through the pipeline. 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml new file mode 100644 index 00000000000..8aedb4099d8 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml @@ -0,0 +1,33 @@ +--- +"Test De-Dot Processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "dedot" : { + "separator" : "3" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {"a.b.c": "hello world"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.a3b3c: "hello world" } + From 9f48df9736655af2038a41af4f210e9336121f9e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 6 Jan 2016 16:46:37 -0800 Subject: [PATCH 231/347] Add on_failure support for verbose _simulate execution and introduce optional processor_tag to Processors --- .../ingest/SimulateExecutionService.java | 39 ++++-- .../ingest/SimulateProcessorResult.java | 24 ++-- .../ingest/core/AbstractProcessorFactory.java | 39 ++++++ .../ingest/core/CompoundProcessor.java | 9 +- .../elasticsearch/ingest/core/Pipeline.java | 14 +- .../elasticsearch/ingest/core/Processor.java | 6 +- .../processor/AbstractStringProcessor.java | 19 ++- .../ingest/processor/AppendProcessor.java | 16 ++- .../ingest/processor/ConvertProcessor.java | 16 ++- .../ingest/processor/DateProcessor.java | 16 ++- .../ingest/processor/DeDotProcessor.java | 16 ++- .../ingest/processor/FailProcessor.java | 16 ++- .../ingest/processor/GsubProcessor.java | 16 ++- .../ingest/processor/JoinProcessor.java | 16 ++- .../ingest/processor/LowercaseProcessor.java | 8 +- .../ingest/processor/RemoveProcessor.java | 15 ++- .../ingest/processor/RenameProcessor.java | 16 ++- .../ingest/processor/SetProcessor.java | 16 ++- 
.../ingest/processor/SplitProcessor.java | 16 ++- .../ingest/processor/TrimProcessor.java | 8 +- .../ingest/processor/UppercaseProcessor.java | 8 +- .../ingest/IngestActionFilterTests.java | 5 + .../SimulateDocumentSimpleResultTests.java | 1 - .../ingest/SimulateExecutionServiceTests.java | 100 ++++++++++++-- .../SimulatePipelineRequestParsingTests.java | 1 - .../ingest/SimulatePipelineResponseTests.java | 7 +- .../ingest/SimulateProcessorResultTests.java | 3 +- .../elasticsearch/ingest/IngestClientIT.java | 4 +- .../ingest/core/CompoundProcessorTests.java | 6 +- .../ingest/core/PipelineFactoryTests.java | 15 ++- .../AppendProcessorFactoryTests.java | 9 +- .../processor/AppendProcessorTests.java | 2 +- .../ConvertProcessorFactoryTests.java | 3 + .../processor/ConvertProcessorTests.java | 28 ++-- .../processor/DateProcessorFactoryTests.java | 4 +- .../ingest/processor/DateProcessorTests.java | 14 +- .../processor/DeDotProcessorFactoryTests.java | 3 + .../ingest/processor/DeDotProcessorTests.java | 6 +- .../processor/FailProcessorFactoryTests.java | 3 + .../ingest/processor/FailProcessorTests.java | 2 +- .../processor/GsubProcessorFactoryTests.java | 3 + .../ingest/processor/GsubProcessorTests.java | 8 +- .../processor/JoinProcessorFactoryTests.java | 3 + .../ingest/processor/JoinProcessorTests.java | 10 +- .../LowercaseProcessorFactoryTests.java | 3 + .../processor/LowercaseProcessorTests.java | 2 +- .../RemoveProcessorFactoryTests.java | 3 + .../processor/RemoveProcessorTests.java | 4 +- .../RenameProcessorFactoryTests.java | 3 + .../processor/RenameProcessorTests.java | 16 +-- .../processor/SetProcessorFactoryTests.java | 3 + .../ingest/processor/SetProcessorTests.java | 2 +- .../processor/SplitProcessorFactoryTests.java | 3 + .../ingest/processor/SplitProcessorTests.java | 8 +- .../processor/TrimProcessorFactoryTests.java | 3 + .../ingest/processor/TrimProcessorTests.java | 2 +- .../UppercaseProcessorFactoryTests.java | 3 + 
.../processor/UppercaseProcessorTests.java | 2 +- .../ingest/grok/GrokProcessor.java | 17 ++- .../grok/GrokProcessorFactoryTests.java | 3 + .../ingest/grok/GrokProcessorTests.java | 10 +- .../ingest/geoip/GeoIpProcessor.java | 17 ++- .../geoip/GeoIpProcessorFactoryTests.java | 4 + .../ingest/geoip/GeoIpProcessorTests.java | 8 +- .../test/ingest/40_simulate.yaml | 122 ++++++++++++++++-- .../elasticsearch/ingest/TestProcessor.java | 18 ++- 66 files changed, 631 insertions(+), 214 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessorFactory.java diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index 99dd1358cda..85c76cf18ae 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.CompoundProcessor; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; @@ -39,20 +40,38 @@ class SimulateExecutionService { this.threadPool = threadPool; } + void executeVerboseDocument(Processor processor, IngestDocument ingestDocument, List processorResultList) throws Exception { + if (processor instanceof CompoundProcessor) { + CompoundProcessor cp = (CompoundProcessor) processor; + try { + for (Processor p : cp.getProcessors()) { + executeVerboseDocument(p, ingestDocument, processorResultList); + } + } catch (Exception e) { + for (Processor p : cp.getOnFailureProcessors()) { + executeVerboseDocument(p, ingestDocument, processorResultList); + } + } + } else { + try { + processor.execute(ingestDocument); + 
processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument))); + } catch (Exception e) { + processorResultList.add(new SimulateProcessorResult(processor.getTag(), e)); + throw e; + } + } + } + SimulateDocumentResult executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose) { if (verbose) { List processorResultList = new ArrayList<>(); IngestDocument currentIngestDocument = new IngestDocument(ingestDocument); - for (int i = 0; i < pipeline.getProcessors().size(); i++) { - Processor processor = pipeline.getProcessors().get(i); - String processorId = "processor[" + processor.getType() + "]-" + i; - try { - processor.execute(currentIngestDocument); - processorResultList.add(new SimulateProcessorResult(processorId, currentIngestDocument)); - } catch (Exception e) { - processorResultList.add(new SimulateProcessorResult(processorId, e)); - } - currentIngestDocument = new IngestDocument(currentIngestDocument); + CompoundProcessor pipelineProcessor = new CompoundProcessor(pipeline.getProcessors(), pipeline.getOnFailureProcessors()); + try { + executeVerboseDocument(pipelineProcessor, currentIngestDocument, processorResultList); + } catch (Exception e) { + return new SimulateDocumentSimpleResult(e); } return new SimulateDocumentVerboseResult(processorResultList); } else { diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 25680152049..664925b3640 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -24,19 +24,17 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import 
org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.ingest.core.IngestDocument; import java.io.IOException; -import java.util.Collections; public class SimulateProcessorResult implements Writeable, ToXContent { - private final String processorId; + private final String processorTag; private final WriteableIngestDocument ingestDocument; private final Exception failure; public SimulateProcessorResult(StreamInput in) throws IOException { - this.processorId = in.readString(); + this.processorTag = in.readString(); if (in.readBoolean()) { this.failure = in.readThrowable(); this.ingestDocument = null; @@ -46,14 +44,14 @@ public class SimulateProcessorResult implements Writeable implements Processor.Factory

{ + static final String PROCESSOR_TAG_KEY = "processor_tag"; + + @Override + public P create(Map config) throws Exception { + String tag = ConfigurationUtils.readOptionalStringProperty(config, PROCESSOR_TAG_KEY); + return doCreate(tag, config); + } + + protected abstract P doCreate(String tag, Map config) throws Exception; +} diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java index f25e13637c4..bad94de6c2b 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.stream.Collectors; /** @@ -42,6 +43,7 @@ public class CompoundProcessor implements Processor { } public CompoundProcessor(List processors, List onFailureProcessors) { + super(); this.processors = processors; this.onFailureProcessors = onFailureProcessors; } @@ -56,7 +58,12 @@ public class CompoundProcessor implements Processor { @Override public String getType() { - return "compound[" + processors.stream().map(Processor::getType).collect(Collectors.joining(",")) + "]"; + return "compound"; + } + + @Override + public String getTag() { + return "compound-processor-" + Objects.hash(processors, onFailureProcessors); } @Override diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java index ebf570710e5..68ba8da4855 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java @@ -94,17 +94,17 @@ public final class Pipeline { } private List readProcessors(String fieldName, Map processorRegistry, Map config) throws Exception { - List>> onFailureProcessorConfigs = 
ConfigurationUtils.readOptionalList(config, fieldName); - List onFailureProcessors = new ArrayList<>(); - if (onFailureProcessorConfigs != null) { - for (Map> processorConfigWithKey : onFailureProcessorConfigs) { + List>> processorConfigs = ConfigurationUtils.readOptionalList(config, fieldName); + List processors = new ArrayList<>(); + if (processorConfigs != null) { + for (Map> processorConfigWithKey : processorConfigs) { for (Map.Entry> entry : processorConfigWithKey.entrySet()) { - onFailureProcessors.add(readProcessor(processorRegistry, entry.getKey(), entry.getValue())); + processors.add(readProcessor(processorRegistry, entry.getKey(), entry.getValue())); } } } - return onFailureProcessors; + return processors; } private Processor readProcessor(Map processorRegistry, String type, Map config) throws Exception { @@ -122,7 +122,5 @@ public final class Pipeline { } throw new IllegalArgumentException("No processor type exists with name [" + type + "]"); } - - } } diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Processor.java b/core/src/main/java/org/elasticsearch/ingest/core/Processor.java index 9c29894fa5f..f178051b751 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/Processor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Processor.java @@ -38,6 +38,11 @@ public interface Processor { */ String getType(); + /** + * Gets the tag of a processor. + */ + String getTag(); + /** * A factory that knows how to construct a processor based on a map of maps. */ @@ -50,6 +55,5 @@ public interface Processor { * verify if all configurations settings have been used. 
*/ P create(Map config) throws Exception; - } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index 8fb73cff896..c5dbef4b363 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; @@ -31,9 +32,11 @@ import java.util.Map; */ public abstract class AbstractStringProcessor implements Processor { + private final String processorTag; private final String field; - protected AbstractStringProcessor(String field) { + protected AbstractStringProcessor(String processorTag, String field) { + this.processorTag = processorTag; this.field = field; } @@ -50,15 +53,21 @@ public abstract class AbstractStringProcessor implements Processor { document.setFieldValue(field, process(val)); } + @Override + public String getTag() { + return processorTag; + } + protected abstract String process(String value); - public static abstract class Factory implements Processor.Factory { + public static abstract class Factory extends AbstractProcessorFactory { + @Override - public T create(Map config) throws Exception { + public T doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); - return newProcessor(field); + return newProcessor(processorTag, field); } - protected abstract T newProcessor(String field); + protected abstract T newProcessor(String processorTag, String field); } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java 
b/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java index 108cc5d40d0..5917d5c5da2 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.core.ValueSource; @@ -36,10 +37,12 @@ public class AppendProcessor implements Processor { public static final String TYPE = "append"; + private final String processorTag; private final TemplateService.Template field; private final ValueSource value; - AppendProcessor(TemplateService.Template field, ValueSource value) { + AppendProcessor(String processorTag, TemplateService.Template field, ValueSource value) { + this.processorTag = processorTag; this.field = field; this.value = value; } @@ -62,7 +65,12 @@ public class AppendProcessor implements Processor { return TYPE; } - public static final class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static final class Factory extends AbstractProcessorFactory { private final TemplateService templateService; @@ -71,10 +79,10 @@ public class AppendProcessor implements Processor { } @Override - public AppendProcessor create(Map config) throws Exception { + public AppendProcessor doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); Object value = ConfigurationUtils.readObject(config, "value"); - return new AppendProcessor(templateService.compile(field), ValueSource.wrap(value, templateService)); + return new AppendProcessor(processorTag, templateService.compile(field), ValueSource.wrap(value, templateService)); } } } diff --git 
a/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java index c7f260c2e3d..58f5bc57d16 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.Processor; @@ -90,10 +91,12 @@ public class ConvertProcessor implements Processor { public static final String TYPE = "convert"; + private final String processorTag; private final String field; private final Type convertType; - ConvertProcessor(String field, Type convertType) { + ConvertProcessor(String processorTag, String field, Type convertType) { + this.processorTag = processorTag; this.field = field; this.convertType = convertType; } @@ -132,12 +135,17 @@ public class ConvertProcessor implements Processor { return TYPE; } - public static class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static class Factory extends AbstractProcessorFactory { @Override - public ConvertProcessor create(Map config) throws Exception { + public ConvertProcessor doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); Type convertType = Type.fromString(ConfigurationUtils.readStringProperty(config, "type")); - return new ConvertProcessor(field, convertType); + return new ConvertProcessor(processorTag, field, convertType); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 46a6e92fedf..45f05aab7e0 
100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.Processor; @@ -39,6 +40,7 @@ public final class DateProcessor implements Processor { public static final String TYPE = "date"; static final String DEFAULT_TARGET_FIELD = "@timestamp"; + private final String processorTag; private final DateTimeZone timezone; private final Locale locale; private final String matchField; @@ -46,7 +48,8 @@ public final class DateProcessor implements Processor { private final List matchFormats; private final List> dateParsers; - DateProcessor(DateTimeZone timezone, Locale locale, String matchField, List matchFormats, String targetField) { + DateProcessor(String processorTag, DateTimeZone timezone, Locale locale, String matchField, List matchFormats, String targetField) { + this.processorTag = processorTag; this.timezone = timezone; this.locale = locale; this.matchField = matchField; @@ -93,6 +96,11 @@ public final class DateProcessor implements Processor { return TYPE; } + @Override + public String getTag() { + return processorTag; + } + DateTimeZone getTimezone() { return timezone; } @@ -113,10 +121,10 @@ public final class DateProcessor implements Processor { return matchFormats; } - public static class Factory implements Processor.Factory { + public static class Factory extends AbstractProcessorFactory { @SuppressWarnings("unchecked") - public DateProcessor create(Map config) throws Exception { + public DateProcessor doCreate(String processorTag, Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "match_field"); String targetField = 
ConfigurationUtils.readStringProperty(config, "target_field", DEFAULT_TARGET_FIELD); String timezoneString = ConfigurationUtils.readOptionalStringProperty(config, "timezone"); @@ -131,7 +139,7 @@ public final class DateProcessor implements Processor { } } List matchFormats = ConfigurationUtils.readList(config, "match_formats"); - return new DateProcessor(timezone, locale, matchField, matchFormats, targetField); + return new DateProcessor(processorTag, timezone, locale, matchField, matchFormats, targetField); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java index 81349bac1cf..12cc3654967 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; @@ -37,9 +38,11 @@ public class DeDotProcessor implements Processor { public static final String TYPE = "dedot"; static final String DEFAULT_SEPARATOR = "_"; + private final String processorTag; private final String separator; - public DeDotProcessor(String separator) { + public DeDotProcessor(String processorTag, String separator) { + this.processorTag = processorTag; this.separator = separator; } @@ -57,6 +60,11 @@ public class DeDotProcessor implements Processor { return TYPE; } + @Override + public String getTag() { + return processorTag; + } + /** * Recursively iterates through Maps and Lists in search of map entries with * keys containing dots. The dots in these fields are replaced with {@link #separator}. 
@@ -87,15 +95,15 @@ public class DeDotProcessor implements Processor { } } - public static class Factory implements Processor.Factory { + public static class Factory extends AbstractProcessorFactory { @Override - public DeDotProcessor create(Map config) throws Exception { + public DeDotProcessor doCreate(String processorTag, Map config) throws Exception { String separator = ConfigurationUtils.readOptionalStringProperty(config, "separator"); if (separator == null) { separator = DEFAULT_SEPARATOR; } - return new DeDotProcessor(separator); + return new DeDotProcessor(processorTag, separator); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java index 574e41fedb7..1af1a5f9a62 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; @@ -34,9 +35,11 @@ public class FailProcessor implements Processor { public static final String TYPE = "fail"; + private final String processorTag; private final TemplateService.Template message; - FailProcessor(TemplateService.Template message) { + FailProcessor(String processorTag, TemplateService.Template message) { + this.processorTag = processorTag; this.message = message; } @@ -54,7 +57,12 @@ public class FailProcessor implements Processor { return TYPE; } - public static class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static class Factory extends AbstractProcessorFactory { private final TemplateService templateService; @@ -63,9 +71,9 @@ public class FailProcessor 
implements Processor { } @Override - public FailProcessor create(Map config) throws Exception { + public FailProcessor doCreate(String processorTag, Map config) throws Exception { String message = ConfigurationUtils.readStringProperty(config, "message"); - return new FailProcessor(templateService.compile(message)); + return new FailProcessor(processorTag, templateService.compile(message)); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java index c201729eb6d..763aefead8b 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.Processor; @@ -35,11 +36,13 @@ public class GsubProcessor implements Processor { public static final String TYPE = "gsub"; + private final String processorTag; private final String field; private final Pattern pattern; private final String replacement; - GsubProcessor(String field, Pattern pattern, String replacement) { + GsubProcessor(String processorTag, String field, Pattern pattern, String replacement) { + this.processorTag = processorTag; this.field = field; this.pattern = pattern; this.replacement = replacement; @@ -74,14 +77,19 @@ public class GsubProcessor implements Processor { return TYPE; } - public static class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static class Factory extends AbstractProcessorFactory { @Override - public GsubProcessor create(Map config) throws Exception { + public GsubProcessor doCreate(String processorTag, Map config) throws Exception { String field 
= ConfigurationUtils.readStringProperty(config, "field"); String pattern = ConfigurationUtils.readStringProperty(config, "pattern"); String replacement = ConfigurationUtils.readStringProperty(config, "replacement"); Pattern searchPattern = Pattern.compile(pattern); - return new GsubProcessor(field, searchPattern, replacement); + return new GsubProcessor(processorTag, field, searchPattern, replacement); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java index 08ac9d1939c..a2f50867c0d 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.Processor; @@ -35,10 +36,12 @@ public class JoinProcessor implements Processor { public static final String TYPE = "join"; + private final String processorTag; private final String field; private final String separator; - JoinProcessor(String field, String separator) { + JoinProcessor(String processorTag, String field, String separator) { + this.processorTag = processorTag; this.field = field; this.separator = separator; } @@ -68,12 +71,17 @@ public class JoinProcessor implements Processor { return TYPE; } - public static class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static class Factory extends AbstractProcessorFactory { @Override - public JoinProcessor create(Map config) throws Exception { + public JoinProcessor doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); String separator = 
ConfigurationUtils.readStringProperty(config, "separator"); - return new JoinProcessor(field, separator); + return new JoinProcessor(processorTag, field, separator); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java index e3e49f77c59..f03d0b1a2c3 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java @@ -30,8 +30,8 @@ public class LowercaseProcessor extends AbstractStringProcessor { public static final String TYPE = "lowercase"; - LowercaseProcessor(String field) { - super(field); + LowercaseProcessor(String processorId, String field) { + super(processorId, field); } @Override @@ -46,8 +46,8 @@ public class LowercaseProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected LowercaseProcessor newProcessor(String field) { - return new LowercaseProcessor(field); + protected LowercaseProcessor newProcessor(String processorTag, String field) { + return new LowercaseProcessor(processorTag, field); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java index a3c5f761b65..1a9ad7323bb 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.core.ConfigurationUtils; @@ -33,9 +34,11 @@ public class RemoveProcessor implements Processor { public static final String TYPE = 
"remove"; + private final String processorTag; private final TemplateService.Template field; - RemoveProcessor(TemplateService.Template field) { + RemoveProcessor(String processorTag, TemplateService.Template field) { + this.processorTag = processorTag; this.field = field; } @@ -53,7 +56,11 @@ public class RemoveProcessor implements Processor { return TYPE; } - public static class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + public static class Factory extends AbstractProcessorFactory { private final TemplateService templateService; @@ -62,9 +69,9 @@ public class RemoveProcessor implements Processor { } @Override - public RemoveProcessor create(Map config) throws Exception { + public RemoveProcessor doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); - return new RemoveProcessor(templateService.compile(field)); + return new RemoveProcessor(processorTag, templateService.compile(field)); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java index 5a9e4d5d40c..5528d2f5210 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.Processor; @@ -32,10 +33,12 @@ public class RenameProcessor implements Processor { public static final String TYPE = "rename"; + private final String processorTag; private final String oldFieldName; private final String newFieldName; - RenameProcessor(String oldFieldName, String newFieldName) { + 
RenameProcessor(String processorTag, String oldFieldName, String newFieldName) { + this.processorTag = processorTag; this.oldFieldName = oldFieldName; this.newFieldName = newFieldName; } @@ -73,12 +76,17 @@ public class RenameProcessor implements Processor { return TYPE; } - public static class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static class Factory extends AbstractProcessorFactory { @Override - public RenameProcessor create(Map config) throws Exception { + public RenameProcessor doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); String newField = ConfigurationUtils.readStringProperty(config, "to"); - return new RenameProcessor(field, newField); + return new RenameProcessor(processorTag, field, newField); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java index a43e60587de..8f706bd8893 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.core.ValueSource; @@ -35,10 +36,12 @@ public class SetProcessor implements Processor { public static final String TYPE = "set"; + private final String processorTag; private final TemplateService.Template field; private final ValueSource value; - SetProcessor(TemplateService.Template field, ValueSource value) { + SetProcessor(String processorTag, TemplateService.Template field, ValueSource value) { + this.processorTag = processorTag; this.field = field; this.value = value; } @@ -61,7 +64,12 @@ 
public class SetProcessor implements Processor { return TYPE; } - public static final class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static final class Factory extends AbstractProcessorFactory { private final TemplateService templateService; @@ -70,10 +78,10 @@ public class SetProcessor implements Processor { } @Override - public SetProcessor create(Map config) throws Exception { + public SetProcessor doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); Object value = ConfigurationUtils.readObject(config, "value"); - return new SetProcessor(templateService.compile(field), ValueSource.wrap(value, templateService)); + return new SetProcessor(processorTag, templateService.compile(field), ValueSource.wrap(value, templateService)); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java index b1d9c23eb54..a0f62769802 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.Processor; @@ -35,10 +36,12 @@ public class SplitProcessor implements Processor { public static final String TYPE = "split"; + private final String processorTag; private final String field; private final String separator; - SplitProcessor(String field, String separator) { + SplitProcessor(String processorTag, String field, String separator) { + this.processorTag = processorTag; this.field = field; this.separator = separator; } @@ -65,11 +68,16 @@ public class 
SplitProcessor implements Processor { return TYPE; } - public static class Factory implements Processor.Factory { + @Override + public String getTag() { + return processorTag; + } + + public static class Factory extends AbstractProcessorFactory { @Override - public SplitProcessor create(Map config) throws Exception { + public SplitProcessor doCreate(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(config, "field"); - return new SplitProcessor(field, ConfigurationUtils.readStringProperty(config, "separator")); + return new SplitProcessor(processorTag, field, ConfigurationUtils.readStringProperty(config, "separator")); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java index d1325510cd9..a9591f92ebe 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java @@ -27,8 +27,8 @@ public class TrimProcessor extends AbstractStringProcessor { public static final String TYPE = "trim"; - TrimProcessor(String field) { - super(field); + TrimProcessor(String processorId, String field) { + super(processorId, field); } @Override @@ -43,8 +43,8 @@ public class TrimProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected TrimProcessor newProcessor(String field) { - return new TrimProcessor(field); + protected TrimProcessor newProcessor(String processorTag, String field) { + return new TrimProcessor(processorTag, field); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java index 0f1757eaeec..600365e5afe 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java +++ 
b/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java @@ -29,8 +29,8 @@ public class UppercaseProcessor extends AbstractStringProcessor { public static final String TYPE = "uppercase"; - UppercaseProcessor(String field) { - super(field); + UppercaseProcessor(String processorTag, String field) { + super(processorTag, field); } @Override @@ -45,8 +45,8 @@ public class UppercaseProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected UppercaseProcessor newProcessor(String field) { - return new UppercaseProcessor(field); + protected UppercaseProcessor newProcessor(String processorTag, String field) { + return new UppercaseProcessor(processorTag, field); } } } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index 33c8a4b565f..8c8b4ba35ff 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -170,6 +170,11 @@ public class IngestActionFilterTests extends ESTestCase { public String getType() { return null; } + + @Override + public String getTag() { + return null; + } }; when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); executionService = new PipelineExecutionService(store, threadPool); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java index dc8f7fcb7b9..bc66c64ccab 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.ingest; 
-import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.core.IngestDocument; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index e292b45c47a..11cc0a0f6d0 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -19,14 +19,11 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ingest.SimulateDocumentResult; -import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; -import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; -import org.elasticsearch.action.ingest.SimulateExecutionService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.core.CompoundProcessor; +import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.test.ESTestCase; @@ -34,6 +31,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -44,6 +46,8 @@ public class SimulateExecutionServiceTests extends ESTestCase { private ThreadPool threadPool; private SimulateExecutionService executionService; + private Pipeline pipeline; + private Processor processor; private IngestDocument 
ingestDocument; @Before @@ -54,6 +58,8 @@ public class SimulateExecutionServiceTests extends ESTestCase { .build() ); executionService = new SimulateExecutionService(threadPool); + processor = new TestProcessor("id", "mock", ingestDocument -> {}); + pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); } @@ -62,21 +68,89 @@ public class SimulateExecutionServiceTests extends ESTestCase { threadPool.shutdown(); } + public void testExecuteVerboseDocumentSimple() throws Exception { + List processorResultList = new ArrayList<>(); + executionService.executeVerboseDocument(processor, ingestDocument, processorResultList); + SimulateProcessorResult result = new SimulateProcessorResult("id", ingestDocument); + assertThat(processorResultList.size(), equalTo(1)); + assertThat(processorResultList.get(0).getProcessorTag(), equalTo(result.getProcessorTag())); + assertThat(processorResultList.get(0).getIngestDocument(), equalTo(result.getIngestDocument())); + assertThat(processorResultList.get(0).getFailure(), nullValue()); + } + + public void testExecuteVerboseDocumentSimpleException() throws Exception { + RuntimeException exception = new RuntimeException("mock_exception"); + TestProcessor processor = new TestProcessor("id", "mock", ingestDocument -> { throw exception; }); + List processorResultList = new ArrayList<>(); + try { + executionService.executeVerboseDocument(processor, ingestDocument, processorResultList); + fail("should throw exception"); + } catch (RuntimeException e) { + assertThat(e.getMessage(), equalTo("mock_exception")); + } + SimulateProcessorResult result = new SimulateProcessorResult("id", exception); + assertThat(processorResultList.size(), equalTo(1)); + assertThat(processorResultList.get(0).getProcessorTag(), equalTo(result.getProcessorTag())); + assertThat(processorResultList.get(0).getFailure(), equalTo(result.getFailure())); + } + + public void 
testExecuteVerboseDocumentCompoundSuccess() throws Exception { + TestProcessor processor1 = new TestProcessor("p1", "mock", ingestDocument -> { }); + TestProcessor processor2 = new TestProcessor("p2", "mock", ingestDocument -> { }); + + Processor compoundProcessor = new CompoundProcessor(processor1, processor2); + List processorResultList = new ArrayList<>(); + executionService.executeVerboseDocument(compoundProcessor, ingestDocument, processorResultList); + assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertThat(processor2.getInvokedCounter(), equalTo(1)); + assertThat(processorResultList.size(), equalTo(2)); + assertThat(processorResultList.get(0).getProcessorTag(), equalTo("p1")); + assertThat(processorResultList.get(0).getIngestDocument(), equalTo(ingestDocument)); + assertThat(processorResultList.get(0).getFailure(), nullValue()); + assertThat(processorResultList.get(1).getProcessorTag(), equalTo("p2")); + assertThat(processorResultList.get(1).getIngestDocument(), equalTo(ingestDocument)); + assertThat(processorResultList.get(1).getFailure(), nullValue()); + } + + public void testExecuteVerboseDocumentCompoundOnFailure() throws Exception { + TestProcessor processor1 = new TestProcessor("p1", "mock", ingestDocument -> { }); + TestProcessor processor2 = new TestProcessor("p2", "mock", ingestDocument -> { throw new RuntimeException("p2_exception"); }); + TestProcessor onFailureProcessor1 = new TestProcessor("fail_p1", "mock", ingestDocument -> { }); + TestProcessor onFailureProcessor2 = new TestProcessor("fail_p2", "mock", ingestDocument -> { throw new RuntimeException("fail_p2_exception"); }); + TestProcessor onFailureProcessor3 = new TestProcessor("fail_p3", "mock", ingestDocument -> { }); + CompoundProcessor onFailureCompoundProcessor = new CompoundProcessor(Collections.singletonList(onFailureProcessor2), Collections.singletonList(onFailureProcessor3)); + + Processor compoundProcessor = new CompoundProcessor(Arrays.asList(processor1, processor2), 
Arrays.asList(onFailureProcessor1, onFailureCompoundProcessor)); + List processorResultList = new ArrayList<>(); + executionService.executeVerboseDocument(compoundProcessor, ingestDocument, processorResultList); + assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertThat(processor2.getInvokedCounter(), equalTo(1)); + assertThat(onFailureProcessor1.getInvokedCounter(), equalTo(1)); + assertThat(onFailureProcessor2.getInvokedCounter(), equalTo(1)); + assertThat(onFailureProcessor3.getInvokedCounter(), equalTo(1)); + assertThat(processorResultList.size(), equalTo(5)); + assertThat(processorResultList.get(0).getProcessorTag(), equalTo("p1")); + assertThat(processorResultList.get(1).getProcessorTag(), equalTo("p2")); + assertThat(processorResultList.get(2).getProcessorTag(), equalTo("fail_p1")); + assertThat(processorResultList.get(3).getProcessorTag(), equalTo("fail_p2")); + assertThat(processorResultList.get(4).getProcessorTag(), equalTo("fail_p3")); + } + public void testExecuteVerboseItem() throws Exception { - TestProcessor processor = new TestProcessor("mock", ingestDocument -> {}); + TestProcessor processor = new TestProcessor("test-id", "mock", ingestDocument -> {}); Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); assertThat(processor.getInvokedCounter(), equalTo(2)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorTag(), equalTo("test-id")); 
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), not(sameInstance(ingestDocument))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata()))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), nullValue()); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorTag(), equalTo("test-id")); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata()))); @@ -86,7 +160,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteItem() throws Exception { - TestProcessor processor = new TestProcessor("mock", ingestDocument -> {}); + TestProcessor processor = new TestProcessor("processor_0", "mock", ingestDocument -> {}); Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); assertThat(processor.getInvokedCounter(), equalTo(2)); @@ -97,21 +171,21 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItemWithFailure() throws Exception { - TestProcessor processor1 = new TestProcessor("mock", ingestDocument -> { throw new 
RuntimeException("processor failed"); }); - TestProcessor processor2 = new TestProcessor("mock", ingestDocument -> {}); - Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor1, processor2)); + TestProcessor processor1 = new TestProcessor("processor_0", "mock", ingestDocument -> { throw new RuntimeException("processor failed"); }); + TestProcessor processor2 = new TestProcessor("processor_1", "mock", ingestDocument -> {}); + Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(Collections.singletonList(processor1), Collections.singletonList(processor2))); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertThat(processor2.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2)); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorId(), equalTo("processor[mock]-0")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorTag(), equalTo("processor_0")); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), nullValue()); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), instanceOf(RuntimeException.class)); RuntimeException runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(); assertThat(runtimeException.getMessage(), equalTo("processor failed")); - assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorId(), equalTo("processor[mock]-1")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorTag(), 
equalTo("processor_1")); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument))); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument)); assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java index eabc1821d2a..c0e7d6921ac 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.core.CompoundProcessor; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index 905baa86485..c82c42d6209 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -19,11 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ingest.SimulateDocumentResult; -import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; -import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; -import org.elasticsearch.action.ingest.SimulatePipelineResponse; -import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; 
import org.elasticsearch.ingest.core.IngestDocument; @@ -94,7 +89,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { Iterator expectedProcessorResultIterator = expectedSimulateDocumentVerboseResult.getProcessorResults().iterator(); for (SimulateProcessorResult simulateProcessorResult : simulateDocumentVerboseResult.getProcessorResults()) { SimulateProcessorResult expectedProcessorResult = expectedProcessorResultIterator.next(); - assertThat(simulateProcessorResult.getProcessorId(), equalTo(expectedProcessorResult.getProcessorId())); + assertThat(simulateProcessorResult.getProcessorTag(), equalTo(expectedProcessorResult.getProcessorTag())); assertThat(simulateProcessorResult.getIngestDocument(), equalTo(expectedProcessorResult.getIngestDocument())); if (expectedProcessorResult.getFailure() == null) { assertThat(simulateProcessorResult.getFailure(), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index 999e7ee6650..ace5d58c104 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.ingest.core.IngestDocument; @@ -48,7 +47,7 @@ public class SimulateProcessorResultTests extends ESTestCase { simulateProcessorResult.writeTo(out); StreamInput streamInput = StreamInput.wrap(out.bytes()); SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(streamInput); - assertThat(otherSimulateProcessorResult.getProcessorId(), equalTo(simulateProcessorResult.getProcessorId())); + 
assertThat(otherSimulateProcessorResult.getProcessorTag(), equalTo(simulateProcessorResult.getProcessorTag())); assertThat(otherSimulateProcessorResult.getIngestDocument(), equalTo(simulateProcessorResult.getIngestDocument())); if (isFailure) { assertThat(otherSimulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class)); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 6227b01a30c..e8203b52285 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -226,8 +226,8 @@ public class IngestClientIT extends ESIntegTestCase { } public void onModule(NodeModule nodeModule) { - nodeModule.registerProcessor("test", (templateService) -> config -> - new TestProcessor("test", ingestDocument -> { + nodeModule.registerProcessor("test", templateService -> config -> + new TestProcessor("id", "test", ingestDocument -> { ingestDocument.setFieldValue("processed", true); if (ingestDocument.getFieldValue("fail", Boolean.class)) { throw new IllegalArgumentException("test processor failed"); diff --git a/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java index 6cc38e12536..f19500ab186 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java @@ -71,7 +71,7 @@ public class CompoundProcessorTests extends ESTestCase { } public void testSingleProcessorWithOnFailureProcessor() throws Exception { - TestProcessor processor1 = new TestProcessor("first", ingestDocument -> {throw new RuntimeException("error");}); + TestProcessor processor1 = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");}); TestProcessor processor2 = new 
TestProcessor(ingestDocument -> { Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.size(), equalTo(2)); @@ -87,8 +87,8 @@ public class CompoundProcessorTests extends ESTestCase { } public void testSingleProcessorWithNestedFailures() throws Exception { - TestProcessor processor = new TestProcessor("first", ingestDocument -> {throw new RuntimeException("error");}); - TestProcessor processorToFail = new TestProcessor("second", ingestDocument -> { + TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");}); + TestProcessor processorToFail = new TestProcessor("id", "second", ingestDocument -> { Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.size(), equalTo(2)); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); diff --git a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java index 153dc30019e..f195f8ef045 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java @@ -24,26 +24,33 @@ import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class PipelineFactoryTests extends ESTestCase { public void testCreate() throws Exception { - Map processorConfig = new HashMap<>(); + Map processorConfig0 = new HashMap<>(); + Map processorConfig1 = new HashMap<>(); + processorConfig0.put(AbstractProcessorFactory.PROCESSOR_TAG_KEY, "first-processor"); Map pipelineConfig = new HashMap<>(); 
pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description"); - pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig))); + pipelineConfig.put(Pipeline.PROCESSORS_KEY, Arrays.asList(Collections.singletonMap("test", processorConfig0), Collections.singletonMap("test", processorConfig1))); Pipeline.Factory factory = new Pipeline.Factory(); Map processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory()); Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry); assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); - assertThat(pipeline.getProcessors().size(), equalTo(1)); + assertThat(pipeline.getProcessors().size(), equalTo(2)); assertThat(pipeline.getProcessors().get(0).getType(), equalTo("test-processor")); + assertThat(pipeline.getProcessors().get(0).getTag(), equalTo("first-processor")); + assertThat(pipeline.getProcessors().get(1).getType(), equalTo("test-processor")); + assertThat(pipeline.getProcessors().get(1).getTag(), nullValue()); } public void testCreateWithPipelineOnFailure() throws Exception { @@ -91,6 +98,6 @@ public class PipelineFactoryTests extends ESTestCase { assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); assertThat(pipeline.getProcessors().size(), equalTo(1)); - assertThat(pipeline.getProcessors().get(0).getType(), equalTo("compound[test-processor]")); + assertThat(pipeline.getProcessors().get(0).getType(), equalTo("compound")); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java index 09ce465c002..e236abe12f0 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java +++ 
b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java @@ -49,9 +49,12 @@ public class AppendProcessorFactoryTests extends ESTestCase { value = Arrays.asList("value1", "value2", "value3"); } config.put("value", value); - AppendProcessor setProcessor = factory.create(config); - assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); - assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo(value)); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); + AppendProcessor appendProcessor = factory.create(config); + assertThat(appendProcessor.getTag(), equalTo(processorTag)); + assertThat(appendProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); + assertThat(appendProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo(value)); } public void testCreateNoFieldPresent() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java index 7853709240b..4a78ba621ce 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java @@ -158,7 +158,7 @@ public class AppendProcessorTests extends ESTestCase { private static Processor createAppendProcessor(String fieldName, Object fieldValue) { TemplateService templateService = TestTemplateService.instance(); - return new AppendProcessor(templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); + return new AppendProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); } private enum Scalar { diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java 
b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java index 4feb20d41c4..280be970044 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java @@ -36,7 +36,10 @@ public class ConvertProcessorFactoryTests extends ESTestCase { ConvertProcessor.Type type = randomFrom(ConvertProcessor.Type.values()); config.put("field", "field1"); config.put("type", type.toString()); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); ConvertProcessor convertProcessor = factory.create(config); + assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); assertThat(convertProcessor.getConvertType(), equalTo(type)); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java index 040cac4851f..1350ebab601 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java @@ -41,7 +41,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int randomInt = randomInt(); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomInt); - Processor processor = new ConvertProcessor(fieldName, Type.INTEGER); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.INTEGER); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt)); } @@ -57,7 +57,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomInt); } String fieldName = 
RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(fieldName, Type.INTEGER); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.INTEGER); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -68,7 +68,7 @@ public class ConvertProcessorTests extends ESTestCase { String value = "string-" + randomAsciiOfLengthBetween(1, 10); ingestDocument.setFieldValue(fieldName, value); - Processor processor = new ConvertProcessor(fieldName, Type.INTEGER); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.INTEGER); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -84,7 +84,7 @@ public class ConvertProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomFloat); expectedResult.put(fieldName, randomFloat); - Processor processor = new ConvertProcessor(fieldName, Type.FLOAT); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.FLOAT); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, Float.class), equalTo(randomFloat)); } @@ -100,7 +100,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomFloat); } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(fieldName, Type.FLOAT); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.FLOAT); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -111,7 +111,7 @@ public class ConvertProcessorTests extends ESTestCase { String value = "string-" + randomAsciiOfLengthBetween(1, 10); ingestDocument.setFieldValue(fieldName, value); - Processor 
processor = new ConvertProcessor(fieldName, Type.FLOAT); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.FLOAT); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -122,8 +122,6 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertBoolean() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Map fields = new HashMap<>(); - Map expectedResult = new HashMap<>(); boolean randomBoolean = randomBoolean(); String booleanString = Boolean.toString(randomBoolean); if (randomBoolean) { @@ -131,7 +129,7 @@ public class ConvertProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, booleanString); - Processor processor = new ConvertProcessor(fieldName, Type.BOOLEAN); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.BOOLEAN); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, Boolean.class), equalTo(randomBoolean)); } @@ -151,7 +149,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomBoolean); } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(fieldName, Type.BOOLEAN); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.BOOLEAN); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -168,7 +166,7 @@ public class ConvertProcessorTests extends ESTestCase { } ingestDocument.setFieldValue(fieldName, fieldValue); - Processor processor = new ConvertProcessor(fieldName, Type.BOOLEAN); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.BOOLEAN); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ 
-202,7 +200,7 @@ public class ConvertProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(fieldName, Type.STRING); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.STRING); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedFieldValue)); } @@ -238,7 +236,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomValueString); } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(fieldName, Type.STRING); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.STRING); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -247,7 +245,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Type type = randomFrom(Type.values()); - Processor processor = new ConvertProcessor(fieldName, type); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, type); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -259,7 +257,7 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertNullField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); Type type = randomFrom(Type.values()); - Processor processor = new ConvertProcessor("field", type); + Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", type); try { processor.execute(ingestDocument); fail("processor execute should 
have failed"); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java index 708b164ebd6..41b9e0f9258 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java @@ -41,8 +41,10 @@ public class DateProcessorFactoryTests extends ESTestCase { String sourceField = randomAsciiOfLengthBetween(1, 10); config.put("match_field", sourceField); config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); - + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); DateProcessor processor = factory.create(config); + assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getMatchField(), equalTo(sourceField)); assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD)); assertThat(processor.getMatchFormats(), equalTo(Collections.singletonList("dd/MM/yyyyy"))); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java index f7aba42d549..ff108e61bfc 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java @@ -38,7 +38,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class DateProcessorTests extends ESTestCase { public void testJodaPattern() { - DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, "date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date"); Map document = new HashMap<>(); 
document.put("date_as_string", "2010 12 06 11:05:15"); @@ -52,7 +52,7 @@ public class DateProcessorTests extends ESTestCase { matchFormats.add("yyyy dd MM"); matchFormats.add("dd/MM/yyyy"); matchFormats.add("dd-MM-yyyy"); - DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, "date_as_string", matchFormats, "date_as_date"); Map document = new HashMap<>(); @@ -85,7 +85,7 @@ public class DateProcessorTests extends ESTestCase { } public void testJodaPatternLocale() { - DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ITALIAN, + DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ITALIAN, "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 giugno"); @@ -95,7 +95,7 @@ public class DateProcessorTests extends ESTestCase { } public void testJodaPatternDefaultYear() { - DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, "date_as_string", Collections.singletonList("dd/MM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "12/06"); @@ -105,7 +105,7 @@ public class DateProcessorTests extends ESTestCase { } public void testTAI64N() { - DateProcessor dateProcessor = new DateProcessor(DateTimeZone.forOffsetHours(2), randomLocale(random()), + DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forOffsetHours(2), randomLocale(random()), "date_as_string", Collections.singletonList(DateFormat.Tai64n.toString()), "date_as_date"); Map document = new 
HashMap<>(); String dateAsString = (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024"; @@ -116,7 +116,7 @@ public class DateProcessorTests extends ESTestCase { } public void testUnixMs() { - DateProcessor dateProcessor = new DateProcessor(DateTimeZone.UTC, randomLocale(random()), + DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), "date_as_string", Collections.singletonList(DateFormat.UnixMs.toString()), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); @@ -126,7 +126,7 @@ public class DateProcessorTests extends ESTestCase { } public void testUnix() { - DateProcessor dateProcessor = new DateProcessor(DateTimeZone.UTC, randomLocale(random()), + DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), "date_as_string", Collections.singletonList(DateFormat.Unix.toString()), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000.5"); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java index 620958b142c..bca16060be1 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java @@ -39,8 +39,11 @@ public class DeDotProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { Map config = new HashMap<>(); config.put("separator", "_"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); DeDotProcessor deDotProcessor = factory.create(config); assertThat(deDotProcessor.getSeparator(), equalTo("_")); + assertThat(deDotProcessor.getTag(), equalTo(processorTag)); } public void testCreateMissingSeparatorField() throws Exception { diff --git 
a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java index be6426ede36..a0c87d7a16b 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java @@ -37,7 +37,7 @@ public class DeDotProcessorTests extends ESTestCase { source.put("a.b", "hello world!"); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); String separator = randomUnicodeOfCodepointLengthBetween(1, 10); - Processor processor = new DeDotProcessor(separator); + Processor processor = new DeDotProcessor(randomAsciiOfLength(10), separator); processor.execute(ingestDocument); assertThat(ingestDocument.getSourceAndMetadata().get("a" + separator + "b" ), equalTo("hello world!")); } @@ -48,7 +48,7 @@ public class DeDotProcessorTests extends ESTestCase { subField.put("b.c", "hello world!"); source.put("a", subField); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new DeDotProcessor("_"); + Processor processor = new DeDotProcessor(randomAsciiOfLength(10), "_"); processor.execute(ingestDocument); IngestDocument expectedDocument = new IngestDocument( @@ -63,7 +63,7 @@ public class DeDotProcessorTests extends ESTestCase { subField.put("b.c", "hello world!"); source.put("a", Arrays.asList(subField)); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new DeDotProcessor("_"); + Processor processor = new DeDotProcessor(randomAsciiOfLength(10), "_"); processor.execute(ingestDocument); IngestDocument expectedDocument = new IngestDocument( diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java index 4793cb62c25..55ad338efca 
100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java @@ -42,7 +42,10 @@ public class FailProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { Map config = new HashMap<>(); config.put("message", "error"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); FailProcessor failProcessor = factory.create(config); + assertThat(failProcessor.getTag(), equalTo(processorTag)); assertThat(failProcessor.getMessage().execute(Collections.emptyMap()), equalTo("error")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java index 0ee3068d367..3fdc2073c09 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java @@ -32,7 +32,7 @@ public class FailProcessorTests extends ESTestCase { public void test() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String message = randomAsciiOfLength(10); - Processor processor = new FailProcessor(new TestTemplateService.MockTemplate(message)); + Processor processor = new FailProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(message)); try { processor.execute(ingestDocument); fail("fail processor should throw an exception"); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java index 7a7377b9045..9ce74293ed3 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java @@ -35,7 
+35,10 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("pattern", "\\."); config.put("replacement", "-"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); GsubProcessor gsubProcessor = factory.create(config); + assertThat(gsubProcessor.getTag(), equalTo(processorTag)); assertThat(gsubProcessor.getField(), equalTo("field1")); assertThat(gsubProcessor.getPattern().toString(), equalTo("\\.")); assertThat(gsubProcessor.getReplacement(), equalTo("-")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java index 9c7a9bd721c..fe44f33b614 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java @@ -36,7 +36,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsub() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); - Processor processor = new GsubProcessor(fieldName, Pattern.compile("\\."), "-"); + Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-"); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo("127-0-0-1")); } @@ -45,7 +45,7 @@ public class GsubProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, 123); - Processor processor = new GsubProcessor(fieldName, Pattern.compile("\\."), "-"); + Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, 
Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); @@ -57,7 +57,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsubFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new GsubProcessor(fieldName, Pattern.compile("\\."), "-"); + Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); @@ -68,7 +68,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsubNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Processor processor = new GsubProcessor("field", Pattern.compile("\\."), "-"); + Processor processor = new GsubProcessor(randomAsciiOfLength(10), "field", Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java index 2d7bee118f5..fffeca51770 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java @@ -33,7 +33,10 @@ public class JoinProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("separator", "-"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); JoinProcessor joinProcessor = factory.create(config); + assertThat(joinProcessor.getTag(), equalTo(processorTag)); 
assertThat(joinProcessor.getField(), equalTo("field1")); assertThat(joinProcessor.getSeparator(), equalTo("-")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java index cbd4dd66143..2aa3ac2e035 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java @@ -51,7 +51,7 @@ public class JoinProcessorTests extends ESTestCase { } } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new JoinProcessor(fieldName, separator); + Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, separator); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult)); } @@ -71,7 +71,7 @@ public class JoinProcessorTests extends ESTestCase { } } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new JoinProcessor(fieldName, separator); + Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, separator); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult)); } @@ -80,7 +80,7 @@ public class JoinProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, randomAsciiOfLengthBetween(1, 10)); - Processor processor = new JoinProcessor(fieldName, "-"); + Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -91,7 +91,7 @@ public class JoinProcessorTests extends 
ESTestCase { public void testJoinNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new JoinProcessor(fieldName, "-"); + Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -101,7 +101,7 @@ public class JoinProcessorTests extends ESTestCase { public void testJoinNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Processor processor = new JoinProcessor("field", "-"); + Processor processor = new JoinProcessor(randomAsciiOfLength(10), "field", "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java index 478bb70b6fe..c22ea6641ea 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java @@ -32,7 +32,10 @@ public class LowercaseProcessorFactoryTests extends ESTestCase { LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); LowercaseProcessor uppercaseProcessor = factory.create(config); + assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java 
b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java index 898e431faff..77e22b0ca1e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java @@ -24,7 +24,7 @@ import java.util.Locale; public class LowercaseProcessorTests extends AbstractStringProcessorTestCase { @Override protected AbstractStringProcessor newProcessor(String field) { - return new LowercaseProcessor(field); + return new LowercaseProcessor(randomAsciiOfLength(10), field); } @Override diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java index 6c0e899f6c8..6522b565b21 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java @@ -41,7 +41,10 @@ public class RemoveProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { Map config = new HashMap<>(); config.put("field", "field1"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); RemoveProcessor removeProcessor = factory.create(config); + assertThat(removeProcessor.getTag(), equalTo(processorTag)); assertThat(removeProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java index 891dc57ffc6..d134b0213eb 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java @@ -35,7 +35,7 @@ public class RemoveProcessorTests extends ESTestCase { public void 
testRemoveFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new RemoveProcessor(new TestTemplateService.MockTemplate(field)); + Processor processor = new RemoveProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(field)); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(field), equalTo(false)); } @@ -43,7 +43,7 @@ public class RemoveProcessorTests extends ESTestCase { public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RemoveProcessor(new TestTemplateService.MockTemplate(fieldName)); + Processor processor = new RemoveProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(fieldName)); try { processor.execute(ingestDocument); fail("remove field should have failed"); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java index e0298a927d6..0a5d709f06d 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java @@ -33,7 +33,10 @@ public class RenameProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "old_field"); config.put("to", "new_field"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); RenameProcessor renameProcessor = factory.create(config); + assertThat(renameProcessor.getTag(), equalTo(processorTag)); assertThat(renameProcessor.getOldFieldName(), equalTo("old_field")); 
assertThat(renameProcessor.getNewFieldName(), equalTo("new_field")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java index ae354e893d7..1f9bddaac6b 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java @@ -44,7 +44,7 @@ public class RenameProcessorTests extends ESTestCase { do { newFieldName = RandomDocumentPicks.randomFieldName(random()); } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFieldName.equals(fieldName)); - Processor processor = new RenameProcessor(fieldName, newFieldName); + Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, newFieldName); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), equalTo(fieldValue)); } @@ -62,7 +62,7 @@ public class RenameProcessorTests extends ESTestCase { document.put("one", one); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = new RenameProcessor("list.0", "item"); + Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list.0", "item"); processor.execute(ingestDocument); Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); assertThat(actualObject, instanceOf(List.class)); @@ -75,7 +75,7 @@ public class RenameProcessorTests extends ESTestCase { assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); - processor = new RenameProcessor("list.0", "list.3"); + processor = new RenameProcessor(randomAsciiOfLength(10), "list.0", "list.3"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -90,7 +90,7 @@ public class RenameProcessorTests extends ESTestCase { public void 
testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random())); + Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, RandomDocumentPicks.randomFieldName(random())); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -102,7 +102,7 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameNewFieldAlreadyExists() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new RenameProcessor(RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName); + Processor processor = new RenameProcessor(randomAsciiOfLength(10), RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -116,7 +116,7 @@ public class RenameProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, null); String newFieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(fieldName, newFieldName); + Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, newFieldName); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(fieldName), equalTo(false)); assertThat(ingestDocument.hasField(newFieldName), equalTo(true)); @@ -136,7 +136,7 @@ public class RenameProcessorTests extends ESTestCase { source.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = new IngestDocument(source, 
Collections.emptyMap()); - Processor processor = new RenameProcessor("list", "new_field"); + Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list", "new_field"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -160,7 +160,7 @@ public class RenameProcessorTests extends ESTestCase { source.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new RenameProcessor("list", "new_field"); + Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list", "new_field"); try { processor.execute(ingestDocument); fail("processor execute should have failed"); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java index 22ece637739..ffc98379955 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java @@ -42,7 +42,10 @@ public class SetProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("value", "value1"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); SetProcessor setProcessor = factory.create(config); + assertThat(setProcessor.getTag(), equalTo(processorTag)); assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java index b66cc24202a..283825cdad8 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java +++ 
b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java @@ -78,6 +78,6 @@ public class SetProcessorTests extends ESTestCase { private static Processor createSetProcessor(String fieldName, Object fieldValue) { TemplateService templateService = TestTemplateService.instance(); - return new SetProcessor(templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); + return new SetProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java index d9cf2a0fdd8..9954336030a 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java @@ -33,7 +33,10 @@ public class SplitProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("separator", "\\."); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); SplitProcessor splitProcessor = factory.create(config); + assertThat(splitProcessor.getTag(), equalTo(processorTag)); assertThat(splitProcessor.getField(), equalTo("field1")); assertThat(splitProcessor.getSeparator(), equalTo("\\.")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java index d5a587fbd41..dbf164c89aa 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java @@ -37,7 +37,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplit() throws Exception { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); - Processor processor = new SplitProcessor(fieldName, "\\."); + Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\."); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); } @@ -45,7 +45,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplitFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new SplitProcessor(fieldName, "\\."); + Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\."); try { processor.execute(ingestDocument); fail("split processor should have failed"); @@ -56,7 +56,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplitNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Processor processor = new SplitProcessor("field", "\\."); + Processor processor = new SplitProcessor(randomAsciiOfLength(10), "field", "\\."); try { processor.execute(ingestDocument); fail("split processor should have failed"); @@ -69,7 +69,7 @@ public class SplitProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, randomInt()); - Processor processor = new SplitProcessor(fieldName, "\\."); + Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\."); try { processor.execute(ingestDocument); fail("split processor should have failed"); diff --git 
a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java index f7bcf832973..cb5e1ed838f 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java @@ -32,7 +32,10 @@ public class TrimProcessorFactoryTests extends ESTestCase { TrimProcessor.Factory factory = new TrimProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); TrimProcessor uppercaseProcessor = factory.create(config); + assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java index 265040ead19..a0e5fdeab2b 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java @@ -23,7 +23,7 @@ public class TrimProcessorTests extends AbstractStringProcessorTestCase { @Override protected AbstractStringProcessor newProcessor(String field) { - return new TrimProcessor(field); + return new TrimProcessor(randomAsciiOfLength(10), field); } @Override diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java index c683e8ebbc2..a3569bd2ee0 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java @@ -32,7 +32,10 @@ public class 
UppercaseProcessorFactoryTests extends ESTestCase { UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); UppercaseProcessor uppercaseProcessor = factory.create(config); + assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java index 343fff1b588..4ab61f7b5e3 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java @@ -25,7 +25,7 @@ public class UppercaseProcessorTests extends AbstractStringProcessorTestCase { @Override protected AbstractStringProcessor newProcessor(String field) { - return new UppercaseProcessor(field); + return new UppercaseProcessor(randomAsciiOfLength(10), field); } @Override diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java index 56c04f09709..3a40f9fa0df 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.grok; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; @@ -30,10 +31,12 @@ public final class GrokProcessor implements Processor { public static final String TYPE = "grok"; + private final String processorTag; private 
final String matchField; private final Grok grok; - public GrokProcessor(Grok grok, String matchField) { + public GrokProcessor(String processorTag, Grok grok, String matchField) { + this.processorTag = processorTag; this.matchField = matchField; this.grok = grok; } @@ -54,6 +57,11 @@ public final class GrokProcessor implements Processor { return TYPE; } + @Override + public String getTag() { + return processorTag; + } + String getMatchField() { return matchField; } @@ -62,7 +70,7 @@ public final class GrokProcessor implements Processor { return grok; } - public final static class Factory implements Processor.Factory { + public final static class Factory extends AbstractProcessorFactory { private final Map builtinPatterns; @@ -70,7 +78,8 @@ public final class GrokProcessor implements Processor { this.builtinPatterns = builtinPatterns; } - public GrokProcessor create(Map config) throws Exception { + @Override + public GrokProcessor doCreate(String processorTag, Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(config, "field"); String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern"); Map customPatternBank = ConfigurationUtils.readOptionalMap(config, "pattern_definitions"); @@ -80,7 +89,7 @@ public final class GrokProcessor implements Processor { } Grok grok = new Grok(patternBank, matchPattern); - return new GrokProcessor(grok, matchField); + return new GrokProcessor(processorTag, grok, matchField); } } diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java index 419e48874cd..9eb3100729d 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java @@ -36,7 +36,10 @@ public class GrokProcessorFactoryTests extends 
ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("pattern", "(?\\w+)"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); GrokProcessor processor = factory.create(config); + assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getMatchField(), equalTo("_field")); assertThat(processor.getGrok(), notNullValue()); } diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java index 9fe203ca396..840cf954022 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorTests.java @@ -38,7 +38,7 @@ public class GrokProcessorTests extends ESTestCase { IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "1"); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); - GrokProcessor processor = new GrokProcessor(grok, fieldName); + GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), grok, fieldName); processor.execute(doc); assertThat(doc.getFieldValue("one", String.class), equalTo("1")); } @@ -48,7 +48,7 @@ public class GrokProcessorTests extends ESTestCase { IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "23"); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); - GrokProcessor processor = new GrokProcessor(grok, fieldName); + GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), grok, fieldName); try { processor.execute(doc); fail(); @@ -63,7 +63,7 @@ public class GrokProcessorTests extends ESTestCase { originalDoc.setFieldValue(fieldName, fieldName); IngestDocument doc = new IngestDocument(originalDoc); Grok grok = 
new Grok(Collections.emptyMap(), fieldName); - GrokProcessor processor = new GrokProcessor(grok, fieldName); + GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), grok, fieldName); processor.execute(doc); assertThat(doc, equalTo(originalDoc)); } @@ -73,7 +73,7 @@ public class GrokProcessorTests extends ESTestCase { IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, 1); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); - GrokProcessor processor = new GrokProcessor(grok, fieldName); + GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), grok, fieldName); try { processor.execute(doc); fail(); @@ -86,7 +86,7 @@ public class GrokProcessorTests extends ESTestCase { String fieldName = "foo.bar"; IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); Grok grok = new Grok(Collections.singletonMap("ONE", "1"), "%{ONE:one}"); - GrokProcessor processor = new GrokProcessor(grok, fieldName); + GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), grok, fieldName); try { processor.execute(doc); fail(); diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 0ffc3cf3a59..c1eb6830d49 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -32,6 +32,7 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; @@ -56,12 +57,14 @@ public final 
class GeoIpProcessor implements Processor { public static final String TYPE = "geoip"; + private final String processorTag; private final String sourceField; private final String targetField; private final DatabaseReader dbReader; private final Set fields; - GeoIpProcessor(String sourceField, DatabaseReader dbReader, String targetField, Set fields) throws IOException { + GeoIpProcessor(String processorTag, String sourceField, DatabaseReader dbReader, String targetField, Set fields) throws IOException { + this.processorTag = processorTag; this.sourceField = sourceField; this.targetField = targetField; this.dbReader = dbReader; @@ -100,6 +103,11 @@ public final class GeoIpProcessor implements Processor { return TYPE; } + @Override + public String getTag() { + return processorTag; + } + String getSourceField() { return sourceField; } @@ -215,7 +223,7 @@ public final class GeoIpProcessor implements Processor { return geoData; } - public static final class Factory implements Processor.Factory, Closeable { + public static final class Factory extends AbstractProcessorFactory implements Closeable { static final Set DEFAULT_FIELDS = EnumSet.of( Field.CONTINENT_NAME, Field.COUNTRY_ISO_CODE, Field.REGION_NAME, Field.CITY_NAME, Field.LOCATION @@ -227,7 +235,8 @@ public final class GeoIpProcessor implements Processor { this.databaseReaders = databaseReaders; } - public GeoIpProcessor create(Map config) throws Exception { + @Override + public GeoIpProcessor doCreate(String processorTag, Map config) throws Exception { String ipField = readStringProperty(config, "source_field"); String targetField = readStringProperty(config, "target_field", "geoip"); String databaseFile = readStringProperty(config, "database_file", "GeoLite2-City.mmdb"); @@ -251,7 +260,7 @@ public final class GeoIpProcessor implements Processor { if (databaseReader == null) { throw new IllegalArgumentException("database file [" + databaseFile + "] doesn't exist"); } - return new GeoIpProcessor(ipField, 
databaseReader, targetField, fields); + return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, fields); } @Override diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 20ffe7fe43a..7653e978127 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -60,7 +60,11 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("source_field", "_field"); + String processorTag = randomAsciiOfLength(10); + config.put("processor_tag", processorTag); + GeoIpProcessor processor = factory.create(config); + assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index b3b41105157..aa0d6e6d6af 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -37,7 +37,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCity() throws Exception { InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); - GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); + GeoIpProcessor processor = new 
GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); @@ -62,7 +62,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCountry() throws Exception { InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-Country.mmdb"); - GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); @@ -81,7 +81,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testAddressIsNotInTheDatabase() throws Exception { InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); - GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); Map document = new HashMap<>(); document.put("source_field", "202.45.11.11"); @@ -95,7 +95,7 @@ public class GeoIpProcessorTests extends ESTestCase { /** Don't silently do DNS lookups or anything trappy on bogus data */ public void testInvalid() throws Exception { InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); - GeoIpProcessor processor = new GeoIpProcessor("source_field", new DatabaseReader.Builder(database).build(), "target_field", 
EnumSet.allOf(GeoIpProcessor.Field.class)); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); Map document = new HashMap<>(); document.put("source_field", "www.google.com"); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml index a5965adec6e..67623a0763d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml @@ -69,6 +69,39 @@ } - length: { docs: 1 } +--- +"Test simulate with provided invalid pipeline definition": + - do: + catch: request + ingest.simulate: + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "set" : { + "value" : "_value" + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "foo": "bar" + } + } + ] + } + - length: { error: 3 } + - match: { status: 400 } + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "required property [field] is missing" } + --- "Test simulate without index type and id": - do: @@ -173,6 +206,7 @@ "processors": [ { "set" : { + "processor_tag" : "processor[set]-0", "field" : "field2", "value" : "_value" } @@ -198,7 +232,7 @@ } - length: { docs: 1 } - length: { docs.0.processor_results: 2 } - - match: { docs.0.processor_results.0.processor_id: "processor[set]-0" } + - match: { docs.0.processor_results.0.processor_tag: "processor[set]-0" } - length: { docs.0.processor_results.0.doc._source: 2 } - match: { docs.0.processor_results.0.doc._source.foo: "bar" } - match: { docs.0.processor_results.0.doc._source.field2: "_value" } @@ -297,15 +331,10 @@ ] } - length: { docs: 2 } - - length: { docs.0.processor_results: 2 } + - length: { 
docs.0.processor_results: 1 } - match: { docs.0.processor_results.0.error.type: "illegal_argument_exception" } - - match: { docs.0.processor_results.1.doc._index: "index" } - - match: { docs.0.processor_results.1.doc._type: "type" } - - match: { docs.0.processor_results.1.doc._id: "id" } - - match: { docs.0.processor_results.1.doc._source.foo: "bar" } - - match: { docs.0.processor_results.1.doc._source.bar: "HELLO" } - - length: { docs.0.processor_results.1.doc._ingest: 1 } - - is_true: docs.0.processor_results.1.doc._ingest.timestamp + - length: { docs.1.processor_results: 2 } + - match: { docs.1.processor_results.0.doc._index: "index" } - match: { docs.1.processor_results.0.doc._source.foo: 5 } - match: { docs.1.processor_results.0.doc._source.bar: "hello" } - length: { docs.1.processor_results.0.doc._ingest: 1 } @@ -315,3 +344,78 @@ - length: { docs.1.processor_results.1.doc._ingest: 1 } - is_true: docs.1.processor_results.1.doc._ingest.timestamp +--- +"Test verbose simulate with on_failure": + - do: + ingest.simulate: + verbose: true + body: > + { + "pipeline" : { + "description": "_description", + "processors": [ + { + "set" : { + "processor_tag" : "setstatus-1", + "field" : "status", + "value" : 200 + } + }, + { + "rename" : { + "processor_tag" : "rename-1", + "field" : "foofield", + "to" : "field1", + "on_failure" : [ + { + "set" : { + "processor_tag" : "set on_failure rename", + "field" : "foofield", + "value" : "exists" + } + }, + { + "rename" : { + "field" : "foofield2", + "to" : "field1", + "on_failure" : [ + { + "set" : { + "field" : "foofield2", + "value" : "ran" + } + } + ] + } + } + ] + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "field1": "123.42 400 " + } + } + ] + } + - length: { docs: 1 } + - length: { docs.0.processor_results: 5 } + - match: { docs.0.processor_results.0.processor_tag: "setstatus-1" } + - match: { docs.0.processor_results.0.doc._source.field1: "123.42 400 " } + - match: { 
docs.0.processor_results.0.doc._source.status: 200 } + - match: { docs.0.processor_results.1.processor_tag: "rename-1" } + - match: { docs.0.processor_results.1.error.type: "illegal_argument_exception" } + - match: { docs.0.processor_results.1.error.reason: "field [foofield] doesn't exist" } + - match: { docs.0.processor_results.2.processor_tag: "set on_failure rename" } + - is_false: docs.0.processor_results.3.processor_tag + - is_false: docs.0.processor_results.4.processor_tag + - match: { docs.0.processor_results.4.doc._source.foofield: "exists" } + - match: { docs.0.processor_results.4.doc._source.foofield2: "ran" } + - match: { docs.0.processor_results.4.doc._source.field1: "123.42 400 " } + - match: { docs.0.processor_results.4.doc._source.status: 200 } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java index 5c4dd701a72..78f5b765994 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; @@ -33,16 +34,18 @@ import java.util.function.Consumer; public class TestProcessor implements Processor { private final String type; + private final String tag; private final Consumer ingestDocumentConsumer; private final AtomicInteger invokedCounter = new AtomicInteger(); public TestProcessor(Consumer ingestDocumentConsumer) { - this("test-processor", ingestDocumentConsumer); + this(null, "test-processor", ingestDocumentConsumer); } - public TestProcessor(String type, Consumer ingestDocumentConsumer) { + public TestProcessor(String tag, String type, Consumer ingestDocumentConsumer) { this.ingestDocumentConsumer = ingestDocumentConsumer; this.type 
= type; + this.tag = tag; } @Override @@ -56,14 +59,19 @@ public class TestProcessor implements Processor { return type; } + @Override + public String getTag() { + return tag; + } + public int getInvokedCounter() { return invokedCounter.get(); } - public static final class Factory implements Processor.Factory { + public static final class Factory extends AbstractProcessorFactory { @Override - public TestProcessor create(Map config) throws Exception { - return new TestProcessor(ingestDocument -> {}); + public TestProcessor doCreate(String processorId, Map config) throws Exception { + return new TestProcessor(processorId, "test-processor", ingestDocument -> {}); } } } From 3b78267c7154d69a40d18acdab971a205acb2f22 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 16 Jan 2016 21:54:05 -0800 Subject: [PATCH 232/347] Plugins: Remove site plugins Site plugins used to be used for things like kibana and marvel, but there is no longer a need since kibana (and marvel as a kibana plugin) uses node.js. This change removes site plugins, as well as the flag for jvm plugins. Now all plugins are jvm plugins. 
--- .../gradle/plugin/PluginBuildPlugin.groovy | 3 - .../plugin/PluginPropertiesExtension.groovy | 6 - .../gradle/plugin/PluginPropertiesTask.groovy | 10 +- .../resources/plugin-descriptor.properties | 30 +--- .../org/elasticsearch/http/HttpServer.java | 150 +----------------- .../plugins/DummyPluginInfo.java | 6 +- .../org/elasticsearch/plugins/PluginInfo.java | 119 +++----------- .../elasticsearch/plugins/PluginManager.java | 4 +- .../elasticsearch/plugins/PluginsService.java | 52 ++---- .../org/elasticsearch/plugins/SitePlugin.java | 41 ----- .../rest/action/cat/RestPluginsAction.java | 18 --- .../plugins/PluginInfoTests.java | 74 ++------- .../elasticsearch/plugins/SitePluginIT.java | 131 --------------- .../SitePluginRelativePathConfigIT.java | 88 ---------- docs/plugins/authors.asciidoc | 52 +----- docs/reference/setup/upgrade.asciidoc | 2 +- modules/build.gradle | 3 - .../test/lang_expression/10_basic.yaml | 1 - .../test/lang_groovy/10_basic.yaml | 1 - .../test/lang_mustache/10_basic.yaml | 1 - .../test/discovery_azure/10_basic.yaml | 1 - .../test/discovery_ec2/10_basic.yaml | 1 - .../test/discovery_gce/10_basic.yaml | 1 - .../test/discovery_multicast/10_basic.yaml | 1 - .../test/jvm_example/10_basic.yaml | 1 - .../rest-api-spec/test/plan_a/10_basic.yaml | 1 - .../test/mapper_attachments/00_basic.yaml | 1 - .../test/repository_azure/10_basic.yaml | 1 - .../test/hdfs_repository/10_basic.yaml | 1 - .../test/repository_s3/10_basic.yaml | 1 - plugins/site-example/build.gradle | 27 ---- .../site-example/src/site/_site/index.html | 6 - .../elasticsearch/example/SiteContentsIT.java | 59 ------- .../org/elasticsearch/example/SiteRestIT.java | 41 ----- .../rest-api-spec/test/example/10_basic.yaml | 15 -- .../test/store_smb/10_basic.yaml | 1 - .../plugins/PluginManagerTests.java | 68 +------- .../packaging/scripts/plugin_test_cases.bash | 6 - .../test/cat.plugins/10_basic.yaml | 2 - settings.gradle | 1 - .../bootstrap/BootstrapForTesting.java | 8 +- 
.../hamcrest/ElasticsearchAssertions.java | 76 --------- 42 files changed, 79 insertions(+), 1033 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/plugins/SitePlugin.java delete mode 100644 core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java delete mode 100644 core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java delete mode 100644 plugins/site-example/build.gradle delete mode 100644 plugins/site-example/src/site/_site/index.html delete mode 100644 plugins/site-example/src/test/java/org/elasticsearch/example/SiteContentsIT.java delete mode 100644 plugins/site-example/src/test/java/org/elasticsearch/example/SiteRestIT.java delete mode 100644 plugins/site-example/src/test/resources/rest-api-spec/test/example/10_basic.yaml diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 042e8d22529..67d0e167b8a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -112,9 +112,6 @@ public class PluginBuildPlugin extends BuildPlugin { include 'config/**' include 'bin/**' } - from('src/site') { - include '_site/**' - } } project.assemble.dependsOn(bundle) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index dd5bcaedb0b..7b949b3e1da 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -36,15 +36,9 @@ class PluginPropertiesExtension { @Input String description - @Input - boolean jvm = true - @Input String classname - @Input - boolean site = false - @Input boolean isolated = 
true diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index 51853f85e00..de3d060ff26 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -51,11 +51,11 @@ class PluginPropertiesTask extends Copy { if (extension.description == null) { throw new InvalidUserDataException('description is a required setting for esplugin') } - if (extension.jvm && extension.classname == null) { - throw new InvalidUserDataException('classname is a required setting for esplugin with jvm=true') + if (extension.classname == null) { + throw new InvalidUserDataException('classname is a required setting for esplugin') } doFirst { - if (extension.jvm && extension.isolated == false) { + if (extension.isolated == false) { String warning = "WARNING: Disabling plugin isolation in ${project.path} is deprecated and will be removed in the future" logger.warn("${'=' * warning.length()}\n${warning}\n${'=' * warning.length()}") } @@ -74,10 +74,8 @@ class PluginPropertiesTask extends Copy { 'version': extension.version, 'elasticsearchVersion': VersionProperties.elasticsearch, 'javaVersion': project.targetCompatibility as String, - 'jvm': extension.jvm as String, - 'site': extension.site as String, 'isolated': extension.isolated as String, - 'classname': extension.jvm ? extension.classname : 'NA' + 'classname': extension.classname ] } } diff --git a/buildSrc/src/main/resources/plugin-descriptor.properties b/buildSrc/src/main/resources/plugin-descriptor.properties index 4c676c26cad..e6a5f81882d 100644 --- a/buildSrc/src/main/resources/plugin-descriptor.properties +++ b/buildSrc/src/main/resources/plugin-descriptor.properties @@ -2,26 +2,13 @@ # This file must exist as 'plugin-descriptor.properties' at # the root directory of all plugins. 
# -# A plugin can be 'site', 'jvm', or both. -# -### example site plugin for "foo": -# -# foo.zip <-- zip file for the plugin, with this structure: -# _site/ <-- the contents that will be served -# plugin-descriptor.properties <-- example contents below: -# -# site=true -# description=My cool plugin -# version=1.0 -# -### example jvm plugin for "foo" +### example plugin for "foo" # # foo.zip <-- zip file for the plugin, with this structure: # .jar <-- classes, resources, dependencies # .jar <-- any number of jars # plugin-descriptor.properties <-- example contents below: # -# jvm=true # classname=foo.bar.BazPlugin # description=My cool plugin # version=2.0 @@ -38,21 +25,6 @@ version=${version} # # 'name': the plugin name name=${name} - -### mandatory elements for site plugins: -# -# 'site': set to true to indicate contents of the _site/ -# directory in the root of the plugin should be served. -site=${site} -# -### mandatory elements for jvm plugins : -# -# 'jvm': true if the 'classname' class should be loaded -# from jar files in the root directory of the plugin. -# Note that only jar files in the root directory are -# added to the classpath for the plugin! If you need -# other resources, package them into a resources jar. -jvm=${jvm} # # 'classname': the name of the class to load, fully-qualified. classname=${classname} diff --git a/core/src/main/java/org/elasticsearch/http/HttpServer.java b/core/src/main/java/org/elasticsearch/http/HttpServer.java index 9971ce7722d..35c46f4f794 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpServer.java +++ b/core/src/main/java/org/elasticsearch/http/HttpServer.java @@ -51,7 +51,7 @@ import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; /** - * + * A component to serve http requests, backed by rest handlers. 
*/ public class HttpServer extends AbstractLifecycleComponent { @@ -63,10 +63,6 @@ public class HttpServer extends AbstractLifecycleComponent { private final NodeService nodeService; - private final boolean disableSites; - - private final PluginSiteFilter pluginSiteFilter = new PluginSiteFilter(); - @Inject public HttpServer(Settings settings, Environment environment, HttpServerTransport transport, RestController restController, @@ -77,9 +73,6 @@ public class HttpServer extends AbstractLifecycleComponent { this.restController = restController; this.nodeService = nodeService; nodeService.setHttpServer(this); - - this.disableSites = this.settings.getAsBoolean("http.disable_sites", false); - transport.httpServerAdapter(new Dispatcher(this)); } @@ -126,27 +119,13 @@ public class HttpServer extends AbstractLifecycleComponent { } public void internalDispatchRequest(final HttpRequest request, final HttpChannel channel) { - String rawPath = request.rawPath(); - if (rawPath.startsWith("/_plugin/")) { - RestFilterChain filterChain = restController.filterChain(pluginSiteFilter); - filterChain.continueProcessing(request, channel); - return; - } else if (rawPath.equals("/favicon.ico")) { + if (request.rawPath().equals("/favicon.ico")) { handleFavicon(request, channel); return; } restController.dispatchRequest(request, channel); } - - class PluginSiteFilter extends RestFilter { - - @Override - public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws IOException { - handlePluginSite((HttpRequest) request, (HttpChannel) channel); - } - } - void handleFavicon(HttpRequest request, HttpChannel channel) { if (request.method() == RestRequest.Method.GET) { try { @@ -163,129 +142,4 @@ public class HttpServer extends AbstractLifecycleComponent { channel.sendResponse(new BytesRestResponse(FORBIDDEN)); } } - - void handlePluginSite(HttpRequest request, HttpChannel channel) throws IOException { - if (disableSites) { - channel.sendResponse(new 
BytesRestResponse(FORBIDDEN)); - return; - } - if (request.method() == RestRequest.Method.OPTIONS) { - // when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added) - channel.sendResponse(new BytesRestResponse(OK)); - return; - } - if (request.method() != RestRequest.Method.GET) { - channel.sendResponse(new BytesRestResponse(FORBIDDEN)); - return; - } - // TODO for a "/_plugin" endpoint, we should have a page that lists all the plugins? - - String path = request.rawPath().substring("/_plugin/".length()); - int i1 = path.indexOf('/'); - String pluginName; - String sitePath; - if (i1 == -1) { - pluginName = path; - sitePath = null; - // If a trailing / is missing, we redirect to the right page #2654 - String redirectUrl = request.rawPath() + "/"; - BytesRestResponse restResponse = new BytesRestResponse(RestStatus.MOVED_PERMANENTLY, "text/html", ""); - restResponse.addHeader("Location", redirectUrl); - channel.sendResponse(restResponse); - return; - } else { - pluginName = path.substring(0, i1); - sitePath = path.substring(i1 + 1); - } - - // we default to index.html, or what the plugin provides (as a unix-style path) - // this is a relative path under _site configured by the plugin. - if (sitePath.length() == 0) { - sitePath = "index.html"; - } else { - // remove extraneous leading slashes, its not an absolute path. - while (sitePath.length() > 0 && sitePath.charAt(0) == '/') { - sitePath = sitePath.substring(1); - } - } - final Path siteFile = environment.pluginsFile().resolve(pluginName).resolve("_site"); - - final String separator = siteFile.getFileSystem().getSeparator(); - // Convert file separators. 
- sitePath = sitePath.replace("/", separator); - - Path file = siteFile.resolve(sitePath); - - // return not found instead of forbidden to prevent malicious requests to find out if files exist or dont exist - if (!Files.exists(file) || FileSystemUtils.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath().normalize())) { - channel.sendResponse(new BytesRestResponse(NOT_FOUND)); - return; - } - - BasicFileAttributes attributes = Files.readAttributes(file, BasicFileAttributes.class); - if (!attributes.isRegularFile()) { - // If it's not a dir, we send a 403 - if (!attributes.isDirectory()) { - channel.sendResponse(new BytesRestResponse(FORBIDDEN)); - return; - } - // We don't serve dir but if index.html exists in dir we should serve it - file = file.resolve("index.html"); - if (!Files.exists(file) || FileSystemUtils.isHidden(file) || !Files.isRegularFile(file)) { - channel.sendResponse(new BytesRestResponse(FORBIDDEN)); - return; - } - } - - try { - byte[] data = Files.readAllBytes(file); - channel.sendResponse(new BytesRestResponse(OK, guessMimeType(sitePath), data)); - } catch (IOException e) { - channel.sendResponse(new BytesRestResponse(INTERNAL_SERVER_ERROR)); - } - } - - - // TODO: Don't respond with a mime type that violates the request's Accept header - private String guessMimeType(String path) { - int lastDot = path.lastIndexOf('.'); - if (lastDot == -1) { - return ""; - } - String extension = path.substring(lastDot + 1).toLowerCase(Locale.ROOT); - String mimeType = DEFAULT_MIME_TYPES.get(extension); - if (mimeType == null) { - return ""; - } - return mimeType; - } - - static { - // This is not an exhaustive list, just the most common types. Call registerMimeType() to add more. 
- Map mimeTypes = new HashMap<>(); - mimeTypes.put("txt", "text/plain"); - mimeTypes.put("css", "text/css"); - mimeTypes.put("csv", "text/csv"); - mimeTypes.put("htm", "text/html"); - mimeTypes.put("html", "text/html"); - mimeTypes.put("xml", "text/xml"); - mimeTypes.put("js", "text/javascript"); // Technically it should be application/javascript (RFC 4329), but IE8 struggles with that - mimeTypes.put("xhtml", "application/xhtml+xml"); - mimeTypes.put("json", "application/json"); - mimeTypes.put("pdf", "application/pdf"); - mimeTypes.put("zip", "application/zip"); - mimeTypes.put("tar", "application/x-tar"); - mimeTypes.put("gif", "image/gif"); - mimeTypes.put("jpeg", "image/jpeg"); - mimeTypes.put("jpg", "image/jpeg"); - mimeTypes.put("tiff", "image/tiff"); - mimeTypes.put("tif", "image/tiff"); - mimeTypes.put("png", "image/png"); - mimeTypes.put("svg", "image/svg+xml"); - mimeTypes.put("ico", "image/vnd.microsoft.icon"); - mimeTypes.put("mp3", "audio/mpeg"); - DEFAULT_MIME_TYPES = unmodifiableMap(mimeTypes); - } - - public static final Map DEFAULT_MIME_TYPES; } diff --git a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java index a57a96c631d..a7d088ce214 100644 --- a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java @@ -20,9 +20,9 @@ package org.elasticsearch.plugins; public class DummyPluginInfo extends PluginInfo { - private DummyPluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) { - super(name, description, site, version, jvm, classname, isolated); + private DummyPluginInfo(String name, String description, String version, String classname, boolean isolated) { + super(name, description, version, classname, isolated); } - public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin 
description", true, "dummy_plugin_version", true, "DummyPluginName", true); + public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName", true); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java index 3062f01697d..76af7833f06 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -42,19 +42,14 @@ public class PluginInfo implements Streamable, ToXContent { static final XContentBuilderString NAME = new XContentBuilderString("name"); static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description"); static final XContentBuilderString URL = new XContentBuilderString("url"); - static final XContentBuilderString SITE = new XContentBuilderString("site"); static final XContentBuilderString VERSION = new XContentBuilderString("version"); - static final XContentBuilderString JVM = new XContentBuilderString("jvm"); static final XContentBuilderString CLASSNAME = new XContentBuilderString("classname"); static final XContentBuilderString ISOLATED = new XContentBuilderString("isolated"); } private String name; private String description; - private boolean site; private String version; - - private boolean jvm; private String classname; private boolean isolated; @@ -66,15 +61,11 @@ public class PluginInfo implements Streamable, ToXContent { * * @param name Its name * @param description Its description - * @param site true if it's a site plugin - * @param jvm true if it's a jvm plugin * @param version Version number */ - PluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) { + PluginInfo(String name, String description, String version, String classname, boolean isolated) { this.name = name; this.description = 
description; - this.site = site; - this.jvm = jvm; this.version = version; this.classname = classname; this.isolated = isolated; @@ -101,43 +92,28 @@ public class PluginInfo implements Streamable, ToXContent { throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]"); } - boolean jvm = Boolean.parseBoolean(props.getProperty("jvm")); - boolean site = Boolean.parseBoolean(props.getProperty("site")); - if (jvm == false && site == false) { - throw new IllegalArgumentException("Plugin [" + name + "] must be at least a jvm or site plugin"); + String esVersionString = props.getProperty("elasticsearch.version"); + if (esVersionString == null) { + throw new IllegalArgumentException("Property [elasticsearch.version] is missing for plugin [" + name + "]"); } - boolean isolated = true; - String classname = "NA"; - if (jvm) { - String esVersionString = props.getProperty("elasticsearch.version"); - if (esVersionString == null) { - throw new IllegalArgumentException("Property [elasticsearch.version] is missing for jvm plugin [" + name + "]"); - } - Version esVersion = Version.fromString(esVersionString); - if (esVersion.equals(Version.CURRENT) == false) { - throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() + - "]. 
Was designed for version [" + esVersionString + "]"); - } - String javaVersionString = props.getProperty("java.version"); - if (javaVersionString == null) { - throw new IllegalArgumentException("Property [java.version] is missing for jvm plugin [" + name + "]"); - } - JarHell.checkVersionFormat(javaVersionString); - JarHell.checkJavaVersion(name, javaVersionString); - isolated = Boolean.parseBoolean(props.getProperty("isolated", "true")); - classname = props.getProperty("classname"); - if (classname == null) { - throw new IllegalArgumentException("Property [classname] is missing for jvm plugin [" + name + "]"); - } + Version esVersion = Version.fromString(esVersionString); + if (esVersion.equals(Version.CURRENT) == false) { + throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() + + "]. Was designed for version [" + esVersionString + "]"); + } + String javaVersionString = props.getProperty("java.version"); + if (javaVersionString == null) { + throw new IllegalArgumentException("Property [java.version] is missing for plugin [" + name + "]"); + } + JarHell.checkVersionFormat(javaVersionString); + JarHell.checkJavaVersion(name, javaVersionString); + boolean isolated = Boolean.parseBoolean(props.getProperty("isolated", "true")); + String classname = props.getProperty("classname"); + if (classname == null) { + throw new IllegalArgumentException("Property [classname] is missing for plugin [" + name + "]"); } - if (site) { - if (!Files.exists(dir.resolve("_site"))) { - throw new IllegalArgumentException("Plugin [" + name + "] is a site plugin but has no '_site/' directory"); - } - } - - return new PluginInfo(name, description, site, version, jvm, classname, isolated); + return new PluginInfo(name, description, version, classname, isolated); } /** @@ -155,46 +131,19 @@ public class PluginInfo implements Streamable, ToXContent { } /** - * @return true if it's a site plugin - */ - public boolean isSite() 
{ - return site; - } - - /** - * @return true if it's a plugin running in the jvm - */ - public boolean isJvm() { - return jvm; - } - - /** - * @return true if jvm plugin has isolated classloader + * @return true if plugin has isolated classloader */ public boolean isIsolated() { return isolated; } /** - * @return jvm plugin's classname + * @return plugin's classname */ public String getClassname() { return classname; } - /** - * We compute the URL for sites: "/_plugin/" + name + "/" - * - * @return relative URL for site plugin - */ - public String getUrl() { - if (site) { - return ("/_plugin/" + name + "/"); - } else { - return null; - } - } - /** * @return Version number for the plugin */ @@ -212,8 +161,6 @@ public class PluginInfo implements Streamable, ToXContent { public void readFrom(StreamInput in) throws IOException { this.name = in.readString(); this.description = in.readString(); - this.site = in.readBoolean(); - this.jvm = in.readBoolean(); this.version = in.readString(); this.classname = in.readString(); this.isolated = in.readBoolean(); @@ -223,8 +170,6 @@ public class PluginInfo implements Streamable, ToXContent { public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeString(description); - out.writeBoolean(site); - out.writeBoolean(jvm); out.writeString(version); out.writeString(classname); out.writeBoolean(isolated); @@ -236,15 +181,8 @@ public class PluginInfo implements Streamable, ToXContent { builder.field(Fields.NAME, name); builder.field(Fields.VERSION, version); builder.field(Fields.DESCRIPTION, description); - if (site) { - builder.field(Fields.URL, getUrl()); - } - builder.field(Fields.JVM, jvm); - if (jvm) { - builder.field(Fields.CLASSNAME, classname); - builder.field(Fields.ISOLATED, isolated); - } - builder.field(Fields.SITE, site); + builder.field(Fields.CLASSNAME, classname); + builder.field(Fields.ISOLATED, isolated); builder.endObject(); return builder; @@ -274,14 +212,9 @@ public class PluginInfo 
implements Streamable, ToXContent { .append("- Plugin information:\n") .append("Name: ").append(name).append("\n") .append("Description: ").append(description).append("\n") - .append("Site: ").append(site).append("\n") .append("Version: ").append(version).append("\n") - .append("JVM: ").append(jvm).append("\n"); - - if (jvm) { - information.append(" * Classname: ").append(classname).append("\n"); - information.append(" * Isolated: ").append(isolated); - } + .append(" * Classname: ").append(classname).append("\n") + .append(" * Isolated: ").append(isolated); return information.toString(); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index 7cd50409fb6..8e6391ee0c6 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -258,9 +258,7 @@ public class PluginManager { } // check for jar hell before any copying - if (info.isJvm()) { - jarHellCheck(root, info.isIsolated()); - } + jarHellCheck(root, info.isIsolated()); // read optional security policy (extra permissions) // if it exists, confirm or warn the user diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 50938a1916c..4e61185491d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -98,7 +98,7 @@ public class PluginsService extends AbstractComponent { // first we load plugins that are on the classpath. 
this is for tests and transport clients for (Class pluginClass : classpathPlugins) { Plugin plugin = loadPlugin(pluginClass, settings); - PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), false, "NA", true, pluginClass.getName(), false); + PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), "NA", pluginClass.getName(), false); if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } @@ -136,18 +136,10 @@ public class PluginsService extends AbstractComponent { plugins = Collections.unmodifiableList(pluginsLoaded); - // We need to build a List of jvm and site plugins for checking mandatory plugins - Map jvmPlugins = new HashMap<>(); - List sitePlugins = new ArrayList<>(); - + // We need to build a List of plugins for checking mandatory plugins + Set pluginsNames = new HashSet<>(); for (Tuple tuple : plugins) { - PluginInfo info = tuple.v1(); - if (info.isJvm()) { - jvmPlugins.put(info.getName(), tuple.v2()); - } - if (info.isSite()) { - sitePlugins.add(info.getName()); - } + pluginsNames.add(tuple.v1().getName()); } // Checking expected plugins @@ -155,7 +147,7 @@ public class PluginsService extends AbstractComponent { if (mandatoryPlugins != null) { Set missingPlugins = new HashSet<>(); for (String mandatoryPlugin : mandatoryPlugins) { - if (!jvmPlugins.containsKey(mandatoryPlugin) && !sitePlugins.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) { + if (!pluginsNames.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) { missingPlugins.add(mandatoryPlugin); } } @@ -175,10 +167,11 @@ public class PluginsService extends AbstractComponent { jvmPluginNames.add(pluginInfo.getName()); } - logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins); + logger.info("modules {}, plugins {}", moduleNames, jvmPluginNames); Map> onModuleReferences = new HashMap<>(); - for (Plugin plugin : jvmPlugins.values()) { + for 
(Tuple pluginEntry : plugins) { + Plugin plugin = pluginEntry.v2(); List list = new ArrayList<>(); for (Method method : plugin.getClass().getMethods()) { if (!method.getName().equals("onModule")) { @@ -304,9 +297,6 @@ public class PluginsService extends AbstractComponent { continue; // skip over .DS_Store etc } PluginInfo info = PluginInfo.readFromProperties(module); - if (!info.isJvm()) { - throw new IllegalStateException("modules must be jvm plugins: " + info); - } if (!info.isIsolated()) { throw new IllegalStateException("modules must be isolated: " + info); } @@ -353,17 +343,14 @@ public class PluginsService extends AbstractComponent { } List urls = new ArrayList<>(); - if (info.isJvm()) { - // a jvm plugin: gather urls for jar files - try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { - for (Path jar : jarStream) { - // normalize with toRealPath to get symlinks out of our hair - urls.add(jar.toRealPath().toUri().toURL()); - } + try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { + for (Path jar : jarStream) { + // normalize with toRealPath to get symlinks out of our hair + urls.add(jar.toRealPath().toUri().toURL()); } } final Bundle bundle; - if (info.isJvm() && info.isIsolated() == false) { + if (info.isIsolated() == false) { bundle = bundles.get(0); // purgatory } else { bundle = new Bundle(); @@ -395,15 +382,10 @@ public class PluginsService extends AbstractComponent { // create a child to load the plugins in this bundle ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader()); for (PluginInfo pluginInfo : bundle.plugins) { - final Plugin plugin; - if (pluginInfo.isJvm()) { - // reload lucene SPI with any new services from the plugin - reloadLuceneSPI(loader); - Class pluginClass = loadPluginClass(pluginInfo.getClassname(), loader); - plugin = loadPlugin(pluginClass, settings); - } else { - plugin = new SitePlugin(pluginInfo.getName(), 
pluginInfo.getDescription()); - } + // reload lucene SPI with any new services from the plugin + reloadLuceneSPI(loader); + final Class pluginClass = loadPluginClass(pluginInfo.getClassname(), loader); + final Plugin plugin = loadPlugin(pluginClass, settings); plugins.add(new Tuple<>(pluginInfo, plugin)); } } diff --git a/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java b/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java deleted file mode 100644 index 4c12f2095bb..00000000000 --- a/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plugins; - -/** A site-only plugin, just serves resources */ -final class SitePlugin extends Plugin { - final String name; - final String description; - - SitePlugin(String name, String description) { - this.name = name; - this.description = description; - } - - @Override - public String name() { - return name; - } - - @Override - public String description() { - return description; - } -} diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java index 34e05223657..1a37ab6da38 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java @@ -84,8 +84,6 @@ public class RestPluginsAction extends AbstractCatAction { table.addCell("name", "alias:n;desc:node name"); table.addCell("component", "alias:c;desc:component"); table.addCell("version", "alias:v;desc:component version"); - table.addCell("type", "alias:t;desc:type (j for JVM, s for Site)"); - table.addCell("url", "alias:u;desc:url for site plugins"); table.addCell("description", "alias:d;default:false;desc:plugin details"); table.endHeaders(); return table; @@ -104,22 +102,6 @@ public class RestPluginsAction extends AbstractCatAction { table.addCell(node.name()); table.addCell(pluginInfo.getName()); table.addCell(pluginInfo.getVersion()); - String type; - if (pluginInfo.isSite()) { - if (pluginInfo.isJvm()) { - type = "j/s"; - } else { - type = "s"; - } - } else { - if (pluginInfo.isJvm()) { - type = "j"; - } else { - type = ""; - } - } - table.addCell(type); - table.addCell(pluginInfo.getUrl()); table.addCell(pluginInfo.getDescription()); table.endRow(); } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index deaff46f27b..37a0f4e358e 100644 --- 
a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -40,17 +40,13 @@ public class PluginInfoTests extends ESTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); PluginInfo info = PluginInfo.readFromProperties(pluginDir); assertEquals("my_plugin", info.getName()); assertEquals("fake desc", info.getDescription()); assertEquals("1.0", info.getVersion()); assertEquals("FakePlugin", info.getClassname()); - assertTrue(info.isJvm()); assertTrue(info.isIsolated()); - assertFalse(info.isSite()); - assertNull(info.getUrl()); } public void testReadFromPropertiesNameMissing() throws Exception { @@ -94,27 +90,12 @@ public class PluginInfoTests extends ESTestCase { } } - public void testReadFromPropertiesJvmAndSiteMissing() throws Exception { - Path pluginDir = createTempDir().resolve("fake-plugin"); - PluginTestUtil.writeProperties(pluginDir, - "description", "fake desc", - "version", "1.0", - "name", "my_plugin"); - try { - PluginInfo.readFromProperties(pluginDir); - fail("expected jvm or site exception"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("must be at least a jvm or site plugin")); - } - } - public void testReadFromPropertiesElasticsearchVersionMissing() throws Exception { Path pluginDir = createTempDir().resolve("fake-plugin"); PluginTestUtil.writeProperties(pluginDir, "description", "fake desc", "name", "my_plugin", - "version", "1.0", - "jvm", "true"); + "version", "1.0"); try { PluginInfo.readFromProperties(pluginDir); fail("expected missing elasticsearch version exception"); @@ -129,8 +110,7 @@ public class PluginInfoTests extends ESTestCase { "description", "fake desc", "name", "my_plugin", "elasticsearch.version", Version.CURRENT.toString(), - "version", "1.0", - "jvm", "true"); + "version", "1.0"); 
try { PluginInfo.readFromProperties(pluginDir); fail("expected missing java version exception"); @@ -148,8 +128,7 @@ public class PluginInfoTests extends ESTestCase { "elasticsearch.version", Version.CURRENT.toString(), "java.version", "1000000.0", "classname", "FakePlugin", - "version", "1.0", - "jvm", "true"); + "version", "1.0"); try { PluginInfo.readFromProperties(pluginDir); fail("expected incompatible java version exception"); @@ -167,8 +146,7 @@ public class PluginInfoTests extends ESTestCase { "elasticsearch.version", Version.CURRENT.toString(), "java.version", "1.7.0_80", "classname", "FakePlugin", - "version", "1.0", - "jvm", "true"); + "version", "1.0"); try { PluginInfo.readFromProperties(pluginDir); fail("expected bad java version format exception"); @@ -182,7 +160,6 @@ public class PluginInfoTests extends ESTestCase { PluginTestUtil.writeProperties(pluginDir, "description", "fake desc", "version", "1.0", - "jvm", "true", "name", "my_plugin", "elasticsearch.version", "bogus"); try { @@ -199,7 +176,6 @@ public class PluginInfoTests extends ESTestCase { "description", "fake desc", "name", "my_plugin", "version", "1.0", - "jvm", "true", "elasticsearch.version", Version.V_1_7_0.toString()); try { PluginInfo.readFromProperties(pluginDir); @@ -216,8 +192,7 @@ public class PluginInfoTests extends ESTestCase { "name", "my_plugin", "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "jvm", "true"); + "java.version", System.getProperty("java.specification.version")); try { PluginInfo.readFromProperties(pluginDir); fail("expected old elasticsearch version exception"); @@ -226,42 +201,13 @@ public class PluginInfoTests extends ESTestCase { } } - public void testReadFromPropertiesSitePlugin() throws Exception { - Path pluginDir = createTempDir().resolve("fake-plugin"); - Files.createDirectories(pluginDir.resolve("_site")); - PluginTestUtil.writeProperties(pluginDir, - 
"description", "fake desc", - "name", "my_plugin", - "version", "1.0", - "site", "true"); - PluginInfo info = PluginInfo.readFromProperties(pluginDir); - assertTrue(info.isSite()); - assertFalse(info.isJvm()); - assertEquals("NA", info.getClassname()); - } - - public void testReadFromPropertiesSitePluginWithoutSite() throws Exception { - Path pluginDir = createTempDir().resolve("fake-plugin"); - PluginTestUtil.writeProperties(pluginDir, - "description", "fake desc", - "name", "my_plugin", - "version", "1.0", - "site", "true"); - try { - PluginInfo.readFromProperties(pluginDir); - fail("didn't get expected exception"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("site plugin but has no '_site")); - } - } - public void testPluginListSorted() { PluginsAndModules pluginsInfo = new PluginsAndModules(); - pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("c", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("b", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("e", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("a", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("d", "foo", "dummy", "dummyclass", true)); final List infos = pluginsInfo.getPluginInfos(); List names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList()); diff --git a/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java 
b/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java deleted file mode 100644 index e2df2518f1c..00000000000 --- a/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugins; - -import org.apache.http.client.config.RequestConfig; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; -import org.elasticsearch.test.rest.client.http.HttpResponse; - -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.rest.RestStatus.FORBIDDEN; -import static org.elasticsearch.rest.RestStatus.MOVED_PERMANENTLY; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; -import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; -import static org.hamcrest.Matchers.containsString; - -/** - * We want to test site plugins - */ -@ClusterScope(scope = Scope.SUITE, numDataNodes = 1) -public class SitePluginIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - Path pluginDir = getDataPath("/org/elasticsearch/test_plugins"); - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put("path.plugins", pluginDir.toAbsolutePath()) - .put("force.http.enabled", true) - .build(); - } - - @Override - public HttpRequestBuilder httpClient() { - RequestConfig.Builder builder = RequestConfig.custom().setRedirectsEnabled(false); - CloseableHttpClient httpClient = HttpClients.custom().setDefaultRequestConfig(builder.build()).build(); - return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class)); - } - - 
public void testRedirectSitePlugin() throws Exception { - // We use an HTTP Client to test redirection - HttpResponse response = httpClient().method("GET").path("/_plugin/dummy").execute(); - assertThat(response, hasStatus(MOVED_PERMANENTLY)); - assertThat(response.getBody(), containsString("/_plugin/dummy/")); - - // We test the real URL - response = httpClient().method("GET").path("/_plugin/dummy/").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin")); - } - - /** - * Test direct access to an existing file (index.html) - */ - public void testAnyPage() throws Exception { - HttpResponse response = httpClient().path("/_plugin/dummy/index.html").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin")); - } - - /** - * Test normalizing of path - */ - public void testThatPathsAreNormalized() throws Exception { - // more info: https://www.owasp.org/index.php/Path_Traversal - List notFoundUris = new ArrayList<>(); - notFoundUris.add("/_plugin/dummy/../../../../../log4j.properties"); - notFoundUris.add("/_plugin/dummy/../../../../../%00log4j.properties"); - notFoundUris.add("/_plugin/dummy/..%c0%af..%c0%af..%c0%af..%c0%af..%c0%aflog4j.properties"); - notFoundUris.add("/_plugin/dummy/%2E%2E/%2E%2E/%2E%2E/%2E%2E/index.html"); - notFoundUris.add("/_plugin/dummy/%2e%2e/%2e%2e/%2e%2e/%2e%2e/index.html"); - notFoundUris.add("/_plugin/dummy/%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2findex.html"); - notFoundUris.add("/_plugin/dummy/%2E%2E/%2E%2E/%2E%2E/%2E%2E/index.html"); - notFoundUris.add("/_plugin/dummy/..%5C..%5C..%5C..%5C..%5Clog4j.properties"); - - for (String uri : notFoundUris) { - HttpResponse response = httpClient().path(uri).execute(); - String message = String.format(Locale.ROOT, "URI [%s] expected to be not found", uri); - assertThat(message, response, hasStatus(NOT_FOUND)); - } - - // using relative path inside of the plugin should work - 
HttpResponse response = httpClient().path("/_plugin/dummy/dir1/../dir1/../index.html").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin")); - } - - /** - * Test case for #4845: https://github.com/elasticsearch/elasticsearch/issues/4845 - * Serving _site plugins do not pick up on index.html for sub directories - */ - public void testWelcomePageInSubDirs() throws Exception { - HttpResponse response = httpClient().path("/_plugin/subdir/dir/").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin (subdir)")); - - response = httpClient().path("/_plugin/subdir/dir_without_index/").execute(); - assertThat(response, hasStatus(FORBIDDEN)); - - response = httpClient().path("/_plugin/subdir/dir_without_index/page.html").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin (page)")); - } -} diff --git a/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java b/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java deleted file mode 100644 index 1cde90d6984..00000000000 --- a/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.plugins; - -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; -import org.elasticsearch.test.rest.client.http.HttpResponse; - -import java.nio.file.Path; - -import static org.apache.lucene.util.Constants.WINDOWS; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; - -@ClusterScope(scope = SUITE, numDataNodes = 1) -public class SitePluginRelativePathConfigIT extends ESIntegTestCase { - private final Path root = PathUtils.get(".").toAbsolutePath().getRoot(); - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - String cwdToRoot = getRelativePath(PathUtils.get(".").toAbsolutePath()); - Path pluginDir = PathUtils.get(cwdToRoot, relativizeToRootIfNecessary(getDataPath("/org/elasticsearch/test_plugins")).toString()); - - Path tempDir = createTempDir(); - boolean useRelativeInMiddleOfPath = randomBoolean(); - if (useRelativeInMiddleOfPath) { - pluginDir = PathUtils.get(tempDir.toString(), 
getRelativePath(tempDir), pluginDir.toString()); - } - - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put("path.plugins", pluginDir) - .put("force.http.enabled", true) - .build(); - } - - public void testThatRelativePathsDontAffectPlugins() throws Exception { - HttpResponse response = httpClient().method("GET").path("/_plugin/dummy/").execute(); - assertThat(response, hasStatus(OK)); - } - - private Path relativizeToRootIfNecessary(Path path) { - if (WINDOWS) { - return root.relativize(path); - } - return path; - } - - private String getRelativePath(Path path) { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < path.getNameCount(); i++) { - sb.append(".."); - sb.append(path.getFileSystem().getSeparator()); - } - - return sb.toString(); - } - - @Override - public HttpRequestBuilder httpClient() { - CloseableHttpClient httpClient = HttpClients.createDefault(); - return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class)); - } -} diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc index 9461ba8dd53..6f63eab83ca 100644 --- a/docs/plugins/authors.asciidoc +++ b/docs/plugins/authors.asciidoc @@ -3,8 +3,6 @@ The Elasticsearch repository contains examples of: -* a https://github.com/elastic/elasticsearch/tree/master/plugins/site-example[site plugin] - for serving static HTML, JavaScript, and CSS. * a https://github.com/elastic/elasticsearch/tree/master/plugins/jvm-example[Java plugin] which contains Java code. @@ -12,20 +10,6 @@ These examples provide the bare bones needed to get started. For more information about how to write a plugin, we recommend looking at the plugins listed in this documentation for inspiration. -[NOTE] -.Site plugins -==================================== - -The example site plugin mentioned above contains all of the scaffolding needed -for integrating with Gradle builds. 
If you don't plan on using Gradle, then all -you really need in your plugin is: - -* The `plugin-descriptor.properties` file -* The `_site/` directory -* The `_site/index.html` file - -==================================== - [float] === Plugin descriptor file @@ -43,7 +27,7 @@ instance, see https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/build.gradle[`/plugins/site-example/build.gradle`]. [float] -==== Mandatory elements for all plugins +==== Mandatory elements for plugins [cols="<,<,<",options="header",] @@ -56,23 +40,6 @@ https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/build. |`name` |String | the plugin name -|======================================================================= - - - -[float] -==== Mandatory elements for Java plugins - - -[cols="<,<,<",options="header",] -|======================================================================= -|Element | Type | Description - -|`jvm` |Boolean | true if the `classname` class should be loaded -from jar files in the root directory of the plugin. -Note that only jar files in the root directory are added to the classpath for the plugin! -If you need other resources, package them into a resources jar. - |`classname` |String | the name of the class to load, fully-qualified. |`java.version` |String | version of java the code is built against. @@ -83,6 +50,9 @@ of nonnegative decimal integers separated by "."'s and may have leading zeros. |======================================================================= +Note that only jar files in the root directory are added to the classpath for the plugin! +If you need other resources, package them into a resources jar. + [IMPORTANT] .Plugin release lifecycle ============================================== @@ -94,20 +64,6 @@ in the presence of plugins with the incorrect `elasticsearch.version`. 
============================================== -[float] -==== Mandatory elements for Site plugins - - -[cols="<,<,<",options="header",] -|======================================================================= -|Element | Type | Description - -|`site` |Boolean | true to indicate contents of the `_site/` -directory in the root of the plugin should be served. - -|======================================================================= - - [float] === Testing your plugin diff --git a/docs/reference/setup/upgrade.asciidoc b/docs/reference/setup/upgrade.asciidoc index 894f82a6db5..05b63588252 100644 --- a/docs/reference/setup/upgrade.asciidoc +++ b/docs/reference/setup/upgrade.asciidoc @@ -27,7 +27,7 @@ consult this table: |2.x |3.x |<> |======================================================================= -TIP: Take plugins into consideration as well when upgrading. Most plugins will have to be upgraded alongside Elasticsearch, although some plugins accessed primarily through the browser (`_site` plugins) may continue to work given that API changes are compatible. +TIP: Take plugins into consideration as well when upgrading. Plugins must be upgraded alongside Elasticsearch. 
include::backup.asciidoc[] diff --git a/modules/build.gradle b/modules/build.gradle index 41f7a8873b4..4b88dfd703f 100644 --- a/modules/build.gradle +++ b/modules/build.gradle @@ -39,8 +39,5 @@ subprojects { if (esplugin.isolated == false) { throw new InvalidModelException("Modules cannot disable isolation") } - if (esplugin.jvm == false) { - throw new InvalidModelException("Modules must be jvm plugins") - } } } diff --git a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml index 1550f2a7f81..cc777bd826b 100644 --- a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml +++ b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.modules.0.name: lang-expression } - - match: { nodes.$master.modules.0.jvm: true } diff --git a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml index c276bab6495..d5044bbe422 100644 --- a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml +++ b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.modules.0.name: lang-groovy } - - match: { nodes.$master.modules.0.jvm: true } diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml index 9bfea28abfa..195eea7c4b8 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml @@ -11,7 +11,6 @@ nodes.info: {} - match: { 
nodes.$master.modules.0.name: lang-mustache } - - match: { nodes.$master.modules.0.jvm: true } --- "Indexed template": diff --git a/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml b/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml index 51ba41e7e82..7a5acd1f001 100644 --- a/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml +++ b/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-azure } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml index e3b7844fd54..d612c75db97 100644 --- a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml +++ b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-ec2 } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml index 8f5fbdc4ab4..6f48aa6c29e 100644 --- a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml +++ b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-gce } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml 
b/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml index 4c110238aea..36172fa2c33 100644 --- a/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml +++ b/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-multicast } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml b/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml index 169b924f83d..c671fe2e8ba 100644 --- a/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml +++ b/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: jvm-example } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml index 6259780bfb4..04a5a7a2051 100644 --- a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml +++ b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: lang-plan-a } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml index 819478d7d56..9654535f2ac 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml +++ 
b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: mapper-attachments } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml index a77304ba5fc..fb929f1e822 100644 --- a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml +++ b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-azure } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml index 7c569408a61..6fbbfc82e87 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml @@ -13,7 +13,6 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-hdfs } - - match: { nodes.$master.plugins.0.jvm: true } --- # # Check that we can't use file:// repositories or anything like that diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml index 811ff888baa..5fcc81209e2 100644 --- a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-s3 } - - match: { 
nodes.$master.plugins.0.jvm: true } diff --git a/plugins/site-example/build.gradle b/plugins/site-example/build.gradle deleted file mode 100644 index d2228129025..00000000000 --- a/plugins/site-example/build.gradle +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -esplugin { - description 'Demonstrates how to serve resources via elasticsearch.' - jvm false - site true -} - -// no unit tests -test.enabled = false diff --git a/plugins/site-example/src/site/_site/index.html b/plugins/site-example/src/site/_site/index.html deleted file mode 100644 index bc6343f6653..00000000000 --- a/plugins/site-example/src/site/_site/index.html +++ /dev/null @@ -1,6 +0,0 @@ - - - Page title - - Page body - diff --git a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteContentsIT.java b/plugins/site-example/src/test/java/org/elasticsearch/example/SiteContentsIT.java deleted file mode 100644 index c92a0ba719e..00000000000 --- a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteContentsIT.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.example; - -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ExternalTestCluster; -import org.elasticsearch.test.TestCluster; -import org.elasticsearch.test.rest.client.RestResponse; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; - -import java.net.InetSocketAddress; -import java.util.concurrent.TimeUnit; - -/** - * verifies content is actually served for the site plugin - */ -public class SiteContentsIT extends ESIntegTestCase { - - // TODO: probably a better way to test, but we don't want to really - // define a fake rest spec or anything? 
- public void test() throws Exception { - TestCluster cluster = cluster(); - assumeTrue("this test will not work from an IDE unless you pass tests.cluster pointing to a running instance", cluster instanceof ExternalTestCluster); - ExternalTestCluster externalCluster = (ExternalTestCluster) cluster; - try (CloseableHttpClient httpClient = HttpClients.createMinimal(new PoolingHttpClientConnectionManager(15, TimeUnit.SECONDS))) { - for (InetSocketAddress address : externalCluster.httpAddresses()) { - RestResponse restResponse = new RestResponse( - new HttpRequestBuilder(httpClient) - .host(NetworkAddress.formatAddress(address.getAddress())).port(address.getPort()) - .path("/_plugin/site-example/") - .method("GET").execute()); - assertEquals(200, restResponse.getStatusCode()); - String body = restResponse.getBodyAsString(); - assertTrue("unexpected body contents: " + body, body.contains("Page body")); - } - } - } -} diff --git a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteRestIT.java b/plugins/site-example/src/test/java/org/elasticsearch/example/SiteRestIT.java deleted file mode 100644 index e3df9ce92d9..00000000000 --- a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteRestIT.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.example; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -public class SiteRestIT extends ESRestTestCase { - - public SiteRestIT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); - } -} - diff --git a/plugins/site-example/src/test/resources/rest-api-spec/test/example/10_basic.yaml b/plugins/site-example/src/test/resources/rest-api-spec/test/example/10_basic.yaml deleted file mode 100644 index a66ce5c9133..00000000000 --- a/plugins/site-example/src/test/resources/rest-api-spec/test/example/10_basic.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# Integration tests for Example site plugin -# -"Example site loaded": - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - - match: { nodes.$master.plugins.0.name: site-example } - - match: { nodes.$master.plugins.0.jvm: false } - - match: { nodes.$master.plugins.0.site: true } diff --git a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml index 155a39b1471..a210fd4e597 100644 --- a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml +++ b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: store-smb } - - match: { 
nodes.$master.plugins.0.jvm: true } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index bc92f894019..514a2ee6710 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -196,7 +196,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); assertStatus("install", USAGE); } @@ -216,7 +215,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); Path binDir = environment.binFile(); @@ -260,7 +258,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); Path pluginConfigDir = environment.configFile().resolve(pluginName); @@ -296,7 +293,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "2.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl)); @@ -361,7 +357,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); Path binDir = environment.binFile(); @@ -392,16 +387,13 @@ public 
class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); System.err.println("install " + pluginUrl + " --verbose"); ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl + " --verbose")); assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); assertThat(terminal.getTerminalOutput(), hasItem(containsString("Name: fake-plugin"))); assertThat(terminal.getTerminalOutput(), hasItem(containsString("Description: fake desc"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Site: false"))); assertThat(terminal.getTerminalOutput(), hasItem(containsString("Version: 1.0"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("JVM: true"))); assertThatPluginIsListed(pluginName); } @@ -414,7 +406,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl)); assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); @@ -447,7 +438,6 @@ public class PluginManagerTests extends ESIntegTestCase { "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), "isolated", "false", - "jvm", "true", "classname", "FakePlugin"); // install @@ -465,63 +455,20 @@ public class PluginManagerTests extends ESIntegTestCase { assertTrue(foundExpectedMessage); } - public void testInstallSitePluginVerbose() throws IOException { - String pluginName = "fake-plugin"; - Path pluginDir = createTempDir().resolve(pluginName); - 
Files.createDirectories(pluginDir.resolve("_site")); - Files.createFile(pluginDir.resolve("_site").resolve("somefile")); - String pluginUrl = createPlugin(pluginDir, - "description", "fake desc", - "name", pluginName, - "version", "1.0", - "site", "true"); - ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl + " --verbose")); - assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Name: fake-plugin"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Description: fake desc"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Site: true"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Version: 1.0"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("JVM: false"))); - assertThatPluginIsListed(pluginName); - // We want to check that Plugin Manager moves content to _site - assertFileExists(environment.pluginsFile().resolve(pluginName).resolve("_site")); - } - - public void testInstallSitePlugin() throws IOException { - String pluginName = "fake-plugin"; - Path pluginDir = createTempDir().resolve(pluginName); - Files.createDirectories(pluginDir.resolve("_site")); - Files.createFile(pluginDir.resolve("_site").resolve("somefile")); - String pluginUrl = createPlugin(pluginDir, - "description", "fake desc", - "name", pluginName, - "version", "1.0", - "site", "true"); - ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl)); - assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Name: fake-plugin")))); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Description:")))); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Site:")))); - 
assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Version:")))); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("JVM:")))); - assertThatPluginIsListed(pluginName); - // We want to check that Plugin Manager moves content to _site - assertFileExists(environment.pluginsFile().resolve(pluginName).resolve("_site")); - } - public void testInstallPluginWithBadChecksum() throws IOException { String pluginName = "fake-plugin"; Path pluginDir = createTempDir().resolve(pluginName); - Files.createDirectories(pluginDir.resolve("_site")); - Files.createFile(pluginDir.resolve("_site").resolve("somefile")); String pluginUrl = createPluginWithBadChecksum(pluginDir, - "description", "fake desc", - "version", "1.0", - "site", "true"); + "description", "fake desc", + "name", pluginName, + "version", "1.0", + "elasticsearch.version", Version.CURRENT.toString(), + "java.version", System.getProperty("java.specification.version"), + "classname", "FakePlugin"); assertStatus(String.format(Locale.ROOT, "install %s --verbose", pluginUrl), ExitStatus.IO_ERROR); assertThatPluginIsNotListed(pluginName); - assertFileNotExists(environment.pluginsFile().resolve(pluginName).resolve("_site")); + assertFileNotExists(environment.pluginsFile().resolve(pluginName)); } private void singlePluginInstallAndRemove(String pluginDescriptor, String pluginName, String pluginCoordinates) throws IOException { @@ -606,7 +553,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); // We want to remove plugin with plugin short name diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash index 54978b39605..da5790d69c8 100644 --- 
a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash @@ -263,12 +263,6 @@ fi install_and_check_plugin repository s3 aws-java-sdk-core-*.jar } -@test "[$GROUP] install site example" { - # Doesn't use install_and_check_plugin because this is a site plugin - install_plugin site-example $(readlink -m site-example-*.zip) - assert_file_exist "$ESHOME/plugins/site-example/_site/index.html" -} - @test "[$GROUP] install store-smb plugin" { install_and_check_plugin store smb } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml index bf974c85703..86f2a360afa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml @@ -10,7 +10,5 @@ name .+ \n component .+ \n version .+ \n - type .+ \n - url .+ \n description .+ \n $/ diff --git a/settings.gradle b/settings.gradle index 55126b3c808..682bfec96d9 100644 --- a/settings.gradle +++ b/settings.gradle @@ -34,7 +34,6 @@ List projects = [ 'plugins:repository-hdfs', 'plugins:repository-s3', 'plugins:jvm-example', - 'plugins:site-example', 'plugins:store-smb', 'qa:evil-tests', 'qa:smoke-test-client', diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index b2ce5ebd86e..60cde5a5194 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -154,11 +154,9 @@ public class BootstrapForTesting { try (InputStream stream = url.openStream()) { properties.load(stream); } - if (Boolean.parseBoolean(properties.getProperty("jvm"))) { - String clazz = 
properties.getProperty("classname"); - if (clazz != null) { - Class.forName(clazz); - } + String clazz = properties.getProperty("classname"); + if (clazz != null) { + Class.forName(clazz); } } } catch (Exception e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 61755f7ecb4..cb3bbc7436b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -731,82 +731,6 @@ public class ElasticsearchAssertions { return response; } - public static void assertNodeContainsPlugins(NodesInfoResponse response, String nodeId, - List expectedJvmPluginNames, - List expectedJvmPluginDescriptions, - List expectedJvmVersions, - List expectedSitePluginNames, - List expectedSitePluginDescriptions, - List expectedSiteVersions) { - - Assert.assertThat(response.getNodesMap().get(nodeId), notNullValue()); - - PluginsAndModules plugins = response.getNodesMap().get(nodeId).getPlugins(); - Assert.assertThat(plugins, notNullValue()); - - List pluginNames = filterAndMap(plugins, jvmPluginPredicate, nameFunction); - for (String expectedJvmPluginName : expectedJvmPluginNames) { - Assert.assertThat(pluginNames, hasItem(expectedJvmPluginName)); - } - - List pluginDescriptions = filterAndMap(plugins, jvmPluginPredicate, descriptionFunction); - for (String expectedJvmPluginDescription : expectedJvmPluginDescriptions) { - Assert.assertThat(pluginDescriptions, hasItem(expectedJvmPluginDescription)); - } - - List jvmPluginVersions = filterAndMap(plugins, jvmPluginPredicate, versionFunction); - for (String pluginVersion : expectedJvmVersions) { - Assert.assertThat(jvmPluginVersions, hasItem(pluginVersion)); - } - - boolean anyHaveUrls = - plugins - .getPluginInfos() - .stream() - 
.filter(jvmPluginPredicate.and(sitePluginPredicate.negate())) - .map(urlFunction) - .anyMatch(p -> p != null); - assertFalse(anyHaveUrls); - - List sitePluginNames = filterAndMap(plugins, sitePluginPredicate, nameFunction); - - Assert.assertThat(sitePluginNames.isEmpty(), is(expectedSitePluginNames.isEmpty())); - for (String expectedSitePluginName : expectedSitePluginNames) { - Assert.assertThat(sitePluginNames, hasItem(expectedSitePluginName)); - } - - List sitePluginDescriptions = filterAndMap(plugins, sitePluginPredicate, descriptionFunction); - Assert.assertThat(sitePluginDescriptions.isEmpty(), is(expectedSitePluginDescriptions.isEmpty())); - for (String sitePluginDescription : expectedSitePluginDescriptions) { - Assert.assertThat(sitePluginDescriptions, hasItem(sitePluginDescription)); - } - - List sitePluginUrls = filterAndMap(plugins, sitePluginPredicate, urlFunction); - Assert.assertThat(sitePluginUrls, not(contains(nullValue()))); - - List sitePluginVersions = filterAndMap(plugins, sitePluginPredicate, versionFunction); - Assert.assertThat(sitePluginVersions.isEmpty(), is(expectedSiteVersions.isEmpty())); - for (String pluginVersion : expectedSiteVersions) { - Assert.assertThat(sitePluginVersions, hasItem(pluginVersion)); - } - } - - private static List filterAndMap(PluginsAndModules pluginsInfo, Predicate predicate, Function function) { - return pluginsInfo.getPluginInfos().stream().filter(predicate).map(function).collect(Collectors.toList()); - } - - private static Predicate jvmPluginPredicate = p -> p.isJvm(); - - private static Predicate sitePluginPredicate = p -> p.isSite(); - - private static Function nameFunction = p -> p.getName(); - - private static Function descriptionFunction = p -> p.getDescription(); - - private static Function urlFunction = p -> p.getUrl(); - - private static Function versionFunction = p -> p.getVersion(); - /** * Check if a file exists */ From 328c375970bf08af44097528d5ff223e3b4354cf Mon Sep 17 00:00:00 2001 From: Martijn 
van Groningen Date: Mon, 18 Jan 2016 11:59:57 +0100 Subject: [PATCH 233/347] test: removed forgotten AwaitsFix annotations --- .../elasticsearch/indices/template/SimpleIndexTemplateIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index d1cb2193b07..3115daf29d8 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -64,7 +64,6 @@ import static org.hamcrest.Matchers.nullValue; */ public class SimpleIndexTemplateIT extends ESIntegTestCase { - @AwaitsFix(bugUrl = "temporarily ignored till we have removed the ingest index template") public void testSimpleIndexTemplateTests() throws Exception { // clean all templates setup by the framework. client().admin().indices().prepareDeleteTemplate("*").get(); @@ -315,7 +314,6 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { } } - @AwaitsFix(bugUrl = "temporarily ignored till we have removed the ingest index template") public void testInvalidSettings() throws Exception { // clean all templates setup by the framework. 
client().admin().indices().prepareDeleteTemplate("*").get(); From c7d220330d89f2b7502f70a49f8e2dc9f0102df9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 18 Jan 2016 13:09:10 +0100 Subject: [PATCH 234/347] added note to jdocs that this exception should never get serialzed --- .../ingest/processor/FailProcessorException.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java index 846ba40c136..bfdfe11178e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java @@ -21,6 +21,10 @@ package org.elasticsearch.ingest.processor; /** * Exception class thrown by {@link FailProcessor}. + * + * This exception is caught in the {@link org.elasticsearch.ingest.core.CompoundProcessor} and + * then changes the state of {@link org.elasticsearch.ingest.core.IngestDocument}. This + * exception should get serialized. 
*/ public class FailProcessorException extends RuntimeException { From 62089539dbebd7e63c71b79ae5353cd196434ecc Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 18 Jan 2016 13:11:46 +0100 Subject: [PATCH 235/347] s/processorId/processorTag --- .../elasticsearch/ingest/processor/LowercaseProcessor.java | 4 ++-- .../org/elasticsearch/ingest/processor/TrimProcessor.java | 4 ++-- .../action/ingest/SimulatePipelineResponseTests.java | 6 +++--- .../action/ingest/SimulateProcessorResultTests.java | 6 +++--- .../main/java/org/elasticsearch/ingest/TestProcessor.java | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java index f03d0b1a2c3..282d1e29716 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java @@ -30,8 +30,8 @@ public class LowercaseProcessor extends AbstractStringProcessor { public static final String TYPE = "lowercase"; - LowercaseProcessor(String processorId, String field) { - super(processorId, field); + LowercaseProcessor(String processorTag, String field) { + super(processorTag, field); } @Override diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java index a9591f92ebe..8a75ed24b43 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java @@ -27,8 +27,8 @@ public class TrimProcessor extends AbstractStringProcessor { public static final String TYPE = "trim"; - TrimProcessor(String processorId, String field) { - super(processorId, field); + TrimProcessor(String processorTag, String field) { + super(processorTag, field); } @Override diff --git 
a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index c82c42d6209..9af049da01e 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -47,12 +47,12 @@ public class SimulatePipelineResponseTests extends ESTestCase { int numProcessors = randomIntBetween(1, 10); List processorResults = new ArrayList<>(numProcessors); for (int j = 0; j < numProcessors; j++) { - String processorId = randomAsciiOfLengthBetween(1, 10); + String processorTag = randomAsciiOfLengthBetween(1, 10); SimulateProcessorResult processorResult; if (isFailure) { - processorResult = new SimulateProcessorResult(processorId, new IllegalArgumentException("test")); + processorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); } else { - processorResult = new SimulateProcessorResult(processorId, ingestDocument); + processorResult = new SimulateProcessorResult(processorTag, ingestDocument); } processorResults.add(processorResult); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index ace5d58c104..0885475adc5 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -33,14 +33,14 @@ import static org.hamcrest.Matchers.instanceOf; public class SimulateProcessorResultTests extends ESTestCase { public void testSerialization() throws IOException { - String processorId = randomAsciiOfLengthBetween(1, 10); + String processorTag = randomAsciiOfLengthBetween(1, 10); boolean isFailure = randomBoolean(); SimulateProcessorResult 
simulateProcessorResult; if (isFailure) { - simulateProcessorResult = new SimulateProcessorResult(processorId, new IllegalArgumentException("test")); + simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); } else { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - simulateProcessorResult = new SimulateProcessorResult(processorId, ingestDocument); + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); } BytesStreamOutput out = new BytesStreamOutput(); diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java index 78f5b765994..ae13174e7c1 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java @@ -70,8 +70,8 @@ public class TestProcessor implements Processor { public static final class Factory extends AbstractProcessorFactory { @Override - public TestProcessor doCreate(String processorId, Map config) throws Exception { - return new TestProcessor(processorId, "test-processor", ingestDocument -> {}); + public TestProcessor doCreate(String processorTag, Map config) throws Exception { + return new TestProcessor(processorTag, "test-processor", ingestDocument -> {}); } } } From 7c8a1ecf37c068415fd610401ff5b114f49404df Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 18 Jan 2016 13:17:01 +0100 Subject: [PATCH 236/347] test: added todo --- core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index e8203b52285..ad295e66a95 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ 
b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -55,6 +55,7 @@ public class IngestClientIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { + // TODO: Remove this method once gets in: https://github.com/elastic/elasticsearch/issues/16019 if (nodeOrdinal % 2 == 0) { return Settings.builder().put("node.ingest", false).put(super.nodeSettings(nodeOrdinal)).build(); } From 4ef85eda36ae40c29a55ea7aca46142cc7ea45f0 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 18 Jan 2016 09:25:36 -0800 Subject: [PATCH 237/347] add default separator test to dedot rest test --- .../test/ingest/80_dedot_processor.yaml | 33 ++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml index 8aedb4099d8..bdc64572a45 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_dedot_processor.yaml @@ -1,5 +1,5 @@ --- -"Test De-Dot Processor": +"Test De-Dot Processor With Provided Separator": - do: ingest.put_pipeline: id: "my_pipeline" @@ -31,3 +31,34 @@ id: 1 - match: { _source.a3b3c: "hello world" } +--- +"Test De-Dot Processor With Default Separator": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "dedot" : { + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {"a.b.c": "hello world"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.a_b_c: "hello world" } From e2e207687d3dafed66a6eb00768a17706ee546da Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 19 Jan 2016 22:03:26 +0100 Subject: [PATCH 238/347] Fixes due to changes in master branch. 
--- .../elasticsearch/action/ingest/DeletePipelineRequest.java | 3 +-- .../org/elasticsearch/action/ingest/IngestActionFilter.java | 6 +++--- .../org/elasticsearch/action/ingest/PutPipelineRequest.java | 4 +--- .../action/ingest/SimulatePipelineRequest.java | 2 +- .../org/elasticsearch/ingest/PipelineExecutionService.java | 2 +- .../test/java/org/elasticsearch/ingest/IngestClientIT.java | 2 +- .../org/elasticsearch/ingest/grok/IngestGrokRestIT.java | 5 ----- .../org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java | 5 ----- 8 files changed, 8 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index a18f13680a5..f0188c663d6 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.StreamInput; @@ -29,7 +28,7 @@ import java.io.IOException; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class DeletePipelineRequest extends AcknowledgedRequest { +public class DeletePipelineRequest extends AcknowledgedRequest { private String id; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 440e1f37aab..fe9cb3c5f44 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -55,7 +55,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } 
@Override - public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { if (IndexAction.NAME.equals(action)) { assert request instanceof IndexRequest; IndexRequest indexRequest = (IndexRequest) request; @@ -89,7 +89,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } @Override - public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { chain.proceed(action, response, listener); } @@ -131,7 +131,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio return Integer.MAX_VALUE; } - final static class BulkRequestModifier implements Iterator { + final static class BulkRequestModifier implements Iterator> { final BulkRequest bulkRequest; final Set failedSlots; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java index c993d1606d6..6b199b2aa90 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java @@ -19,10 +19,8 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.cluster.ack.AckedRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,7 +29,7 @@ import java.io.IOException; import static 
org.elasticsearch.action.ValidateActions.addValidationError; -public class PutPipelineRequest extends AcknowledgedRequest { +public class PutPipelineRequest extends AcknowledgedRequest { private String id; private BytesReference source; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index ccc51e7bdd7..48961f0e41f 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -38,7 +38,7 @@ import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.ingest.core.IngestDocument.MetaData; -public class SimulatePipelineRequest extends ActionRequest { +public class SimulatePipelineRequest extends ActionRequest { private String id; private boolean verbose; diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 7a382ca1ce7..4fac72899c1 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -52,7 +52,7 @@ public class PipelineExecutionService { }); } - public void execute(Iterable actionRequests, + public void execute(Iterable> actionRequests, BiConsumer itemFailureHandler, Consumer completionHandler) { threadPool.executor(ThreadPool.Names.INDEX).execute(() -> { for (ActionRequest actionRequest : actionRequests) { diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index ad295e66a95..3eab68ec3eb 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -211,7 
+211,7 @@ public class IngestClientIT extends ESIntegTestCase { @Override protected Collection> getMockPlugins() { - return Collections.emptyList(); + return Collections.singletonList(TestSeedPlugin.class); } public static class IngestPlugin extends Plugin { diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java index 7073c4ede56..3f4bdf1f8b9 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/IngestGrokRestIT.java @@ -32,11 +32,6 @@ import java.util.Collection; public class IngestGrokRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(IngestGrokPlugin.class); - } - public IngestGrokRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java index fed53456f0b..0e4d1ee4b2b 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java @@ -31,11 +31,6 @@ import java.util.Collection; public class IngestGeoIpRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(IngestGeoIpPlugin.class); - } - public IngestGeoIpRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } From e383e96f58542ae14f51828755460995d8de0088 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 11:40:17 +0100 Subject: [PATCH 239/347] replaced custom PipelineMissingException with ResourceNotFoundException --- .../elasticsearch/ElasticsearchException.java | 3 +- 
.../ingest/PipelineMissingException.java | 42 ------------------- .../elasticsearch/ingest/PipelineStore.java | 3 +- .../ExceptionSerializationTests.java | 1 - .../ingest/PipelineStoreTests.java | 3 +- 5 files changed, 5 insertions(+), 47 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 98c992cf2bc..4a35bcbcfb0 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -610,8 +610,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte RETRY_ON_REPLICA_EXCEPTION(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class, org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException::new, 136), TYPE_MISSING_EXCEPTION(org.elasticsearch.indices.TypeMissingException.class, org.elasticsearch.indices.TypeMissingException::new, 137), FAILED_TO_COMMIT_CLUSTER_STATE_EXCEPTION(org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class, org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException::new, 140), - QUERY_SHARD_EXCEPTION(org.elasticsearch.index.query.QueryShardException.class, org.elasticsearch.index.query.QueryShardException::new, 141), - PIPELINE_MISSING_EXCEPTION(org.elasticsearch.ingest.PipelineMissingException.class, org.elasticsearch.ingest.PipelineMissingException::new, 142); + QUERY_SHARD_EXCEPTION(org.elasticsearch.index.query.QueryShardException.class, org.elasticsearch.index.query.QueryShardException::new, 141); final Class exceptionClass; final FunctionThatThrowsIOException constructor; diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java 
b/core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java deleted file mode 100644 index 82637ae2ded..00000000000 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineMissingException.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.rest.RestStatus; - -import java.io.IOException; - -public class PipelineMissingException extends ElasticsearchException { - - public PipelineMissingException(String id) { - super("pipeline [{}] is missing", id); - } - - public PipelineMissingException(StreamInput in) throws IOException { - super(in); - } - - @Override - public RestStatus status() { - return RestStatus.NOT_FOUND; - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 6db1d6c0681..e7ab217e736 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; @@ -136,7 +137,7 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust } Map pipelines = currentIngestMetadata.getPipelines(); if (pipelines.containsKey(request.id()) == false) { - throw new PipelineMissingException(request.id()); + throw new ResourceNotFoundException("pipeline [{}] is missing", request.id()); } else { pipelines = new HashMap<>(pipelines); pipelines.remove(request.id()); diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index be229310043..db7b5df6662 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -776,7 +776,6 @@ public class 
ExceptionSerializationTests extends ESTestCase { ids.put(139, null); ids.put(140, org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class); ids.put(141, org.elasticsearch.index.query.QueryShardException.class); - ids.put(142, org.elasticsearch.ingest.PipelineMissingException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index c1f14b26eb8..a0e07169f8f 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.cluster.ClusterName; @@ -129,7 +130,7 @@ public class PipelineStoreTests extends ESTestCase { try { store.innerDelete(deleteRequest, clusterState); fail("exception expected"); - } catch (PipelineMissingException e) { + } catch (ResourceNotFoundException e) { assertThat(e.getMessage(), equalTo("pipeline [_id] is missing")); } } From 8a7f3d9d6faccb549e946e1b90f4621b7716951b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 13:35:15 +0100 Subject: [PATCH 240/347] addressed various comments --- .../elasticsearch/action/ActionModule.java | 4 +- .../action/bulk/BulkRequest.java | 23 +++++++- .../action/index/IndexRequest.java | 4 +- .../action/index/IndexRequestBuilder.java | 2 +- .../action/ingest/GetPipelineRequest.java | 16 +++--- .../ingest/GetPipelineRequestBuilder.java | 2 +- .../ingest/GetPipelineTransportAction.java | 2 +- .../action/ingest/IngestActionFilter.java | 55 ++++++++----------- .../ingest/IngestProxyActionFilter.java | 4 +- .../client/transport/TransportClient.java | 
4 +- .../ingest/PipelineExecutionService.java | 8 +-- .../java/org/elasticsearch/node/Node.java | 3 +- .../rest/action/index/RestIndexAction.java | 2 +- .../action/ingest/RestGetPipelineAction.java | 2 +- .../ingest/IngestActionFilterTests.java | 12 ++-- .../ingest/IngestProxyActionFilterTests.java | 12 ++-- .../elasticsearch/ingest/IngestClientIT.java | 4 +- .../ingest/PipelineExecutionServiceTests.java | 28 +++++----- 18 files changed, 100 insertions(+), 87 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 7be4226eca0..39aa4b7a2ba 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -227,8 +227,8 @@ public class ActionModule extends AbstractModule { private final boolean ingestEnabled; private final boolean proxy; - public ActionModule(Settings settings, boolean proxy) { - this.ingestEnabled = NodeModule.isNodeIngestEnabled(settings); + public ActionModule(boolean ingestEnabled, boolean proxy) { + this.ingestEnabled = ingestEnabled; this.proxy = proxy; } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index fdd5c3fed09..3bc08d39df0 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -383,15 +384,15 @@ public class BulkRequest extends ActionRequest 
implements Composite if ("index".equals(action)) { if (opType == null) { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) - .pipeline(pipeline).source(data.slice(from, nextMarker - from)), payload); + .setPipeline(pipeline).source(data.slice(from, nextMarker - from)), payload); } else { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) - .create("create".equals(opType)).pipeline(pipeline) + .create("create".equals(opType)).setPipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } } else if ("create".equals(action)) { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) - .create(true).pipeline(pipeline) + .create(true).setPipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } else if ("update".equals(action)) { UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict) @@ -482,6 +483,22 @@ public class BulkRequest extends ActionRequest implements Composite return -1; } + /** + * @return Whether this bulk request contains index request with an ingest pipeline enabled. 
+ */ + public boolean hasIndexRequestsWithPipelines() { + for (ActionRequest actionRequest : requests) { + if (actionRequest instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + if (Strings.hasText(indexRequest.getPipeline())) { + return true; + } + } + } + + return false; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 5c9f5aeb5aa..387f7566e81 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -368,7 +368,7 @@ public class IndexRequest extends ReplicationRequest implements Do /** * Sets the ingest pipeline to be executed before indexing the document */ - public IndexRequest pipeline(String pipeline) { + public IndexRequest setPipeline(String pipeline) { this.pipeline = pipeline; return this; } @@ -376,7 +376,7 @@ public class IndexRequest extends ReplicationRequest implements Do /** * Returns the ingest pipeline to be executed before indexing the document */ - public String pipeline() { + public String getPipeline() { return this.pipeline; } diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index a355d68b034..4116755e4eb 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -283,7 +283,7 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder listener) throws Exception { - listener.onResponse(new GetPipelineResponse(pipelineStore.getPipelines(request.ids()))); + listener.onResponse(new 
GetPipelineResponse(pipelineStore.getPipelines(request.getIds()))); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index fe9cb3c5f44..5747ddd1402 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -56,36 +56,29 @@ public final class IngestActionFilter extends AbstractComponent implements Actio @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - if (IndexAction.NAME.equals(action)) { - assert request instanceof IndexRequest; - IndexRequest indexRequest = (IndexRequest) request; - if (Strings.hasText(indexRequest.pipeline())) { - processIndexRequest(task, action, listener, chain, (IndexRequest) request); - return; - } - } - if (BulkAction.NAME.equals(action)) { - assert request instanceof BulkRequest; - BulkRequest bulkRequest = (BulkRequest) request; - boolean isIngestRequest = false; - for (ActionRequest actionRequest : bulkRequest.requests()) { - if (actionRequest instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) actionRequest; - if (Strings.hasText(indexRequest.pipeline())) { - isIngestRequest = true; - break; - } + switch (action) { + case IndexAction.NAME: + IndexRequest indexRequest = (IndexRequest) request; + if (Strings.hasText(indexRequest.getPipeline())) { + processIndexRequest(task, action, listener, chain, (IndexRequest) request); + } else { + chain.proceed(task, action, request, listener); } - } - if (isIngestRequest) { - @SuppressWarnings("unchecked") - ActionListener actionListener = (ActionListener) listener; - processBulkIndexRequest(task, bulkRequest, action, chain, actionListener); - return; - } + break; + case BulkAction.NAME: + BulkRequest bulkRequest = (BulkRequest) request; + if 
(bulkRequest.hasIndexRequestsWithPipelines()) { + @SuppressWarnings("unchecked") + ActionListener actionListener = (ActionListener) listener; + processBulkIndexRequest(task, bulkRequest, action, chain, actionListener); + } else { + chain.proceed(task, action, request, listener); + } + break; + default: + chain.proceed(task, action, request, listener); + break; } - - chain.proceed(task, action, request, listener); } @Override @@ -96,13 +89,13 @@ public final class IngestActionFilter extends AbstractComponent implements Actio void processIndexRequest(Task task, String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest) { executionService.execute(indexRequest, t -> { - logger.error("failed to execute pipeline [{}]", t, indexRequest.pipeline()); + logger.error("failed to execute pipeline [{}]", t, indexRequest.getPipeline()); listener.onFailure(t); }, success -> { // TransportIndexAction uses IndexRequest and same action name on the node that receives the request and the node that // processes the primary action. This could lead to a pipeline being executed twice for the same // index request, hence we set the pipeline to null once its execution completed. 
- indexRequest.pipeline(null); + indexRequest.setPipeline(null); chain.proceed(task, action, indexRequest, listener); }); } @@ -110,7 +103,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio void processBulkIndexRequest(Task task, BulkRequest original, String action, ActionFilterChain chain, ActionListener listener) { BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); executionService.execute(() -> bulkRequestModifier, (indexRequest, throwable) -> { - logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.pipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable); + logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable); bulkRequestModifier.markCurrentItemAsFailed(throwable); }, (success) -> { BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 1df4970b236..c1884fe1ab3 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -65,7 +65,7 @@ public final class IngestProxyActionFilter implements ActionFilter { ingestAction = IndexAction.INSTANCE; assert request instanceof IndexRequest; IndexRequest indexRequest = (IndexRequest) request; - isIngestRequest = Strings.hasText(indexRequest.pipeline()); + isIngestRequest = Strings.hasText(indexRequest.getPipeline()); } else if (BulkAction.NAME.equals(action)) { ingestAction = BulkAction.INSTANCE; assert request instanceof BulkRequest; @@ -73,7 +73,7 @@ public final class IngestProxyActionFilter implements ActionFilter { for (ActionRequest actionRequest : bulkRequest.requests()) { if 
(actionRequest instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) actionRequest; - if (Strings.hasText(indexRequest.pipeline())) { + if (Strings.hasText(indexRequest.getPipeline())) { isIngestRequest = true; break; } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 7e26af7f325..0d677490fd3 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -49,6 +49,7 @@ import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.indices.breaker.CircuitBreakerModule; import org.elasticsearch.monitor.MonitorService; +import org.elasticsearch.node.NodeModule; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; @@ -150,7 +151,8 @@ public class TransportClient extends AbstractClient { // noop } }); - modules.add(new ActionModule(settings, true)); + boolean ingestEnabled = NodeModule.isNodeIngestEnabled(settings); + modules.add(new ActionModule(ingestEnabled, true)); modules.add(new CircuitBreakerModule(settings)); pluginsService.processModules(modules); diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 4fac72899c1..f6936487a23 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -41,7 +41,7 @@ public class PipelineExecutionService { } public void execute(IndexRequest request, Consumer failureHandler, Consumer completionHandler) { - Pipeline pipeline = getPipeline(request.pipeline()); + Pipeline pipeline = 
getPipeline(request.getPipeline()); threadPool.executor(ThreadPool.Names.INDEX).execute(() -> { try { innerExecute(request, pipeline); @@ -58,11 +58,11 @@ public class PipelineExecutionService { for (ActionRequest actionRequest : actionRequests) { if ((actionRequest instanceof IndexRequest)) { IndexRequest indexRequest = (IndexRequest) actionRequest; - if (Strings.hasText(indexRequest.pipeline())) { + if (Strings.hasText(indexRequest.getPipeline())) { try { - innerExecute(indexRequest, getPipeline(indexRequest.pipeline())); + innerExecute(indexRequest, getPipeline(indexRequest.getPipeline())); //this shouldn't be needed here but we do it for consistency with index api which requires it to prevent double execution - indexRequest.pipeline(null); + indexRequest.setPipeline(null); } catch (Throwable e) { itemFailureHandler.accept(indexRequest, e); } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index daa4e163569..4cc338fb837 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -190,7 +190,8 @@ public class Node implements Releasable { modules.add(new ClusterModule(this.settings)); modules.add(new IndicesModule()); modules.add(new SearchModule(settings, namedWriteableRegistry)); - modules.add(new ActionModule(settings, false)); + boolean ingestEnabled = NodeModule.isNodeIngestEnabled(settings); + modules.add(new ActionModule(ingestEnabled, false)); modules.add(new GatewayModule(settings)); modules.add(new NodeClientModule()); modules.add(new PercolatorModule()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index 4eaec2c6b1b..0fc15454ecb 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ 
-77,7 +77,7 @@ public class RestIndexAction extends BaseRestHandler { if (request.hasParam("ttl")) { indexRequest.ttl(request.param("ttl")); } - indexRequest.pipeline(request.param("pipeline")); + indexRequest.setPipeline(request.param("pipeline")); indexRequest.source(request.content()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); indexRequest.refresh(request.paramAsBoolean("refresh", indexRequest.refresh())); diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index 4860f5e7931..e43e8846e14 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -41,7 +41,7 @@ public class RestGetPipelineAction extends BaseRestHandler { @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { GetPipelineRequest request = new GetPipelineRequest(); - request.ids(Strings.splitStringByCommaToArray(restRequest.param("id"))); + request.setIds(Strings.splitStringByCommaToArray(restRequest.param("id"))); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); client.getPipeline(request, new RestStatusToXContentListener<>(channel)); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java index 8c8b4ba35ff..e1ffe94e63d 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java @@ -99,7 +99,7 @@ public class IngestActionFilterTests extends ESTestCase { @SuppressWarnings("unchecked") public void testApplyIngestIdViaRequestParam() throws Exception { Task 
task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id"); indexRequest.source("field", "value"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -113,7 +113,7 @@ public class IngestActionFilterTests extends ESTestCase { @SuppressWarnings("unchecked") public void testApplyExecuted() throws Exception { Task task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id"); indexRequest.source("field", "value"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -135,7 +135,7 @@ public class IngestActionFilterTests extends ESTestCase { @SuppressWarnings("unchecked") public void testApplyFailed() throws Exception { Task task = mock(Task.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id"); indexRequest.source("field", "value"); ActionListener actionListener = mock(ActionListener.class); ActionFilterChain actionFilterChain = mock(ActionFilterChain.class); @@ -196,7 +196,7 @@ public class IngestActionFilterTests extends ESTestCase { } bulkRequest.add(request); } else { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id"); indexRequest.source("field1", "value1"); bulkRequest.add(indexRequest); } @@ -239,9 +239,9 @@ public class IngestActionFilterTests extends ESTestCase { ActionListener actionListener = mock(ActionListener.class); ActionFilterChain 
actionFilterChain = mock(ActionFilterChain.class); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id").source("field", "value"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id").source("field", "value"); filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); - assertThat(indexRequest.pipeline(), nullValue()); + assertThat(indexRequest.getPipeline(), nullValue()); filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); verify(executionService, times(1)).execute(same(indexRequest), any(Consumer.class), any(Consumer.class)); verify(actionFilterChain, times(2)).proceed(task, IndexAction.NAME, indexRequest, actionListener); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index 042dacce223..a3b107d4c1e 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -102,10 +102,10 @@ public class IngestProxyActionFilterTests extends ESTestCase { ActionRequest request; if (randomBoolean()) { action = IndexAction.NAME; - request = new IndexRequest().pipeline("_id"); + request = new IndexRequest().setPipeline("_id"); } else { action = BulkAction.NAME; - request = new BulkRequest().add(new IndexRequest().pipeline("_id")); + request = new BulkRequest().add(new IndexRequest().setPipeline("_id")); } try { filter.apply(task, action, request, actionListener, actionFilterChain); @@ -169,7 +169,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { }; doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class)); - IndexRequest indexRequest = new 
IndexRequest().pipeline("_id"); + IndexRequest indexRequest = new IndexRequest().setPipeline("_id"); filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain); verify(transportService).sendRequest(argThat(new IngestNodeMatcher()), eq(IndexAction.NAME), same(indexRequest), any(TransportResponseHandler.class)); @@ -193,7 +193,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class)); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest().pipeline("_id")); + bulkRequest.add(new IndexRequest().setPipeline("_id")); int numNoPipelineRequests = randomIntBetween(0, 10); for (int i = 0; i < numNoPipelineRequests; i++) { bulkRequest.add(new IndexRequest()); @@ -224,10 +224,10 @@ public class IngestProxyActionFilterTests extends ESTestCase { ActionRequest request; if (randomBoolean()) { action = IndexAction.NAME; - request = new IndexRequest().pipeline("_id"); + request = new IndexRequest().setPipeline("_id"); } else { action = BulkAction.NAME; - request = new BulkRequest().add(new IndexRequest().pipeline("_id")); + request = new BulkRequest().add(new IndexRequest().setPipeline("_id")); } filter.apply(task, action, request, actionListener, actionFilterChain); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 3eab68ec3eb..a613054feba 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -145,7 +145,7 @@ public class IngestClientIT extends ESIntegTestCase { int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); for (int i = 0; i < numRequests; i++) { - IndexRequest indexRequest = new IndexRequest("index", "type", 
Integer.toString(i)).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)).setPipeline("_id"); indexRequest.source("field", "value", "fail", i % 2 == 0); bulkRequest.add(indexRequest); } @@ -180,7 +180,7 @@ public class IngestClientIT extends ESIntegTestCase { client().putPipeline(putPipelineRequest).get(); GetPipelineRequest getPipelineRequest = new GetPipelineRequest(); - getPipelineRequest.ids("_id"); + getPipelineRequest.setIds("_id"); GetPipelineResponse getResponse = client().getPipeline(getPipelineRequest).get(); assertThat(getResponse.isFound(), is(true)); assertThat(getResponse.pipelines().size(), equalTo(1)); diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 9872eb79f0e..9bb098bed04 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -71,7 +71,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { } public void testExecuteIndexPipelineDoesNotExist() { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") @@ -91,9 +91,9 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); BulkRequest bulkRequest = new BulkRequest(); - IndexRequest indexRequest1 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest1 = new IndexRequest("_index", "_type", 
"_id").source(Collections.emptyMap()).setPipeline("_id"); bulkRequest.add(indexRequest1); - IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("does_not_exist"); + IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("does_not_exist"); bulkRequest.add(indexRequest2); @SuppressWarnings("unchecked") BiConsumer failureHandler = mock(BiConsumer.class); @@ -122,7 +122,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { CompoundProcessor processor = mock(CompoundProcessor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") @@ -148,7 +148,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { }).when(processor).execute(any()); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") @@ -170,7 +170,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecuteFailure() throws Exception { CompoundProcessor processor = mock(CompoundProcessor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", 
"_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @@ -187,7 +187,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor onFailureProcessor = mock(Processor.class); CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @@ -203,7 +203,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor onFailureProcessor = mock(Processor.class); CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new 
RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @SuppressWarnings("unchecked") @@ -223,7 +223,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(Collections.singletonList(onFailureProcessor), Collections.singletonList(onFailureOnFailureProcessor)))); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); doThrow(new RuntimeException()).when(onFailureOnFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap())); @@ -241,7 +241,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "5d")); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") @@ -257,7 +257,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { Processor processor = new TestProcessor(ingestDocument -> 
ingestDocument.setFieldValue("_ttl", "abc")); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor))); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).pipeline("_id"); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") Consumer failureHandler = mock(Consumer.class); @SuppressWarnings("unchecked") @@ -270,7 +270,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecuteProvidedTTL() throws Exception { when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", mock(CompoundProcessor.class))); - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline("_id") + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id") .source(Collections.emptyMap()) .ttl(1000L); Consumer failureHandler = mock(Consumer.class); @@ -297,7 +297,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { request = new UpdateRequest("_index", "_type", "_id"); } } else { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline(pipelineId); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId); indexRequest.source("field1", "value1"); request = indexRequest; numIndexRequests++; @@ -324,7 +324,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { int numRequest = scaledRandomIntBetween(8, 64); for (int i = 0; i < numRequest; i++) { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").pipeline(pipelineId); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId); indexRequest.source("field1", "value1"); bulkRequest.add(indexRequest); } From c663aa0dec589410a6897f9fd80c040f47e0b4b1 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 
20 Jan 2016 13:44:27 +0100 Subject: [PATCH 241/347] addressed IngestProxyActionFilter comments --- .../ingest/IngestProxyActionFilter.java | 48 +++++++++---------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index c1884fe1ab3..2dc3f3a28d2 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -59,34 +59,30 @@ public final class IngestProxyActionFilter implements ActionFilter { @Override public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - Action ingestAction = null; - boolean isIngestRequest = false; - if (IndexAction.NAME.equals(action)) { - ingestAction = IndexAction.INSTANCE; - assert request instanceof IndexRequest; - IndexRequest indexRequest = (IndexRequest) request; - isIngestRequest = Strings.hasText(indexRequest.getPipeline()); - } else if (BulkAction.NAME.equals(action)) { - ingestAction = BulkAction.INSTANCE; - assert request instanceof BulkRequest; - BulkRequest bulkRequest = (BulkRequest) request; - for (ActionRequest actionRequest : bulkRequest.requests()) { - if (actionRequest instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) actionRequest; - if (Strings.hasText(indexRequest.getPipeline())) { - isIngestRequest = true; - break; - } + Action ingestAction; + switch (action) { + case IndexAction.NAME: + ingestAction = IndexAction.INSTANCE; + IndexRequest indexRequest = (IndexRequest) request; + if (Strings.hasText(indexRequest.getPipeline())) { + forwardIngestRequest(ingestAction, request, listener); + } else { + chain.proceed(task, action, request, listener); } - } + break; + case BulkAction.NAME: + ingestAction = BulkAction.INSTANCE; + BulkRequest bulkRequest = 
(BulkRequest) request; + if (bulkRequest.hasIndexRequestsWithPipelines()) { + forwardIngestRequest(ingestAction, request, listener); + } else { + chain.proceed(task, action, request, listener); + } + break; + default: + chain.proceed(task, action, request, listener); + break; } - - if (isIngestRequest) { - assert ingestAction != null; - forwardIngestRequest(ingestAction, request, listener); - return; - } - chain.proceed(task, action, request, listener); } private void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { From f7024bc4dd4da3ec7b57c2ecd0564deb4fd5774f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 13:55:26 +0100 Subject: [PATCH 242/347] maintain a list of ingest nodes in the cluster state instead of creating it on the spot --- .../ingest/IngestProxyActionFilter.java | 13 ++++------- .../cluster/node/DiscoveryNodes.java | 23 +++++++++++++++++-- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 2dc3f3a28d2..2512cea1162 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -42,6 +42,7 @@ import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; @@ -122,19 +123,13 @@ public final class IngestProxyActionFilter implements ActionFilter { private DiscoveryNode randomIngestNode() { assert NodeModule.isNodeIngestEnabled(clusterService.localNode().attributes()) == false; - List ingestNodes = new ArrayList<>(); - for (DiscoveryNode node : clusterService.state().nodes()) { - if 
(NodeModule.isNodeIngestEnabled(node.getAttributes())) { - ingestNodes.add(node); - } - } - - if (ingestNodes.isEmpty()) { + DiscoveryNode[] ingestNodes = clusterService.state().getNodes().getIngestNodes().values().toArray(DiscoveryNode.class); + if (ingestNodes.length == 0) { throw new IllegalStateException("There are no ingest nodes in this cluster, unable to forward request to an ingest node."); } int index = getNodeNumber(); - return ingestNodes.get((index) % ingestNodes.size()); + return ingestNodes[(index) % ingestNodes.length]; } private int getNodeNumber() { diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index d07d3c334ac..58f7244c5c1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.node.NodeModule; import java.io.IOException; import java.util.ArrayList; @@ -52,16 +53,20 @@ public class DiscoveryNodes extends AbstractDiffable implements private final ImmutableOpenMap nodes; private final ImmutableOpenMap dataNodes; private final ImmutableOpenMap masterNodes; + private final ImmutableOpenMap ingestNodes; private final String masterNodeId; private final String localNodeId; private final Version minNodeVersion; private final Version minNonClientNodeVersion; - private DiscoveryNodes(ImmutableOpenMap nodes, ImmutableOpenMap dataNodes, ImmutableOpenMap masterNodes, String masterNodeId, String localNodeId, Version minNodeVersion, Version minNonClientNodeVersion) { + private DiscoveryNodes(ImmutableOpenMap nodes, ImmutableOpenMap dataNodes, + ImmutableOpenMap masterNodes, ImmutableOpenMap 
ingestNodes, + String masterNodeId, String localNodeId, Version minNodeVersion, Version minNonClientNodeVersion) { this.nodes = nodes; this.dataNodes = dataNodes; this.masterNodes = masterNodes; + this.ingestNodes = ingestNodes; this.masterNodeId = masterNodeId; this.localNodeId = localNodeId; this.minNodeVersion = minNodeVersion; @@ -164,6 +169,13 @@ public class DiscoveryNodes extends AbstractDiffable implements return masterNodes(); } + /** + * @return All the ingest nodes arranged by their ids + */ + public ImmutableOpenMap getIngestNodes() { + return ingestNodes; + } + /** * Get a {@link Map} of the discovered master and data nodes arranged by their ids * @@ -654,6 +666,7 @@ public class DiscoveryNodes extends AbstractDiffable implements public DiscoveryNodes build() { ImmutableOpenMap.Builder dataNodesBuilder = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder masterNodesBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder ingestNodesBuilder = ImmutableOpenMap.builder(); Version minNodeVersion = Version.CURRENT; Version minNonClientNodeVersion = Version.CURRENT; for (ObjectObjectCursor nodeEntry : nodes) { @@ -665,10 +678,16 @@ public class DiscoveryNodes extends AbstractDiffable implements masterNodesBuilder.put(nodeEntry.key, nodeEntry.value); minNonClientNodeVersion = Version.smallest(minNonClientNodeVersion, nodeEntry.value.version()); } + if (NodeModule.isNodeIngestEnabled(nodeEntry.value.getAttributes())) { + ingestNodesBuilder.put(nodeEntry.key, nodeEntry.value); + } minNodeVersion = Version.smallest(minNodeVersion, nodeEntry.value.version()); } - return new DiscoveryNodes(nodes.build(), dataNodesBuilder.build(), masterNodesBuilder.build(), masterNodeId, localNodeId, minNodeVersion, minNonClientNodeVersion); + return new DiscoveryNodes( + nodes.build(), dataNodesBuilder.build(), masterNodesBuilder.build(), ingestNodesBuilder.build(), + masterNodeId, localNodeId, minNodeVersion, minNonClientNodeVersion + ); } public static DiscoveryNodes 
readFrom(StreamInput in, @Nullable DiscoveryNode localNode) throws IOException { From 8b520111d0480a2fb025090223e49bbb57018b9f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 14:00:55 +0100 Subject: [PATCH 243/347] fix PutPipelineRequest comments --- .../action/ingest/PutPipelineRequest.java | 22 +++++++------------ .../ingest/PutPipelineRequestBuilder.java | 5 ++--- .../elasticsearch/ingest/PipelineStore.java | 8 +++---- .../action/ingest/RestPutPipelineAction.java | 4 ++-- .../elasticsearch/ingest/IngestClientIT.java | 8 +++---- .../ingest/PipelineStoreTests.java | 12 +++++----- 6 files changed, 26 insertions(+), 33 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java index 6b199b2aa90..e04abb06cef 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -36,30 +37,23 @@ public class PutPipelineRequest extends AcknowledgedRequest @Override public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (id == null) { - validationException = addValidationError("id is missing", validationException); - } - if (source == null) { - validationException = addValidationError("source is missing", validationException); - } - return validationException; + return null; } - public String id() { + public String getId() { return id; } - public void id(String id) { - this.id = id; + public void setId(String id) { + this.id = Objects.requireNonNull(id); } - public BytesReference source() { + public 
BytesReference getSource() { return source; } - public void source(BytesReference source) { - this.source = source; + public void setSource(BytesReference source) { + this.source = Objects.requireNonNull(source); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java index 4236d7081aa..377a240caba 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; @@ -31,12 +30,12 @@ public class PutPipelineRequestBuilder extends ActionRequestBuilder listener) throws IllegalArgumentException { try { // validates the pipeline and processor configuration before submitting a cluster update task: - Map pipelineConfig = XContentHelper.convertToMap(request.source(), false).v2(); - constructPipeline(request.id(), pipelineConfig); + Map pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2(); + constructPipeline(request.getId(), pipelineConfig); } catch (Exception e) { throw new IllegalArgumentException("Invalid pipeline configuration", e); } - clusterService.submitStateUpdateTask("put-pipeline-" + request.id(), new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), new AckedClusterStateUpdateTask(request, listener) { @Override protected WritePipelineResponse newResponse(boolean acknowledged) { @@ -185,7 +185,7 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust pipelines = new HashMap<>(); } - pipelines.put(request.id(), new 
PipelineConfiguration(request.id(), request.source())); + pipelines.put(request.getId(), new PipelineConfiguration(request.getId(), request.getSource())); ClusterState.Builder newState = ClusterState.builder(currentState); newState.metaData(MetaData.builder(currentState.getMetaData()) .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)) diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 7c2d9a717dc..c876c45a876 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -40,9 +40,9 @@ public class RestPutPipelineAction extends BaseRestHandler { @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { PutPipelineRequest request = new PutPipelineRequest(); - request.id(restRequest.param("id")); + request.setId(restRequest.param("id")); if (restRequest.hasContent()) { - request.source(restRequest.content()); + request.setSource(restRequest.content()); } request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.timeout(restRequest.paramAsTime("timeout", request.timeout())); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index a613054feba..259392d3667 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -129,8 +129,8 @@ public class IngestClientIT extends ESIntegTestCase { createIndex("index"); PutPipelineRequest putPipelineRequest = new PutPipelineRequest(); - putPipelineRequest.id("_id"); - putPipelineRequest.source(jsonBuilder().startObject() + putPipelineRequest.setId("_id"); + 
putPipelineRequest.setSource(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -167,8 +167,8 @@ public class IngestClientIT extends ESIntegTestCase { public void test() throws Exception { PutPipelineRequest putPipelineRequest = new PutPipelineRequest(); - putPipelineRequest.id("_id"); - putPipelineRequest.source(jsonBuilder().startObject() + putPipelineRequest.setId("_id"); + putPipelineRequest.setSource(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index a0e07169f8f..b10fc3d6d4c 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -85,8 +85,8 @@ public class PipelineStoreTests extends ESTestCase { // add a new pipeline: PutPipelineRequest putRequest = new PutPipelineRequest(); - putRequest.id(id); - putRequest.source(new BytesArray("{\"processors\": []}")); + putRequest.setId(id); + putRequest.setSource(new BytesArray("{\"processors\": []}")); clusterState = store.innerPut(putRequest, clusterState); store.innerUpdatePipelines(clusterState); pipeline = store.get(id); @@ -97,8 +97,8 @@ public class PipelineStoreTests extends ESTestCase { // overwrite existing pipeline: putRequest = new PutPipelineRequest(); - putRequest.id(id); - putRequest.source(new BytesArray("{\"processors\": [], \"description\": \"_description\"}")); + putRequest.setId(id); + putRequest.setSource(new BytesArray("{\"processors\": [], \"description\": \"_description\"}")); clusterState = store.innerPut(putRequest, clusterState); store.innerUpdatePipelines(clusterState); pipeline = store.get(id); @@ -170,8 +170,8 @@ public class PipelineStoreTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(new 
ClusterName("_name")).build(); // Start empty PutPipelineRequest putRequest = new PutPipelineRequest(); - putRequest.id(id); - putRequest.source(new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")); + putRequest.setId(id); + putRequest.setSource(new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")); clusterState = store.innerPut(putRequest, clusterState); store.innerUpdatePipelines(clusterState); pipeline = store.get(id); From 2645fc9145107d576e6b63a5219649ed63974959 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 14:11:45 +0100 Subject: [PATCH 244/347] renamed SimulateDocumentSimpleResult to SimulateDocumentBaseResult and added jdocs --- ...t.java => SimulateDocumentBaseResult.java} | 21 ++++++++------- .../ingest/SimulateDocumentVerboseResult.java | 7 +++-- .../ingest/SimulateExecutionService.java | 6 ++--- .../ingest/SimulatePipelineResponse.java | 2 +- .../SimulateDocumentSimpleResultTests.java | 16 ++++++------ .../ingest/SimulateExecutionServiceTests.java | 18 ++++++------- .../ingest/SimulatePipelineResponseTests.java | 26 +++++++++---------- .../elasticsearch/ingest/IngestClientIT.java | 10 +++---- 8 files changed, 56 insertions(+), 50 deletions(-) rename core/src/main/java/org/elasticsearch/action/ingest/{SimulateDocumentSimpleResult.java => SimulateDocumentBaseResult.java} (72%) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java similarity index 72% rename from core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java rename to core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java index 74173acff1f..036703e98f1 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResult.java +++ 
b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java @@ -27,22 +27,25 @@ import org.elasticsearch.ingest.core.IngestDocument; import java.io.IOException; import java.util.Collections; -public class SimulateDocumentSimpleResult implements SimulateDocumentResult { +/** + * Holds the end result of what a pipeline did to sample document provided via the simulate api. + */ +public final class SimulateDocumentBaseResult implements SimulateDocumentResult { - private static final SimulateDocumentSimpleResult PROTOTYPE = new SimulateDocumentSimpleResult(new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap()))); + private static final SimulateDocumentBaseResult PROTOTYPE = new SimulateDocumentBaseResult(new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap()))); private WriteableIngestDocument ingestDocument; private Exception failure; - public SimulateDocumentSimpleResult(IngestDocument ingestDocument) { + public SimulateDocumentBaseResult(IngestDocument ingestDocument) { this.ingestDocument = new WriteableIngestDocument(ingestDocument); } - private SimulateDocumentSimpleResult(WriteableIngestDocument ingestDocument) { + private SimulateDocumentBaseResult(WriteableIngestDocument ingestDocument) { this.ingestDocument = ingestDocument; } - public SimulateDocumentSimpleResult(Exception failure) { + public SimulateDocumentBaseResult(Exception failure) { this.failure = failure; } @@ -57,17 +60,17 @@ public class SimulateDocumentSimpleResult implements SimulateDocumentResult { +/** + * Holds the result of what a pipeline did to a sample document via the simulate api, but instead of {@link SimulateDocumentBaseResult} + * this result class holds the intermediate result each processor did to the sample document. 
+ */ +public final class SimulateDocumentVerboseResult implements SimulateDocumentResult { private static final SimulateDocumentVerboseResult PROTOTYPE = new SimulateDocumentVerboseResult(Collections.emptyList()); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index 85c76cf18ae..30efbe1b0fa 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -71,15 +71,15 @@ class SimulateExecutionService { try { executeVerboseDocument(pipelineProcessor, currentIngestDocument, processorResultList); } catch (Exception e) { - return new SimulateDocumentSimpleResult(e); + return new SimulateDocumentBaseResult(e); } return new SimulateDocumentVerboseResult(processorResultList); } else { try { pipeline.execute(ingestDocument); - return new SimulateDocumentSimpleResult(ingestDocument); + return new SimulateDocumentBaseResult(ingestDocument); } catch (Exception e) { - return new SimulateDocumentSimpleResult(e); + return new SimulateDocumentBaseResult(e); } } } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index 7a9ab0b5f8b..c7c0822f04a 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -81,7 +81,7 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte if (verbose) { simulateDocumentResult = SimulateDocumentVerboseResult.readSimulateDocumentVerboseResultFrom(in); } else { - simulateDocumentResult = SimulateDocumentSimpleResult.readSimulateDocumentSimpleResult(in); + simulateDocumentResult = 
SimulateDocumentBaseResult.readSimulateDocumentSimpleResult(in); } results.add(simulateDocumentResult); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java index bc66c64ccab..882fca72156 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java @@ -34,23 +34,23 @@ public class SimulateDocumentSimpleResultTests extends ESTestCase { public void testSerialization() throws IOException { boolean isFailure = randomBoolean(); - SimulateDocumentSimpleResult simulateDocumentSimpleResult; + SimulateDocumentBaseResult simulateDocumentBaseResult; if (isFailure) { - simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(new IllegalArgumentException("test")); + simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); } else { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(ingestDocument); + simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); } BytesStreamOutput out = new BytesStreamOutput(); - simulateDocumentSimpleResult.writeTo(out); + simulateDocumentBaseResult.writeTo(out); StreamInput streamInput = StreamInput.wrap(out.bytes()); - SimulateDocumentSimpleResult otherSimulateDocumentSimpleResult = SimulateDocumentSimpleResult.readSimulateDocumentSimpleResult(streamInput); + SimulateDocumentBaseResult otherSimulateDocumentBaseResult = SimulateDocumentBaseResult.readSimulateDocumentSimpleResult(streamInput); - assertThat(otherSimulateDocumentSimpleResult.getIngestDocument(), equalTo(simulateDocumentSimpleResult.getIngestDocument())); + assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), 
equalTo(simulateDocumentBaseResult.getIngestDocument())); if (isFailure) { - assertThat(otherSimulateDocumentSimpleResult.getFailure(), instanceOf(IllegalArgumentException.class)); - IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentSimpleResult.getFailure(); + assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure(); assertThat(e.getMessage(), equalTo("test")); } } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 11cc0a0f6d0..d58b9bf850d 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -164,10 +164,10 @@ public class SimulateExecutionServiceTests extends ESTestCase { Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); assertThat(processor.getInvokedCounter(), equalTo(2)); - assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); - SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; - assertThat(simulateDocumentSimpleResult.getIngestDocument(), equalTo(ingestDocument)); - assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); + assertThat(actualItemResponse, instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) actualItemResponse; + assertThat(simulateDocumentBaseResult.getIngestDocument(), equalTo(ingestDocument)); + assertThat(simulateDocumentBaseResult.getFailure(), 
nullValue()); } public void testExecuteVerboseItemWithFailure() throws Exception { @@ -196,11 +196,11 @@ public class SimulateExecutionServiceTests extends ESTestCase { Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); assertThat(processor.getInvokedCounter(), equalTo(1)); - assertThat(actualItemResponse, instanceOf(SimulateDocumentSimpleResult.class)); - SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) actualItemResponse; - assertThat(simulateDocumentSimpleResult.getIngestDocument(), nullValue()); - assertThat(simulateDocumentSimpleResult.getFailure(), instanceOf(RuntimeException.class)); - RuntimeException runtimeException = (RuntimeException) simulateDocumentSimpleResult.getFailure(); + assertThat(actualItemResponse, instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) actualItemResponse; + assertThat(simulateDocumentBaseResult.getIngestDocument(), nullValue()); + assertThat(simulateDocumentBaseResult.getFailure(), instanceOf(RuntimeException.class)); + RuntimeException runtimeException = (RuntimeException) simulateDocumentBaseResult.getFailure(); assertThat(runtimeException.getMessage(), equalTo("processor failed")); } } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index 9af049da01e..12a62f0684a 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -58,14 +58,14 @@ public class SimulatePipelineResponseTests extends ESTestCase { } results.add(new SimulateDocumentVerboseResult(processorResults)); } else { 
- results.add(new SimulateDocumentSimpleResult(ingestDocument)); - SimulateDocumentSimpleResult simulateDocumentSimpleResult; + results.add(new SimulateDocumentBaseResult(ingestDocument)); + SimulateDocumentBaseResult simulateDocumentBaseResult; if (isFailure) { - simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(new IllegalArgumentException("test")); + simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); } else { - simulateDocumentSimpleResult = new SimulateDocumentSimpleResult(ingestDocument); + simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); } - results.add(simulateDocumentSimpleResult); + results.add(simulateDocumentBaseResult); } } @@ -100,15 +100,15 @@ public class SimulatePipelineResponseTests extends ESTestCase { } } } else { - SimulateDocumentSimpleResult expectedSimulateDocumentSimpleResult = (SimulateDocumentSimpleResult) expectedResultIterator.next(); - assertThat(result, instanceOf(SimulateDocumentSimpleResult.class)); - SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) result; - assertThat(simulateDocumentSimpleResult.getIngestDocument(), equalTo(expectedSimulateDocumentSimpleResult.getIngestDocument())); - if (expectedSimulateDocumentSimpleResult.getFailure() == null) { - assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); + SimulateDocumentBaseResult expectedSimulateDocumentBaseResult = (SimulateDocumentBaseResult) expectedResultIterator.next(); + assertThat(result, instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) result; + assertThat(simulateDocumentBaseResult.getIngestDocument(), equalTo(expectedSimulateDocumentBaseResult.getIngestDocument())); + if (expectedSimulateDocumentBaseResult.getFailure() == null) { + assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); } else { - 
assertThat(simulateDocumentSimpleResult.getFailure(), instanceOf(IllegalArgumentException.class)); - IllegalArgumentException e = (IllegalArgumentException) simulateDocumentSimpleResult.getFailure(); + assertThat(simulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) simulateDocumentBaseResult.getFailure(); assertThat(e.getMessage(), equalTo("test")); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 259392d3667..0bc79dd3aec 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -28,7 +28,7 @@ import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.SimulateDocumentSimpleResult; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; @@ -114,15 +114,15 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(response.isVerbose(), equalTo(false)); assertThat(response.getPipelineId(), equalTo("_id")); assertThat(response.getResults().size(), equalTo(1)); - assertThat(response.getResults().get(0), instanceOf(SimulateDocumentSimpleResult.class)); - SimulateDocumentSimpleResult simulateDocumentSimpleResult = (SimulateDocumentSimpleResult) response.getResults().get(0); + assertThat(response.getResults().get(0), instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) 
response.getResults().get(0); Map source = new HashMap<>(); source.put("foo", "bar"); source.put("fail", false); source.put("processed", true); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, source); - assertThat(simulateDocumentSimpleResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); - assertThat(simulateDocumentSimpleResult.getFailure(), nullValue()); + assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); + assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); } public void testBulkWithIngestFailures() throws Exception { From 5eaaa95c6127524a010fb74aa6c8750d2eff54bb Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 14:16:41 +0100 Subject: [PATCH 245/347] address SimulatePipelineRequest comments --- .../elasticsearch/action/ingest/SimulatePipelineRequest.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 48961f0e41f..8d1565cb2c5 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -34,6 +34,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.ingest.core.IngestDocument.MetaData; @@ -58,7 +59,7 @@ public class SimulatePipelineRequest extends ActionRequest Date: Wed, 20 Jan 2016 15:03:58 +0100 Subject: [PATCH 246/347] remove parsing logic with ObjectParser --- .../elasticsearch/ingest/IngestMetadata.java | 41 ++++++------ 
.../ingest/PipelineConfiguration.java | 65 +++++++++++-------- 2 files changed, 56 insertions(+), 50 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java index 4ee41c58ad7..a9c48ac6569 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java @@ -21,8 +21,11 @@ package org.elasticsearch.ingest; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -39,6 +42,7 @@ public final class IngestMetadata extends AbstractDiffable impl public final static String TYPE = "ingest"; public final static IngestMetadata PROTO = new IngestMetadata(); + private final ParseField PIPELINES_FIELD = new ParseField("pipeline"); // We can't use Pipeline class directly in cluster state, because we don't have the processor factories around when // IngestMetadata is registered as custom metadata. 
@@ -82,33 +86,26 @@ public final class IngestMetadata extends AbstractDiffable impl @Override public MetaData.Custom fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - switch (token) { - case FIELD_NAME: - currentFieldName = parser.currentName(); - break; - case START_ARRAY: - if ("pipelines".equals(currentFieldName)) { - Map pipelines = new HashMap<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.START_OBJECT) { - PipelineConfiguration pipeline = new PipelineConfiguration(parser); - pipelines.put(pipeline.getId(), pipeline); - } - } - return new IngestMetadata(pipelines); - } - break; + ObjectParser ingestMetaDataParser = new ObjectParser<>("ingest_metadata", null); + + Map pipelines = new HashMap<>(); + ingestMetaDataParser.declareField((parser1, aVoid, aVoid2) -> { + XContentParser.Token token; + while ((token = parser1.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.START_OBJECT) { + PipelineConfiguration pipeline = new PipelineConfiguration.Builder(parser1).build(); + pipelines.put(pipeline.getId(), pipeline); + } } - } - return PROTO; + }, PIPELINES_FIELD, ObjectParser.ValueType.OBJECT); + ingestMetaDataParser.parse(parser); + + return new IngestMetadata(pipelines); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray("pipelines"); + builder.startArray(PIPELINES_FIELD.getPreferredName()); for (PipelineConfiguration pipeline : pipelines.values()) { pipeline.toXContent(builder, params); } diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index da0ff4c3e2a..628cf2446cb 100644 --- 
a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -19,10 +19,14 @@ package org.elasticsearch.ingest; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -43,40 +47,45 @@ public final class PipelineConfiguration implements Writeable PARSER = new ObjectParser<>("pipeline_config", null); + + static { + PARSER.declareString(Builder::setId, new ParseField("id")); + PARSER.declareField((parser, builder, aVoid) -> { + XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); + XContentHelper.copyCurrentEvent(contentBuilder.generator(), parser); + builder.setConfig(contentBuilder.bytes()); + }, new ParseField("config"), ObjectParser.ValueType.OBJECT); + } + + private String id; + private BytesReference config; + + public Builder(XContentParser parser) throws IOException { + PARSER.parse(parser, this); + } + + public void setId(String id) { + this.id = id; + } + + public void setConfig(BytesReference config) { + this.config = config; + } + + public PipelineConfiguration build() { + return new PipelineConfiguration(id, config); + } + } + private final String id; // Store config as bytes reference, because the config is only used when the pipeline store reads the cluster state // and the way the map of maps config is read requires a deep copy (it removes instead of gets entries to check for unused options) // also the get pipeline api just 
directly returns this to the caller private final BytesReference config; - PipelineConfiguration(XContentParser parser) throws IOException { - String id = null; - BytesReference config = null; - - XContentParser.Token token; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - switch (token) { - case FIELD_NAME: - currentFieldName = parser.currentName(); - break; - case VALUE_STRING: - if ("id".equals(currentFieldName)) { - id = parser.text(); - } - break; - case START_OBJECT: - XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent()); - XContentHelper.copyCurrentEvent(builder.generator(), parser); - config = builder.bytes(); - break; - } - } - - this.id = Objects.requireNonNull(id); - this.config = Objects.requireNonNull(config); - } - public PipelineConfiguration(String id, BytesReference config) { this.id = id; this.config = config; From 9fe408adbd24b7707a6fd1ba205f7245c14295d9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 15:13:27 +0100 Subject: [PATCH 247/347] use ActionListenerResponseHandler --- .../ingest/IngestProxyActionFilter.java | 25 +++++-------------- 1 file changed, 6 insertions(+), 19 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 2512cea1162..be40395a009 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.bulk.BulkAction; @@ -59,7 
+60,7 @@ public final class IngestProxyActionFilter implements ActionFilter { } @Override - public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { Action ingestAction; switch (action) { case IndexAction.NAME: @@ -86,33 +87,19 @@ public final class IngestProxyActionFilter implements ActionFilter { } } - private void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { - transportService.sendRequest(randomIngestNode(), action.name(), request, new TransportResponseHandler() { + @SuppressWarnings("unchecked") + private void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { + transportService.sendRequest(randomIngestNode(), action.name(), request, new ActionListenerResponseHandler(listener) { @Override public TransportResponse newInstance() { return action.newResponse(); } - @Override - @SuppressWarnings("unchecked") - public void handleResponse(TransportResponse response) { - listener.onResponse(response); - } - - @Override - public void handleException(TransportException exp) { - listener.onFailure(exp); - } - - @Override - public String executor() { - return ThreadPool.Names.SAME; - } }); } @Override - public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { chain.proceed(action, response, listener); } From 7aeb3609325e18f5cccab7f1e8c2de04363d8b8f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 15:26:37 +0100 Subject: [PATCH 248/347] use AbstractRunnable --- .../action/ingest/IngestActionFilter.java | 23 +++++---- .../ingest/PipelineExecutionService.java | 50 ++++++++++++------- .../ingest/PipelineExecutionServiceTests.java | 12 
++--- 3 files changed, 53 insertions(+), 32 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 5747ddd1402..2a10570cff7 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -105,16 +105,21 @@ public final class IngestActionFilter extends AbstractComponent implements Actio executionService.execute(() -> bulkRequestModifier, (indexRequest, throwable) -> { logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable); bulkRequestModifier.markCurrentItemAsFailed(throwable); - }, (success) -> { - BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); - ActionListener actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(listener); - if (bulkRequest.requests().isEmpty()) { - // at this stage, the transport bulk action can't deal with a bulk request with no requests, - // so we stop and send an empty response back to the client. - // (this will happen if pre-processing all items in the bulk failed) - actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0)); + }, (throwable) -> { + if (throwable != null) { + logger.error("failed to execute pipeline for a bulk request", throwable); + listener.onFailure(throwable); } else { - chain.proceed(task, action, bulkRequest, actionListener); + BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); + ActionListener actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(listener); + if (bulkRequest.requests().isEmpty()) { + // at this stage, the transport bulk action can't deal with a bulk request with no requests, + // so we stop and send an empty response back to the client. 
+ // (this will happen if pre-processing all items in the bulk failed) + actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0)); + } else { + chain.proceed(task, action, bulkRequest, actionListener); + } } }); } diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index f6936487a23..c6a3b4b843d 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.threadpool.ThreadPool; @@ -42,34 +43,49 @@ public class PipelineExecutionService { public void execute(IndexRequest request, Consumer failureHandler, Consumer completionHandler) { Pipeline pipeline = getPipeline(request.getPipeline()); - threadPool.executor(ThreadPool.Names.INDEX).execute(() -> { - try { + threadPool.executor(ThreadPool.Names.INDEX).execute(new AbstractRunnable() { + + @Override + public void onFailure(Throwable t) { + failureHandler.accept(t); + } + + @Override + protected void doRun() throws Exception { innerExecute(request, pipeline); completionHandler.accept(true); - } catch (Exception e) { - failureHandler.accept(e); } }); } public void execute(Iterable> actionRequests, - BiConsumer itemFailureHandler, Consumer completionHandler) { - threadPool.executor(ThreadPool.Names.INDEX).execute(() -> { - for (ActionRequest actionRequest : actionRequests) { - if ((actionRequest instanceof IndexRequest)) { - IndexRequest indexRequest = (IndexRequest) actionRequest; - if 
(Strings.hasText(indexRequest.getPipeline())) { - try { - innerExecute(indexRequest, getPipeline(indexRequest.getPipeline())); - //this shouldn't be needed here but we do it for consistency with index api which requires it to prevent double execution - indexRequest.setPipeline(null); - } catch (Throwable e) { - itemFailureHandler.accept(indexRequest, e); + BiConsumer itemFailureHandler, + Consumer completionHandler) { + threadPool.executor(ThreadPool.Names.INDEX).execute(new AbstractRunnable() { + + @Override + public void onFailure(Throwable t) { + completionHandler.accept(t); + } + + @Override + protected void doRun() throws Exception { + for (ActionRequest actionRequest : actionRequests) { + if ((actionRequest instanceof IndexRequest)) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + if (Strings.hasText(indexRequest.getPipeline())) { + try { + innerExecute(indexRequest, getPipeline(indexRequest.getPipeline())); + //this shouldn't be needed here but we do it for consistency with index api which requires it to prevent double execution + indexRequest.setPipeline(null); + } catch (Throwable e) { + itemFailureHandler.accept(indexRequest, e); + } } } } + completionHandler.accept(null); } - completionHandler.accept(true); }); } diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 9bb098bed04..9126a513e6e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -98,7 +98,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), 
failureHandler, completionHandler); verify(failureHandler, times(1)).accept( argThat(new CustomTypeSafeMatcher("failure handler was not called with the expected arguments") { @@ -115,7 +115,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { } }) ); - verify(completionHandler, times(1)).accept(anyBoolean()); + verify(completionHandler, times(1)).accept(null); } public void testExecuteSuccess() throws Exception { @@ -311,11 +311,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor)); BiConsumer requestItemErrorHandler = mock(BiConsumer.class); - Consumer completionHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler); verify(requestItemErrorHandler, times(numIndexRequests)).accept(any(IndexRequest.class), eq(error)); - verify(completionHandler, times(1)).accept(true); + verify(completionHandler, times(1)).accept(null); } public void testBulkRequestExecution() throws Exception { @@ -334,11 +334,11 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") BiConsumer requestItemErrorHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler); verify(requestItemErrorHandler, never()).accept(any(), any()); - verify(completionHandler, times(1)).accept(true); + verify(completionHandler, times(1)).accept(null); } private IngestDocument eqID(String index, String type, String id, Map source) { From 977bbaba023d51bdad379ca68acf1db95dec190f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 15:44:32 +0100 Subject: [PATCH 249/347] fixed various small comments --- 
.../org/elasticsearch/ingest/core/CompoundProcessor.java | 2 +- .../org/elasticsearch/ingest/processor/DateProcessor.java | 7 ++++--- .../org/elasticsearch/ingest/processor/DeDotProcessor.java | 4 +++- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java index bad94de6c2b..bc5fd19aac7 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java @@ -77,7 +77,7 @@ public class CompoundProcessor implements Processor { } else { executeOnFailure(ingestDocument, e, processor.getType()); } - return; + break; } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 45f05aab7e0..61a4d142043 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; @@ -78,9 +79,9 @@ public final class DateProcessor implements Processor { for (Function dateParser : dateParsers) { try { dateTime = dateParser.apply(value); - } catch(Exception e) { - //try the next parser and keep track of the last exception - lastException = e; + } catch (Exception e) { + //try the next parser and keep track of the exceptions + lastException = ExceptionsHelper.useOrSuppress(lastException, e); } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java index 
12cc3654967..6e0610d90d6 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java @@ -91,7 +91,9 @@ public class DeDotProcessor implements Processor { } else if (obj instanceof List) { @SuppressWarnings("unchecked") List list = (List) obj; - list.forEach(this::deDot); + for (Object value : list) { + deDot(value); + } } } From fd31d6afdf8cc0afc35828566c19134e5c3f415d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 15:46:48 +0100 Subject: [PATCH 250/347] Let NodeService implement closable and let NodeService delegate to IngestService --- core/src/main/java/org/elasticsearch/node/Node.java | 6 +++--- .../java/org/elasticsearch/node/service/NodeService.java | 8 +++++++- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 4cc338fb837..7ca0f5e76a7 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -355,11 +355,11 @@ public class Node implements Releasable { StopWatch stopWatch = new StopWatch("node_close"); stopWatch.start("tribe"); injector.getInstance(TribeService.class).close(); - stopWatch.stop().start("ingest_service"); + stopWatch.stop().start("node_service"); try { - injector.getInstance(NodeService.class).getIngestService().close(); + injector.getInstance(NodeService.class).close(); } catch (IOException e) { - logger.warn("IngestService close failed", e); + logger.warn("NodeService close failed", e); } stopWatch.stop().start("http"); if (settings.getAsBoolean("http.enabled", true)) { diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index 15352eeadd2..32035dad606 100644 --- 
a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -42,6 +42,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.io.Closeable; import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -51,7 +52,7 @@ import static java.util.Collections.unmodifiableMap; /** */ -public class NodeService extends AbstractComponent { +public class NodeService extends AbstractComponent implements Closeable { private final ThreadPool threadPool; private final MonitorService monitorService; @@ -187,4 +188,9 @@ public class NodeService extends AbstractComponent { public IngestService getIngestService() { return ingestService; } + + @Override + public void close() throws IOException { + indicesService.close(); + } } From f546b27ea094a2a0edae8f345a90395853304c84 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 15:55:24 +0100 Subject: [PATCH 251/347] removed the ClusterService constructor dependency from PipelineStore --- .../ingest/DeletePipelineTransportAction.java | 4 +++- .../action/ingest/GetPipelineTransportAction.java | 2 +- .../action/ingest/PutPipelineTransportAction.java | 4 +++- .../org/elasticsearch/ingest/IngestService.java | 4 ++-- .../org/elasticsearch/ingest/PipelineStore.java | 13 +++++-------- .../org/elasticsearch/node/service/NodeService.java | 3 ++- .../elasticsearch/ingest/PipelineStoreTests.java | 3 +-- 7 files changed, 17 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 4f270572df4..6378eb5757b 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -37,12 +37,14 @@ import org.elasticsearch.transport.TransportService; public class DeletePipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; + private final ClusterService clusterService; @Inject public DeletePipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) { super(settings, DeletePipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, DeletePipelineRequest::new); + this.clusterService = clusterService; this.pipelineStore = nodeService.getIngestService().getPipelineStore(); } @@ -58,7 +60,7 @@ public class DeletePipelineTransportAction extends TransportMasterNodeAction listener) throws Exception { - pipelineStore.delete(request, listener); + pipelineStore.delete(clusterService, request, listener); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java index 4087a383baa..e762d0b8d33 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java @@ -58,7 +58,7 @@ public class GetPipelineTransportAction extends TransportMasterNodeReadAction listener) throws Exception { - listener.onResponse(new GetPipelineResponse(pipelineStore.getPipelines(request.getIds()))); + listener.onResponse(new GetPipelineResponse(pipelineStore.getPipelines(state, request.getIds()))); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java 
b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 123a5c59038..31a911207ab 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -37,12 +37,14 @@ import org.elasticsearch.transport.TransportService; public class PutPipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; + private final ClusterService clusterService; @Inject public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) { super(settings, PutPipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); + this.clusterService = clusterService; this.pipelineStore = nodeService.getIngestService().getPipelineStore(); } @@ -58,7 +60,7 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction listener) throws Exception { - pipelineStore.put(request, listener); + pipelineStore.put(clusterService, request, listener); } @Override diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java index 40bc803f08a..bc7cd75070c 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -37,9 +37,9 @@ public class IngestService implements Closeable { private final PipelineExecutionService pipelineExecutionService; private final ProcessorsRegistry processorsRegistry; - public IngestService(Settings settings, ThreadPool threadPool, ClusterService clusterService, ProcessorsRegistry processorsRegistry) { + public IngestService(Settings settings, ThreadPool 
threadPool, ProcessorsRegistry processorsRegistry) { this.processorsRegistry = processorsRegistry; - this.pipelineStore = new PipelineStore(settings, clusterService); + this.pipelineStore = new PipelineStore(settings); this.pipelineExecutionService = new PipelineExecutionService(pipelineStore, threadPool); } diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index a6787a41289..ea0164a79c9 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -51,7 +51,6 @@ import java.util.function.Function; public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener { - private final ClusterService clusterService; private final Pipeline.Factory factory = new Pipeline.Factory(); private Map processorFactoryRegistry; @@ -61,10 +60,8 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust // are loaded, so in the cluster state we just save the pipeline config and here we keep the actual pipelines around. volatile Map pipelines = new HashMap<>(); - public PipelineStore(Settings settings, ClusterService clusterService) { + public PipelineStore(Settings settings) { super(settings); - this.clusterService = clusterService; - clusterService.add(this); } public void buildProcessorFactoryRegistry(ProcessorsRegistry processorsRegistry, ScriptService scriptService) { @@ -115,7 +112,7 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust /** * Deletes the pipeline specified by id in the request. 
*/ - public void delete(DeletePipelineRequest request, ActionListener listener) { + public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener listener) { clusterService.submitStateUpdateTask("delete-pipeline-" + request.id(), new AckedClusterStateUpdateTask(request, listener) { @Override @@ -154,7 +151,7 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust * * @throws IllegalArgumentException If the pipeline holds incorrect configuration */ - public void put(PutPipelineRequest request, ActionListener listener) throws IllegalArgumentException { + public void put(ClusterService clusterService, PutPipelineRequest request, ActionListener listener) throws IllegalArgumentException { try { // validates the pipeline and processor configuration before submitting a cluster update task: Map pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2(); @@ -210,8 +207,8 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust */ // Returning PipelineConfiguration instead of Pipeline, because Pipeline and Processor interface don't // know how to serialize themselves. - public List getPipelines(String... ids) { - IngestMetadata ingestMetadata = clusterService.state().getMetaData().custom(IngestMetadata.TYPE); + public List getPipelines(ClusterState clusterState, String... 
ids) { + IngestMetadata ingestMetadata = clusterState.getMetaData().custom(IngestMetadata.TYPE); return innerGetPipelines(ingestMetadata, ids); } diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index 32035dad606..7c385b5b39a 100644 --- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -87,7 +87,8 @@ public class NodeService extends AbstractComponent implements Closeable { this.version = version; this.pluginService = pluginService; this.circuitBreakerService = circuitBreakerService; - this.ingestService = new IngestService(settings, threadPool, clusterService, processorsRegistry); + this.ingestService = new IngestService(settings, threadPool, processorsRegistry); + clusterService.add(ingestService.getPipelineStore()); } // can not use constructor injection or there will be a circular dependency diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index b10fc3d6d4c..cd3e78c822e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -50,8 +50,7 @@ public class PipelineStoreTests extends ESTestCase { @Before public void init() throws Exception { - ClusterService clusterService = mock(ClusterService.class); - store = new PipelineStore(Settings.EMPTY, clusterService); + store = new PipelineStore(Settings.EMPTY); ProcessorsRegistry registry = new ProcessorsRegistry(); registry.registerProcessor("set", (templateService) -> new SetProcessor.Factory(TestTemplateService.instance())); store.buildProcessorFactoryRegistry(registry, null); From b0c7096e2fa5fb1674cc683bcac5ea2127137245 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 16:00:44 +0100 Subject: [PATCH 
252/347] changed delete pipeline request to getters and setters in right format and validate the provided id in the setId(...) method --- .../action/ingest/DeletePipelineRequest.java | 7 ++++--- .../action/ingest/DeletePipelineRequestBuilder.java | 2 +- .../main/java/org/elasticsearch/ingest/PipelineStore.java | 8 ++++---- .../rest/action/ingest/RestDeletePipelineAction.java | 2 +- .../java/org/elasticsearch/ingest/IngestClientIT.java | 2 +- .../java/org/elasticsearch/ingest/PipelineStoreTests.java | 5 ++--- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index f0188c663d6..0931e30fdd0 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -32,11 +33,11 @@ public class DeletePipelineRequest extends AcknowledgedRequest listener) { - clusterService.submitStateUpdateTask("delete-pipeline-" + request.id(), new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("delete-pipeline-" + request.getId(), new AckedClusterStateUpdateTask(request, listener) { @Override protected WritePipelineResponse newResponse(boolean acknowledged) { @@ -133,11 +133,11 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust return currentState; } Map pipelines = currentIngestMetadata.getPipelines(); - if (pipelines.containsKey(request.id()) == false) { - throw new ResourceNotFoundException("pipeline [{}] is missing", request.id()); + if (pipelines.containsKey(request.getId()) == 
false) { + throw new ResourceNotFoundException("pipeline [{}] is missing", request.getId()); } else { pipelines = new HashMap<>(pipelines); - pipelines.remove(request.id()); + pipelines.remove(request.getId()); ClusterState.Builder newState = ClusterState.builder(currentState); newState.metaData(MetaData.builder(currentState.getMetaData()) .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)) diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index 723e3eb6840..d880fa23b2e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -40,7 +40,7 @@ public class RestDeletePipelineAction extends BaseRestHandler { @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { DeletePipelineRequest request = new DeletePipelineRequest(); - request.id(restRequest.param("id")); + request.setId(restRequest.param("id")); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.timeout(restRequest.paramAsTime("timeout", request.timeout())); client.deletePipeline(request, new AcknowledgedRestListener<>(channel)); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 0bc79dd3aec..57a429f9b50 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -200,7 +200,7 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(doc.get("processed"), equalTo(true)); DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest(); - deletePipelineRequest.id("_id"); + deletePipelineRequest.setId("_id"); 
WritePipelineResponse response = client().deletePipeline(deletePipelineRequest).get(); assertThat(response.isAcknowledged(), is(true)); diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index cd3e78c822e..798749fe72e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesArray; @@ -120,7 +119,7 @@ public class PipelineStoreTests extends ESTestCase { // Delete pipeline: DeletePipelineRequest deleteRequest = new DeletePipelineRequest(); - deleteRequest.id("_id"); + deleteRequest.setId("_id"); clusterState = store.innerDelete(deleteRequest, clusterState); store.innerUpdatePipelines(clusterState); assertThat(store.get("_id"), nullValue()); @@ -181,7 +180,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(pipeline.getProcessors().get(0).getType(), equalTo("set")); DeletePipelineRequest deleteRequest = new DeletePipelineRequest(); - deleteRequest.id(id); + deleteRequest.setId(id); clusterState = store.innerDelete(deleteRequest, clusterState); store.innerUpdatePipelines(clusterState); pipeline = store.get(id); From faf9ca21223bb3cc0a5d800e19d32a5a3c233188 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 16:02:10 +0100 Subject: [PATCH 253/347] brought back the body of the validate() methods. This is still useful if no setter has been invoked. 
--- .../elasticsearch/action/ingest/GetPipelineRequest.java | 6 +++++- .../elasticsearch/action/ingest/PutPipelineRequest.java | 9 ++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java index 0185a0836b8..34b14ff6164 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java @@ -46,7 +46,11 @@ public class GetPipelineRequest extends MasterNodeReadRequest @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + if (id == null) { + validationException = addValidationError("id is missing", validationException); + } + if (source == null) { + validationException = addValidationError("source is missing", validationException); + } + return validationException; } public String getId() { From e4a142f792ef2a26a1a3bedc90ab5cc8b4656bb9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 16:30:18 +0100 Subject: [PATCH 254/347] added constructors for the required parameters and removed the body of all validate methods --- .../action/ingest/DeletePipelineRequest.java | 16 +++-- .../ingest/DeletePipelineRequestBuilder.java | 5 +- .../action/ingest/GetPipelineRequest.java | 15 ++-- .../ingest/GetPipelineRequestBuilder.java | 5 +- .../action/ingest/PutPipelineRequest.java | 28 ++++---- .../ingest/PutPipelineRequestBuilder.java | 10 +-- .../ingest/SimulatePipelineRequest.java | 22 +++--- .../SimulatePipelineRequestBuilder.java | 9 ++- .../java/org/elasticsearch/client/Client.java | 7 +- .../client/support/AbstractClient.java | 13 ++-- .../ingest/RestDeletePipelineAction.java | 3 +- .../action/ingest/RestGetPipelineAction.java | 3 +- .../action/ingest/RestPutPipelineAction.java | 7 +- 
.../ingest/RestSimulatePipelineAction.java | 7 +- .../elasticsearch/ingest/IngestClientIT.java | 69 ++++++++----------- .../ingest/PipelineStoreTests.java | 18 ++--- 16 files changed, 105 insertions(+), 132 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index 0931e30fdd0..6e5b9d80c67 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -33,6 +33,16 @@ public class DeletePipelineRequest extends AcknowledgedRequest private String id; private BytesReference source; - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; + public PutPipelineRequest(String id, BytesReference source) { if (id == null) { - validationException = addValidationError("id is missing", validationException); + throw new IllegalArgumentException("id is missing"); } if (source == null) { - validationException = addValidationError("source is missing", validationException); + throw new IllegalArgumentException("source is missing"); } - return validationException; + + this.id = id; + this.source = source; + } + + PutPipelineRequest() { + } + + @Override + public ActionRequestValidationException validate() { + return null; } public String getId() { return id; } - public void setId(String id) { - this.id = Objects.requireNonNull(id); - } - public BytesReference getSource() { return source; } - public void setSource(BytesReference source) { - this.source = Objects.requireNonNull(source); - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java index 
377a240caba..bd927115fb5 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java @@ -29,14 +29,8 @@ public class PutPipelineRequestBuilder extends ActionRequestBuilder(channel)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index e43e8846e14..b483a84c116 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -40,8 +40,7 @@ public class RestGetPipelineAction extends BaseRestHandler { @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { - GetPipelineRequest request = new GetPipelineRequest(); - request.setIds(Strings.splitStringByCommaToArray(restRequest.param("id"))); + GetPipelineRequest request = new GetPipelineRequest(Strings.splitStringByCommaToArray(restRequest.param("id"))); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); client.getPipeline(request, new RestStatusToXContentListener<>(channel)); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index c876c45a876..5cdd9a893f2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; +import 
org.elasticsearch.rest.action.support.RestActions; public class RestPutPipelineAction extends BaseRestHandler { @@ -39,11 +40,7 @@ public class RestPutPipelineAction extends BaseRestHandler { @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { - PutPipelineRequest request = new PutPipelineRequest(); - request.setId(restRequest.param("id")); - if (restRequest.hasContent()) { - request.setSource(restRequest.content()); - } + PutPipelineRequest request = new PutPipelineRequest(restRequest.param("id"), RestActions.getRestContent(restRequest)); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.timeout(restRequest.paramAsTime("timeout", request.timeout())); client.putPipeline(request, new AcknowledgedRestListener<>(channel)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index da902bdaa42..35cf43740a4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -43,14 +43,9 @@ public class RestSimulatePipelineAction extends BaseRestHandler { @Override protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception { - SimulatePipelineRequest request = new SimulatePipelineRequest(); + SimulatePipelineRequest request = new SimulatePipelineRequest(RestActions.getRestContent(restRequest)); request.setId(restRequest.param("id")); request.setVerbose(restRequest.paramAsBoolean("verbose", false)); - - if (RestActions.hasBodyContent(restRequest)) { - request.setSource(RestActions.getRestContent(restRequest)); - } - client.simulatePipeline(request, new RestToXContentListener<>(channel)); } } diff --git 
a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 57a429f9b50..ae724a59f78 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -68,20 +68,18 @@ public class IngestClientIT extends ESIntegTestCase { } public void testSimulate() throws Exception { - client().preparePutPipeline() - .setId("_id") - .setSource(jsonBuilder().startObject() - .field("description", "my_pipeline") - .startArray("processors") - .startObject() - .startObject("test") - .endObject() - .endObject() - .endArray() - .endObject().bytes()) + BytesReference pipelineSource = jsonBuilder().startObject() + .field("description", "my_pipeline") + .startArray("processors") + .startObject() + .startObject("test") + .endObject() + .endObject() + .endArray() + .endObject().bytes(); + client().preparePutPipeline("_id", pipelineSource) .get(); - GetPipelineResponse getResponse = client().prepareGetPipeline() - .setIds("_id") + GetPipelineResponse getResponse = client().prepareGetPipeline("_id") .get(); assertThat(getResponse.isFound(), is(true)); assertThat(getResponse.pipelines().size(), equalTo(1)); @@ -102,13 +100,11 @@ public class IngestClientIT extends ESIntegTestCase { .endObject().bytes(); SimulatePipelineResponse response; if (randomBoolean()) { - response = client().prepareSimulatePipeline() - .setId("_id") - .setSource(bytes).get(); + response = client().prepareSimulatePipeline(bytes) + .setId("_id").get(); } else { - SimulatePipelineRequest request = new SimulatePipelineRequest(); + SimulatePipelineRequest request = new SimulatePipelineRequest(bytes); request.setId("_id"); - request.setSource(bytes); response = client().simulatePipeline(request).get(); } assertThat(response.isVerbose(), equalTo(false)); @@ -128,9 +124,7 @@ public class IngestClientIT extends ESIntegTestCase { public void testBulkWithIngestFailures() 
throws Exception { createIndex("index"); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(); - putPipelineRequest.setId("_id"); - putPipelineRequest.setSource(jsonBuilder().startObject() + BytesReference source = jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -138,8 +132,8 @@ public class IngestClientIT extends ESIntegTestCase { .endObject() .endObject() .endArray() - .endObject().bytes()); - + .endObject().bytes(); + PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source); client().putPipeline(putPipelineRequest).get(); int numRequests = scaledRandomIntBetween(32, 128); @@ -166,21 +160,19 @@ public class IngestClientIT extends ESIntegTestCase { } public void test() throws Exception { - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(); - putPipelineRequest.setId("_id"); - putPipelineRequest.setSource(jsonBuilder().startObject() - .field("description", "my_pipeline") - .startArray("processors") - .startObject() - .startObject("test") - .endObject() - .endObject() - .endArray() - .endObject().bytes()); + BytesReference source = jsonBuilder().startObject() + .field("description", "my_pipeline") + .startArray("processors") + .startObject() + .startObject("test") + .endObject() + .endObject() + .endArray() + .endObject().bytes(); + PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source); client().putPipeline(putPipelineRequest).get(); - GetPipelineRequest getPipelineRequest = new GetPipelineRequest(); - getPipelineRequest.setIds("_id"); + GetPipelineRequest getPipelineRequest = new GetPipelineRequest("_id"); GetPipelineResponse getResponse = client().getPipeline(getPipelineRequest).get(); assertThat(getResponse.isFound(), is(true)); assertThat(getResponse.pipelines().size(), equalTo(1)); @@ -199,12 +191,11 @@ public class IngestClientIT extends ESIntegTestCase { assertThat(doc.get("field"), equalTo("value2")); 
assertThat(doc.get("processed"), equalTo(true)); - DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest(); - deletePipelineRequest.setId("_id"); + DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest("_id"); WritePipelineResponse response = client().deletePipeline(deletePipelineRequest).get(); assertThat(response.isAcknowledged(), is(true)); - getResponse = client().prepareGetPipeline().setIds("_id").get(); + getResponse = client().prepareGetPipeline("_id").get(); assertThat(getResponse.isFound(), is(false)); assertThat(getResponse.pipelines().size(), equalTo(0)); } diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index 798749fe72e..117b95b2cd7 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -82,9 +82,7 @@ public class PipelineStoreTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // add a new pipeline: - PutPipelineRequest putRequest = new PutPipelineRequest(); - putRequest.setId(id); - putRequest.setSource(new BytesArray("{\"processors\": []}")); + PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray("{\"processors\": []}")); clusterState = store.innerPut(putRequest, clusterState); store.innerUpdatePipelines(clusterState); pipeline = store.get(id); @@ -94,9 +92,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(pipeline.getProcessors().size(), equalTo(0)); // overwrite existing pipeline: - putRequest = new PutPipelineRequest(); - putRequest.setId(id); - putRequest.setSource(new BytesArray("{\"processors\": [], \"description\": \"_description\"}")); + putRequest = new PutPipelineRequest(id, new BytesArray("{\"processors\": [], \"description\": \"_description\"}")); clusterState = store.innerPut(putRequest, clusterState); 
store.innerUpdatePipelines(clusterState); pipeline = store.get(id); @@ -118,8 +114,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(store.get("_id"), notNullValue()); // Delete pipeline: - DeletePipelineRequest deleteRequest = new DeletePipelineRequest(); - deleteRequest.setId("_id"); + DeletePipelineRequest deleteRequest = new DeletePipelineRequest("_id"); clusterState = store.innerDelete(deleteRequest, clusterState); store.innerUpdatePipelines(clusterState); assertThat(store.get("_id"), nullValue()); @@ -167,9 +162,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(pipeline, nullValue()); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty - PutPipelineRequest putRequest = new PutPipelineRequest(); - putRequest.setId(id); - putRequest.setSource(new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")); + PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")); clusterState = store.innerPut(putRequest, clusterState); store.innerUpdatePipelines(clusterState); pipeline = store.get(id); @@ -179,8 +172,7 @@ public class PipelineStoreTests extends ESTestCase { assertThat(pipeline.getProcessors().size(), equalTo(1)); assertThat(pipeline.getProcessors().get(0).getType(), equalTo("set")); - DeletePipelineRequest deleteRequest = new DeletePipelineRequest(); - deleteRequest.setId(id); + DeletePipelineRequest deleteRequest = new DeletePipelineRequest(id); clusterState = store.innerDelete(deleteRequest, clusterState); store.innerUpdatePipelines(clusterState); pipeline = store.get(id); From d2c608e1a837253367cd3f367394e19345b5a43b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jan 2016 16:51:27 +0100 Subject: [PATCH 255/347] fix test bug --- .../action/ingest/IngestProxyActionFilter.java | 4 +++- 
.../action/ingest/IngestProxyActionFilterTests.java | 7 ++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index be40395a009..4eec053b8f3 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; @@ -110,7 +111,8 @@ public final class IngestProxyActionFilter implements ActionFilter { private DiscoveryNode randomIngestNode() { assert NodeModule.isNodeIngestEnabled(clusterService.localNode().attributes()) == false; - DiscoveryNode[] ingestNodes = clusterService.state().getNodes().getIngestNodes().values().toArray(DiscoveryNode.class); + DiscoveryNodes nodes = clusterService.state().getNodes(); + DiscoveryNode[] ingestNodes = nodes.getIngestNodes().values().toArray(DiscoveryNode.class); if (ingestNodes.length == 0) { throw new IllegalStateException("There are no ingest nodes in this cluster, unable to forward request to an ingest node."); } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index a3b107d4c1e..a48398e9807 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -28,6 +28,7 @@ 
import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -66,7 +67,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { @SuppressWarnings("unchecked") private IngestProxyActionFilter buildFilter(int ingestNodes, int totalNodes) { - ClusterState clusterState = mock(ClusterState.class); + ClusterState.Builder clusterState = new ClusterState.Builder(new ClusterName("_name")); DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder(); DiscoveryNode localNode = null; for (int i = 0; i < totalNodes; i++) { @@ -83,10 +84,10 @@ public class IngestProxyActionFilterTests extends ESTestCase { localNode = node; } } - when(clusterState.nodes()).thenReturn(builder.build()); + clusterState.nodes(builder); ClusterService clusterService = mock(ClusterService.class); when(clusterService.localNode()).thenReturn(localNode); - when(clusterService.state()).thenReturn(clusterState); + when(clusterService.state()).thenReturn(clusterState.build()); transportService = mock(TransportService.class); return new IngestProxyActionFilter(clusterService, transportService); } From 428043700dd71d1417b05bce131dad09042fa1f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 13 Jan 2016 16:38:38 +0100 Subject: [PATCH 256/347] RescoreBuilder: Add parsing and creating of RescoreSearchContext Adding the ability to parse from xContent to the rescore builder. Also making RescoreBuilder an interface and renaming the current base builder that encapsulates the `window_size` setting the the concrete rescorer implementation to RescoreBaseBuilder. 
--- .../action/search/SearchRequestBuilder.java | 17 +- .../common/io/stream/StreamInput.java | 9 +- .../common/io/stream/StreamOutput.java | 6 +- .../search/builder/SearchSourceBuilder.java | 4 +- .../search/rescore/QueryRescorer.java | 20 +- .../search/rescore/QueryRescorerBuilder.java | 239 +++++++++++++++++ .../search/rescore/RescoreBaseBuilder.java | 173 ++++++++++++ .../search/rescore/RescoreBuilder.java | 252 +----------------- .../search/rescore/RescoreParseElement.java | 11 +- .../search/rescore/Rescorer.java | 7 +- .../builder/SearchSourceBuilderTests.java | 4 +- .../search/functionscore/QueryRescorerIT.java | 48 ++-- .../rescore/QueryRescoreBuilderTests.java | 224 ++++++++++++++-- 13 files changed, 690 insertions(+), 324 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java create mode 100644 core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 1557c266bd4..3bcc01daccb 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -32,6 +32,7 @@ import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.rescore.RescoreBaseBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; @@ -391,25 +392,25 @@ public class SearchRequestBuilder extends ActionRequestBuilder(); diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java 
b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java index 7f95ff10824..319055639ac 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java @@ -27,7 +27,7 @@ import org.apache.lucene.search.TopDocs; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; @@ -120,17 +120,17 @@ public final class QueryRescorer implements Rescorer { } } - private static final ObjectParser RESCORE_PARSER = new ObjectParser<>("query", null); + private static final ObjectParser RESCORE_PARSER = new ObjectParser<>("query", null); static { - RESCORE_PARSER.declareObject(QueryRescoreContext::setParsedQuery, (p, c) -> c.indexShard().getQueryShardContext().parse(p), new ParseField("rescore_query")); + RESCORE_PARSER.declareObject(QueryRescoreContext::setQuery, (p, c) -> c.parse(p).query(), new ParseField("rescore_query")); RESCORE_PARSER.declareFloat(QueryRescoreContext::setQueryWeight, new ParseField("query_weight")); RESCORE_PARSER.declareFloat(QueryRescoreContext::setRescoreQueryWeight, new ParseField("rescore_query_weight")); RESCORE_PARSER.declareString(QueryRescoreContext::setScoreMode, new ParseField("score_mode")); } @Override - public RescoreSearchContext parse(XContentParser parser, SearchContext context) throws IOException { + public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException { return RESCORE_PARSER.parse(parser, new QueryRescoreContext(this), context); } @@ -178,22 +178,24 @@ public final class QueryRescorer implements Rescorer { public static class QueryRescoreContext extends 
RescoreSearchContext { + static final int DEFAULT_WINDOW_SIZE = 10; + public QueryRescoreContext(QueryRescorer rescorer) { - super(NAME, 10, rescorer); + super(NAME, DEFAULT_WINDOW_SIZE, rescorer); this.scoreMode = QueryRescoreMode.Total; } - private ParsedQuery parsedQuery; + private Query query; private float queryWeight = 1.0f; private float rescoreQueryWeight = 1.0f; private QueryRescoreMode scoreMode; - public void setParsedQuery(ParsedQuery parsedQuery) { - this.parsedQuery = parsedQuery; + public void setQuery(Query query) { + this.query = query; } public Query query() { - return parsedQuery.query(); + return query; } public float queryWeight() { diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java new file mode 100644 index 00000000000..936353f7868 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java @@ -0,0 +1,239 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.rescore; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +public class QueryRescorerBuilder implements RescoreBuilder { + + public static final String NAME = "query"; + + public static final QueryRescorerBuilder PROTOTYPE = new QueryRescorerBuilder(new MatchAllQueryBuilder()); + + public static final float DEFAULT_RESCORE_QUERYWEIGHT = 1.0f; + public static final float DEFAULT_QUERYWEIGHT = 1.0f; + public static final QueryRescoreMode DEFAULT_SCORE_MODE = QueryRescoreMode.Total; + private final QueryBuilder queryBuilder; + private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT; + private float queryWeight = DEFAULT_QUERYWEIGHT; + private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE; + + private static ParseField RESCORE_QUERY_FIELD = new ParseField("rescore_query"); + private static ParseField QUERY_WEIGHT_FIELD = new ParseField("query_weight"); + private static ParseField RESCORE_QUERY_WEIGHT_FIELD = new ParseField("rescore_query_weight"); + private static ParseField SCORE_MODE_FIELD = new ParseField("score_mode"); + + private static final ObjectParser QUERY_RESCORE_PARSER = new ObjectParser<>(NAME, null); + + static { + QUERY_RESCORE_PARSER.declareObject(InnerBuilder::setQueryBuilder, (p, c) -> { + try { + return c.parseInnerQueryBuilder(); + } catch 
(IOException e) { + throw new ParsingException(p.getTokenLocation(), "Could not parse inner query", e); + } + } , RESCORE_QUERY_FIELD); + QUERY_RESCORE_PARSER.declareFloat(InnerBuilder::setQueryWeight, QUERY_WEIGHT_FIELD); + QUERY_RESCORE_PARSER.declareFloat(InnerBuilder::setRescoreQueryWeight, RESCORE_QUERY_WEIGHT_FIELD); + QUERY_RESCORE_PARSER.declareString((struct, value) -> struct.setScoreMode(QueryRescoreMode.fromString(value)), SCORE_MODE_FIELD); + } + + /** + * Creates a new {@link QueryRescorerBuilder} instance + * @param builder the query builder to build the rescore query from + */ + public QueryRescorerBuilder(QueryBuilder builder) { + this.queryBuilder = builder; + } + + /** + * @return the query used for this rescore query + */ + public QueryBuilder getRescoreQuery() { + return this.queryBuilder; + } + + /** + * Sets the original query weight for rescoring. The default is 1.0 + */ + public QueryRescorerBuilder setQueryWeight(float queryWeight) { + this.queryWeight = queryWeight; + return this; + } + + + /** + * Gets the original query weight for rescoring. The default is 1.0 + */ + public float getQueryWeight() { + return this.queryWeight; + } + + /** + * Sets the original query weight for rescoring. The default is 1.0 + */ + public QueryRescorerBuilder setRescoreQueryWeight(float rescoreQueryWeight) { + this.rescoreQueryWeight = rescoreQueryWeight; + return this; + } + + /** + * Gets the original query weight for rescoring. The default is 1.0 + */ + public float getRescoreQueryWeight() { + return this.rescoreQueryWeight; + } + + /** + * Sets the original query score mode. The default is {@link QueryRescoreMode#Total}. + */ + public QueryRescorerBuilder setScoreMode(QueryRescoreMode scoreMode) { + this.scoreMode = scoreMode; + return this; + } + + /** + * Gets the original query score mode. 
The default is total + */ + public QueryRescoreMode getScoreMode() { + return this.scoreMode; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field(RESCORE_QUERY_FIELD.getPreferredName(), queryBuilder); + builder.field(QUERY_WEIGHT_FIELD.getPreferredName(), queryWeight); + builder.field(RESCORE_QUERY_WEIGHT_FIELD.getPreferredName(), rescoreQueryWeight); + builder.field(SCORE_MODE_FIELD.getPreferredName(), scoreMode.name().toLowerCase(Locale.ROOT)); + builder.endObject(); + return builder; + } + + @Override + public QueryRescorerBuilder fromXContent(QueryParseContext parseContext) throws IOException { + InnerBuilder innerBuilder = QUERY_RESCORE_PARSER.parse(parseContext.parser(), new InnerBuilder(), parseContext); + return innerBuilder.build(); + } + + @Override + public QueryRescoreContext build(QueryShardContext context) throws IOException { + org.elasticsearch.search.rescore.QueryRescorer rescorer = new org.elasticsearch.search.rescore.QueryRescorer(); + QueryRescoreContext queryRescoreContext = new QueryRescoreContext(rescorer); + queryRescoreContext.setQuery(this.queryBuilder.toQuery(context)); + queryRescoreContext.setQueryWeight(this.queryWeight); + queryRescoreContext.setRescoreQueryWeight(this.rescoreQueryWeight); + queryRescoreContext.setScoreMode(this.scoreMode); + return queryRescoreContext; + } + + @Override + public final int hashCode() { + return Objects.hash(getClass(), scoreMode, queryWeight, rescoreQueryWeight, queryBuilder); + } + + @Override + public final boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryRescorerBuilder other = (QueryRescorerBuilder) obj; + return Objects.equals(scoreMode, other.scoreMode) && + Objects.equals(queryWeight, other.queryWeight) && + Objects.equals(rescoreQueryWeight, other.rescoreQueryWeight) && + 
Objects.equals(queryBuilder, other.queryBuilder); + } + + @Override + public QueryRescorerBuilder readFrom(StreamInput in) throws IOException { + QueryRescorerBuilder rescorer = new QueryRescorerBuilder(in.readQuery()); + rescorer.setScoreMode(QueryRescoreMode.PROTOTYPE.readFrom(in)); + rescorer.setRescoreQueryWeight(in.readFloat()); + rescorer.setQueryWeight(in.readFloat()); + return rescorer; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeQuery(queryBuilder); + scoreMode.writeTo(out); + out.writeFloat(rescoreQueryWeight); + out.writeFloat(queryWeight); + } + + @Override + public String getWriteableName() { + return NAME; + } + + /** + * Helper to be able to use {@link ObjectParser}, since we need the inner query builder + * for the constructor of {@link QueryRescorerBuilder}, but {@link ObjectParser} only + * allows filling properties of an already constructed value. + */ + private class InnerBuilder { + + private QueryBuilder queryBuilder; + private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT; + private float queryWeight = DEFAULT_QUERYWEIGHT; + private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE; + + void setQueryBuilder(QueryBuilder builder) { + this.queryBuilder = builder; + } + + QueryRescorerBuilder build() { + QueryRescorerBuilder queryRescoreBuilder = new QueryRescorerBuilder(queryBuilder); + queryRescoreBuilder.setQueryWeight(queryWeight); + queryRescoreBuilder.setRescoreQueryWeight(rescoreQueryWeight); + queryRescoreBuilder.setScoreMode(scoreMode); + return queryRescoreBuilder; + } + + void setQueryWeight(float queryWeight) { + this.queryWeight = queryWeight; + } + + void setRescoreQueryWeight(float rescoreQueryWeight) { + this.rescoreQueryWeight = rescoreQueryWeight; + } + + void setScoreMode(QueryRescoreMode scoreMode) { + this.scoreMode = scoreMode; + } + } +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java 
b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java new file mode 100644 index 00000000000..ddcabf9ada5 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java @@ -0,0 +1,173 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.rescore; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; + +import java.io.IOException; +import java.util.Objects; + +/** + * The base builder for rescorers. 
Wraps a conrete instance of {@link RescoreBuilder} and + * adds the ability to specify the optional `window_size` parameter + */ +public class RescoreBaseBuilder implements ToXContent, Writeable { + + private RescoreBuilder rescorer; + private Integer windowSize; + public static final RescoreBaseBuilder PROTOTYPE = new RescoreBaseBuilder(new QueryRescorerBuilder(new MatchAllQueryBuilder())); + + private static ParseField WINDOW_SIZE_FIELD = new ParseField("window_size"); + + public RescoreBaseBuilder(RescoreBuilder rescorer) { + if (rescorer == null) { + throw new IllegalArgumentException("rescorer cannot be null"); + } + this.rescorer = rescorer; + } + + public RescoreBuilder rescorer() { + return this.rescorer; + } + + public RescoreBaseBuilder windowSize(int windowSize) { + this.windowSize = windowSize; + return this; + } + + public Integer windowSize() { + return windowSize; + } + + public RescoreBaseBuilder fromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + String fieldName = null; + RescoreBuilder rescorer = null; + Integer windowSize = null; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else if (token.isValue()) { + if (parseContext.parseFieldMatcher().match(fieldName, WINDOW_SIZE_FIELD)) { + windowSize = parser.intValue(); + } else { + throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); + } + } else if (token == XContentParser.Token.START_OBJECT) { + // we only have QueryRescorer at this point + if (QueryRescorerBuilder.NAME.equals(fieldName)) { + rescorer = QueryRescorerBuilder.PROTOTYPE.fromXContent(parseContext); + } else { + throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support rescorer with name [" + fieldName + "]"); + } + } else { + throw new 
ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); + } + } + if (rescorer == null) { + throw new ParsingException(parser.getTokenLocation(), "missing rescore type"); + } + RescoreBaseBuilder rescoreBuilder = new RescoreBaseBuilder(rescorer); + if (windowSize != null) { + rescoreBuilder.windowSize(windowSize.intValue()); + } + return rescoreBuilder; + } + + public RescoreSearchContext build(QueryShardContext context) throws IOException { + RescoreSearchContext rescoreContext = this.rescorer.build(context); + if (windowSize != null) { + rescoreContext.setWindowSize(this.windowSize); + } + return rescoreContext; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (windowSize != null) { + builder.field("window_size", windowSize); + } + rescorer.toXContent(builder, params); + return builder; + } + + public static QueryRescorerBuilder queryRescorer(QueryBuilder queryBuilder) { + return new QueryRescorerBuilder(queryBuilder); + } + + @Override + public final int hashCode() { + return Objects.hash(windowSize, rescorer); + } + + @Override + public final boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + RescoreBaseBuilder other = (RescoreBaseBuilder) obj; + return Objects.equals(windowSize, other.windowSize) && + Objects.equals(rescorer, other.rescorer); + } + + @Override + public RescoreBaseBuilder readFrom(StreamInput in) throws IOException { + RescoreBaseBuilder builder = new RescoreBaseBuilder(in.readRescorer()); + builder.windowSize = in.readOptionalVInt(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeRescorer(rescorer); + out.writeOptionalVInt(this.windowSize); + } + + @Override + public final String toString() { + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + 
builder.prettyPrint(); + builder.startObject(); + toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return builder.string(); + } catch (Exception e) { + return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java index 7510d24f82d..eeefb1e5f5e 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java @@ -19,256 +19,16 @@ package org.elasticsearch.search.rescore; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; -import java.util.Locale; -import java.util.Objects; -public class RescoreBuilder implements ToXContent, Writeable { +public interface RescoreBuilder extends ToXContent, NamedWriteable { - private Rescorer rescorer; - private Integer windowSize; - public static final RescoreBuilder PROTOYPE = new RescoreBuilder(new QueryRescorer(new MatchAllQueryBuilder())); + RescoreSearchContext build(QueryShardContext context) throws IOException; - public RescoreBuilder(Rescorer rescorer) { - if (rescorer == null) { - throw new IllegalArgumentException("rescorer cannot be null"); - } - this.rescorer = rescorer; - } - - public Rescorer rescorer() { 
- return this.rescorer; - } - - public RescoreBuilder windowSize(int windowSize) { - this.windowSize = windowSize; - return this; - } - - public Integer windowSize() { - return windowSize; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (windowSize != null) { - builder.field("window_size", windowSize); - } - rescorer.toXContent(builder, params); - return builder; - } - - public static QueryRescorer queryRescorer(QueryBuilder queryBuilder) { - return new QueryRescorer(queryBuilder); - } - - @Override - public final int hashCode() { - return Objects.hash(windowSize, rescorer); - } - - @Override - public final boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - RescoreBuilder other = (RescoreBuilder) obj; - return Objects.equals(windowSize, other.windowSize) && - Objects.equals(rescorer, other.rescorer); - } - - @Override - public RescoreBuilder readFrom(StreamInput in) throws IOException { - RescoreBuilder builder = new RescoreBuilder(in.readRescorer()); - Integer windowSize = in.readOptionalVInt(); - if (windowSize != null) { - builder.windowSize(windowSize); - } - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeRescorer(rescorer); - out.writeOptionalVInt(this.windowSize); - } - - @Override - public final String toString() { - try { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.prettyPrint(); - builder.startObject(); - toXContent(builder, EMPTY_PARAMS); - builder.endObject(); - return builder.string(); - } catch (Exception e) { - return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; - } - } - - public static abstract class Rescorer implements ToXContent, NamedWriteable { - - private String name; - - public Rescorer(String name) { - this.name = name; - } - - @Override - public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(name); - builder = innerToXContent(builder, params); - builder.endObject(); - return builder; - } - - protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; - - @Override - public abstract int hashCode(); - - @Override - public abstract boolean equals(Object obj); - } - - public static class QueryRescorer extends Rescorer { - - private static final String NAME = "query"; - public static final QueryRescorer PROTOTYPE = new QueryRescorer(new MatchAllQueryBuilder()); - public static final float DEFAULT_RESCORE_QUERYWEIGHT = 1.0f; - public static final float DEFAULT_QUERYWEIGHT = 1.0f; - public static final QueryRescoreMode DEFAULT_SCORE_MODE = QueryRescoreMode.Total; - private final QueryBuilder queryBuilder; - private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT; - private float queryWeight = DEFAULT_QUERYWEIGHT; - private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE; - - /** - * Creates a new {@link QueryRescorer} instance - * @param builder the query builder to build the rescore query from - */ - public QueryRescorer(QueryBuilder builder) { - super(NAME); - this.queryBuilder = builder; - } - - /** - * @return the query used for this rescore query - */ - public QueryBuilder getRescoreQuery() { - return this.queryBuilder; - } - - /** - * Sets the original query weight for rescoring. The default is 1.0 - */ - public QueryRescorer setQueryWeight(float queryWeight) { - this.queryWeight = queryWeight; - return this; - } - - - /** - * Gets the original query weight for rescoring. The default is 1.0 - */ - public float getQueryWeight() { - return this.queryWeight; - } - - /** - * Sets the original query weight for rescoring. 
The default is 1.0 - */ - public QueryRescorer setRescoreQueryWeight(float rescoreQueryWeight) { - this.rescoreQueryWeight = rescoreQueryWeight; - return this; - } - - /** - * Gets the original query weight for rescoring. The default is 1.0 - */ - public float getRescoreQueryWeight() { - return this.rescoreQueryWeight; - } - - /** - * Sets the original query score mode. The default is {@link QueryRescoreMode#Total}. - */ - public QueryRescorer setScoreMode(QueryRescoreMode scoreMode) { - this.scoreMode = scoreMode; - return this; - } - - /** - * Gets the original query score mode. The default is total - */ - public QueryRescoreMode getScoreMode() { - return this.scoreMode; - } - - @Override - protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("rescore_query", queryBuilder); - builder.field("query_weight", queryWeight); - builder.field("rescore_query_weight", rescoreQueryWeight); - builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT)); - return builder; - } - - @Override - public final int hashCode() { - return Objects.hash(getClass(), scoreMode, queryWeight, rescoreQueryWeight, queryBuilder); - } - - @Override - public final boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - QueryRescorer other = (QueryRescorer) obj; - return Objects.equals(scoreMode, other.scoreMode) && - Objects.equals(queryWeight, other.queryWeight) && - Objects.equals(rescoreQueryWeight, other.rescoreQueryWeight) && - Objects.equals(queryBuilder, other.queryBuilder); - } - - @Override - public QueryRescorer readFrom(StreamInput in) throws IOException { - QueryRescorer rescorer = new QueryRescorer(in.readQuery()); - rescorer.setScoreMode(QueryRescoreMode.PROTOTYPE.readFrom(in)); - rescorer.setRescoreQueryWeight(in.readFloat()); - rescorer.setQueryWeight(in.readFloat()); - return rescorer; - } - - @Override - public void 
writeTo(StreamOutput out) throws IOException { - out.writeQuery(queryBuilder); - scoreMode.writeTo(out); - out.writeFloat(rescoreQueryWeight); - out.writeFloat(queryWeight); - } - - @Override - public String getWriteableName() { - return NAME; - } - } -} + RB fromXContent(QueryParseContext parseContext) throws IOException; +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java b/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java index 7f9f2725fbc..149db6cec2c 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java @@ -21,9 +21,12 @@ package org.elasticsearch.search.rescore; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.internal.SearchContext; +import java.io.IOException; + /** * */ @@ -33,14 +36,14 @@ public class RescoreParseElement implements SearchParseElement { public void parse(XContentParser parser, SearchContext context) throws Exception { if (parser.currentToken() == XContentParser.Token.START_ARRAY) { while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - parseSingleRescoreContext(parser, context); + context.addRescore(parseSingleRescoreContext(parser, context.indexShard().getQueryShardContext())); } } else { - parseSingleRescoreContext(parser, context); + context.addRescore(parseSingleRescoreContext(parser, context.indexShard().getQueryShardContext())); } } - public void parseSingleRescoreContext(XContentParser parser, SearchContext context) throws Exception { + public RescoreSearchContext parseSingleRescoreContext(XContentParser parser, QueryShardContext context) throws ElasticsearchParseException, IOException { String fieldName = null; 
RescoreSearchContext rescoreContext = null; Integer windowSize = null; @@ -71,7 +74,7 @@ public class RescoreParseElement implements SearchParseElement { if (windowSize != null) { rescoreContext.setWindowSize(windowSize.intValue()); } - context.addRescore(rescoreContext); + return rescoreContext; } } diff --git a/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java b/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java index 3c90289fde5..e3465a4df8f 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TopDocs; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -68,11 +69,11 @@ public interface Rescorer { * Parses the {@link RescoreSearchContext} for this impelementation * * @param parser the parser to read the context from - * @param context the current search context + * @param context the current shard context * @return the parsed {@link RescoreSearchContext} * @throws IOException if an {@link IOException} occurs while parsing the context */ - public RescoreSearchContext parse(XContentParser parser, SearchContext context) throws IOException; + public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException; /** * Extracts all terms needed to exectue this {@link Rescorer}. This method @@ -81,7 +82,7 @@ public interface Rescorer { * {@link SearchType#DFS_QUERY_THEN_FETCH} */ public void extractTerms(SearchContext context, RescoreSearchContext rescoreContext, Set termsSet); - + /* * TODO: At this point we only have one implemenation which modifies the * TopDocs given. 
Future implemenations might return actual resutls that diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 5a1b99fe05f..e992e33eeb6 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -57,7 +57,7 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder.InnerHit; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilderTests; -import org.elasticsearch.search.rescore.RescoreBuilder; +import org.elasticsearch.search.rescore.RescoreBaseBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; @@ -281,7 +281,7 @@ public class SearchSourceBuilderTests extends ESTestCase { int numRescores = randomIntBetween(1, 5); for (int i = 0; i < numRescores; i++) { // NORELEASE need a random rescore builder method - RescoreBuilder rescoreBuilder = new RescoreBuilder(RescoreBuilder.queryRescorer(QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), + RescoreBaseBuilder rescoreBuilder = new RescoreBaseBuilder(RescoreBaseBuilder.queryRescorer(QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20)))); builder.addRescorer(rescoreBuilder); } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 5644f893603..c9b895b1995 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -38,8 +38,8 @@ import 
org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.rescore.QueryRescoreMode; -import org.elasticsearch.search.rescore.RescoreBuilder; -import org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer; +import org.elasticsearch.search.rescore.QueryRescorerBuilder; +import org.elasticsearch.search.rescore.RescoreBaseBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -80,7 +80,7 @@ public class QueryRescorerIT extends ESIntegTestCase { for (int j = 0 ; j < iters; j++) { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchAllQuery()) - .setRescorer(RescoreBuilder.queryRescorer( + .setRescorer(RescoreBaseBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(), ScoreFunctionBuilders.weightFactorFunction(100)).boostMode(CombineFunction.REPLACE)) .setQueryWeight(0.0f).setRescoreQueryWeight(1.0f), 1).setSize(randomIntBetween(2, 10)).execute() @@ -116,7 +116,7 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)) + RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)) .setRescoreQueryWeight(2), 5).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(3l)); @@ -126,7 +126,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - .setRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3)), 5) + 
.setRescorer(RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3)), 5) .execute().actionGet(); assertHitCount(searchResponse, 3); @@ -136,7 +136,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - .setRescorer(RescoreBuilder.queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown"))), 5).execute() + .setRescorer(RescoreBaseBuilder.queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown"))), 5).execute() .actionGet(); assertHitCount(searchResponse, 3); @@ -181,7 +181,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -197,7 +197,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -214,7 +214,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + 
RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -263,7 +263,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 2).execute().actionGet(); // Only top 2 hits were re-ordered: assertThat(searchResponse.getHits().hits().length, equalTo(4)); @@ -280,7 +280,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 3).execute().actionGet(); // Only top 3 hits were re-ordered: @@ -333,7 +333,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(1.0f).setRescoreQueryWeight(-1f), 3).execute().actionGet(); // 6 and 1 got worse, and then the hit (2) outside the rescore window were sorted ahead: @@ -424,7 +424,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - RescoreBuilder + RescoreBaseBuilder .queryRescorer( QueryBuilders 
.constantScoreQuery(QueryBuilders.matchPhraseQuery("field1", intToEnglish).slop(3))) @@ -462,7 +462,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - RescoreBuilder + RescoreBaseBuilder .queryRescorer( QueryBuilders .constantScoreQuery(QueryBuilders.matchPhraseQuery("field1", "not in the index").slop(3))) @@ -480,7 +480,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - RescoreBuilder + RescoreBaseBuilder .queryRescorer( QueryBuilders.matchPhraseQuery("field1", intToEnglish).slop(0)) .setQueryWeight(1.0f).setRescoreQueryWeight(1.0f), 2 * rescoreWindow).execute().actionGet(); @@ -512,7 +512,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( - RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)) + RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)) .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f), 5).setExplain(true).execute() .actionGet(); assertHitCount(searchResponse, 3); @@ -538,7 +538,7 @@ public class QueryRescorerIT extends ESIntegTestCase { String[] scoreModes = new String[]{ "max", "min", "avg", "total", "multiply", "" }; String[] descriptionModes = new String[]{ "max of:", "min of:", "avg of:", "sum of:", "product of:", "sum of:" }; for (int innerMode = 0; innerMode < scoreModes.length; innerMode++) { - QueryRescorer innerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f)) + QueryRescorerBuilder innerRescoreQuery = RescoreBaseBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f)) .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f); if (!"".equals(scoreModes[innerMode])) { @@ 
-561,7 +561,7 @@ public class QueryRescorerIT extends ESIntegTestCase { } for (int outerMode = 0; outerMode < scoreModes.length; outerMode++) { - QueryRescorer outerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown") + QueryRescorerBuilder outerRescoreQuery = RescoreBaseBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown") .boost(4.0f)).setQueryWeight(0.5f).setRescoreQueryWeight(0.4f); if (!"".equals(scoreModes[outerMode])) { @@ -599,7 +599,7 @@ public class QueryRescorerIT extends ESIntegTestCase { for (int i = 0; i < numDocs - 4; i++) { String[] intToEnglish = new String[] { English.intToEnglish(i), English.intToEnglish(i + 1), English.intToEnglish(i + 2), English.intToEnglish(i + 3) }; - QueryRescorer rescoreQuery = RescoreBuilder + QueryRescorerBuilder rescoreQuery = RescoreBaseBuilder .queryRescorer( QueryBuilders.boolQuery() .disableCoord(true) @@ -682,10 +682,10 @@ public class QueryRescorerIT extends ESIntegTestCase { public void testMultipleRescores() throws Exception { int numDocs = indexRandomNumbers("keyword", 1, true); - QueryRescorer eightIsGreat = RescoreBuilder.queryRescorer( + QueryRescorerBuilder eightIsGreat = RescoreBaseBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8)), ScoreFunctionBuilders.weightFactorFunction(1000.0f)).boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); - QueryRescorer sevenIsBetter = RescoreBuilder.queryRescorer( + QueryRescorerBuilder sevenIsBetter = RescoreBaseBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7)), ScoreFunctionBuilders.weightFactorFunction(10000.0f)).boostMode(CombineFunction.REPLACE)) .setScoreMode(QueryRescoreMode.Total); @@ -703,10 +703,10 @@ public class QueryRescorerIT extends ESIntegTestCase { // We have no idea what the second hit will be because we didn't get a chance to look for seven 
// Now use one rescore to drag the number we're looking for into the window of another - QueryRescorer ninetyIsGood = RescoreBuilder.queryRescorer( + QueryRescorerBuilder ninetyIsGood = RescoreBaseBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f)) .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); - QueryRescorer oneToo = RescoreBuilder.queryRescorer( + QueryRescorerBuilder oneToo = RescoreBaseBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*one*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f)) .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); request.clearRescorers().addRescorer(ninetyIsGood, numDocs).addRescorer(oneToo, 10); @@ -759,7 +759,7 @@ public class QueryRescorerIT extends ESIntegTestCase { request.setQuery(QueryBuilders.termQuery("text", "hello")); request.setFrom(1); request.setSize(4); - request.addRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchAllQuery()), 50); + request.addRescorer(RescoreBaseBuilder.queryRescorer(QueryBuilders.matchAllQuery()), 50); assertEquals(4, request.get().getHits().hits().length); } diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index 2aa55f8b626..a305e8ca5e0 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -19,19 +19,45 @@ package org.elasticsearch.search.rescore; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperBuilders; +import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryParser; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer; -import org.elasticsearch.search.rescore.RescoreBuilder.Rescorer; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; +import java.util.HashSet; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -40,6 +66,7 @@ public class QueryRescoreBuilderTests extends 
ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; + private static IndicesQueriesRegistry indicesQueriesRegistry; /** * setup for the whole base test class @@ -47,13 +74,17 @@ public class QueryRescoreBuilderTests extends ESTestCase { @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - namedWriteableRegistry.registerPrototype(Rescorer.class, org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer.PROTOTYPE); - namedWriteableRegistry.registerPrototype(QueryBuilder.class, new MatchAllQueryBuilder()); + namedWriteableRegistry.registerPrototype(RescoreBuilder.class, org.elasticsearch.search.rescore.QueryRescorerBuilder.PROTOTYPE); + @SuppressWarnings("rawtypes") + Set injectedQueryParsers = new HashSet<>(); + injectedQueryParsers.add(new MatchAllQueryParser()); + indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry); } @AfterClass public static void afterClass() throws Exception { namedWriteableRegistry = null; + indicesQueriesRegistry = null; } /** @@ -61,8 +92,8 @@ public class QueryRescoreBuilderTests extends ESTestCase { */ public void testSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - RescoreBuilder original = randomRescoreBuilder(); - RescoreBuilder deserialized = serializedCopy(original); + RescoreBaseBuilder original = randomRescoreBuilder(); + RescoreBaseBuilder deserialized = serializedCopy(original); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); @@ -74,7 +105,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { */ public void testEqualsAndHashcode() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - RescoreBuilder firstBuilder = randomRescoreBuilder(); + 
RescoreBaseBuilder firstBuilder = randomRescoreBuilder(); assertFalse("rescore builder is equal to null", firstBuilder.equals(null)); assertFalse("rescore builder is equal to incompatible type", firstBuilder.equals("")); assertTrue("rescore builder is not equal to self", firstBuilder.equals(firstBuilder)); @@ -82,13 +113,13 @@ public class QueryRescoreBuilderTests extends ESTestCase { equalTo(firstBuilder.hashCode())); assertThat("different rescore builder should not be equal", mutate(firstBuilder), not(equalTo(firstBuilder))); - RescoreBuilder secondBuilder = serializedCopy(firstBuilder); + RescoreBaseBuilder secondBuilder = serializedCopy(firstBuilder); assertTrue("rescore builder is not equal to self", secondBuilder.equals(secondBuilder)); assertTrue("rescore builder is not equal to its copy", firstBuilder.equals(secondBuilder)); assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder)); assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode())); - RescoreBuilder thirdBuilder = serializedCopy(secondBuilder); + RescoreBaseBuilder thirdBuilder = serializedCopy(secondBuilder); assertTrue("rescore builder is not equal to self", thirdBuilder.equals(thirdBuilder)); assertTrue("rescore builder is not equal to its copy", secondBuilder.equals(thirdBuilder)); assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); @@ -99,8 +130,165 @@ public class QueryRescoreBuilderTests extends ESTestCase { } } - private RescoreBuilder mutate(RescoreBuilder original) throws IOException { - RescoreBuilder mutation = serializedCopy(original); + /** + * creates random rescorer, renders it to xContent and back to new instance that should be equal to original + */ + public void testFromXContent() throws IOException { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + 
context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + RescoreBaseBuilder rescoreBuilder = randomRescoreBuilder(); + + XContentParser parser = createParser(rescoreBuilder); + context.reset(parser); + parser.nextToken(); + RescoreBaseBuilder secondRescoreBuilder = RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + assertNotSame(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + } + } + + private static XContentParser createParser(RescoreBaseBuilder rescoreBuilder) throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + builder.prettyPrint(); + } + builder.startObject(); + rescoreBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + + return XContentHelper.createParser(builder.bytes()); + } + + /** + * test that build() outputs a {@link RescoreSearchContext} that is similar to the one + * we would get when parsing the xContent the test rescore builder is rendering out + */ + public void testBuildRescoreSearchContext() throws ElasticsearchParseException, IOException { + Settings indexSettings = Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + Index index = new Index(randomAsciiOfLengthBetween(1, 10)); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); + // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer + QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) { + @Override + public MappedFieldType fieldMapper(String name) { + StringFieldMapper.Builder builder = MapperBuilders.stringField(name); + return builder.build(new 
Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); + } + }; + + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + RescoreBaseBuilder rescoreBuilder = randomRescoreBuilder(); + QueryRescoreContext rescoreContext = (QueryRescoreContext) rescoreBuilder.build(mockShardContext); + XContentParser parser = createParser(rescoreBuilder); + + QueryRescoreContext parsedRescoreContext = (QueryRescoreContext) new RescoreParseElement().parseSingleRescoreContext(parser, mockShardContext); + assertNotSame(rescoreContext, parsedRescoreContext); + assertEquals(rescoreContext.window(), parsedRescoreContext.window()); + assertEquals(rescoreContext.query(), parsedRescoreContext.query()); + assertEquals(rescoreContext.queryWeight(), parsedRescoreContext.queryWeight(), Float.MIN_VALUE); + assertEquals(rescoreContext.rescoreQueryWeight(), parsedRescoreContext.rescoreQueryWeight(), Float.MIN_VALUE); + assertEquals(rescoreContext.scoreMode(), parsedRescoreContext.scoreMode()); + } + } + + /** + * test parsing exceptions for incorrect rescorer syntax + */ + public void testUnknownFieldsExpection() throws IOException { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); + + String rescoreElement = "{\n" + + " \"window_size\" : 20,\n" + + " \"bad_rescorer_name\" : { }\n" + + "}\n"; + prepareContext(context, rescoreElement); + try { + RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + fail("expected a parsing exception"); + } catch (ParsingException e) { + assertEquals("rescore doesn't support rescorer with name [bad_rescorer_name]", e.getMessage()); + } + + rescoreElement = "{\n" + + " \"bad_fieldName\" : 20\n" + + "}\n"; + prepareContext(context, rescoreElement); + try { + RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + fail("expected a parsing exception"); + } catch (ParsingException e) { + assertEquals("rescore doesn't support [bad_fieldName]", 
e.getMessage()); + } + + rescoreElement = "{\n" + + " \"window_size\" : 20,\n" + + " \"query\" : [ ]\n" + + "}\n"; + prepareContext(context, rescoreElement); + try { + RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + fail("expected a parsing exception"); + } catch (ParsingException e) { + assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); + } + + rescoreElement = "{ }"; + prepareContext(context, rescoreElement); + try { + RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + fail("expected a parsing exception"); + } catch (ParsingException e) { + assertEquals("missing rescore type", e.getMessage()); + } + + rescoreElement = "{\n" + + " \"window_size\" : 20,\n" + + " \"query\" : { \"bad_fieldname\" : 1.0 } \n" + + "}\n"; + prepareContext(context, rescoreElement); + try { + RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + fail("expected a parsing exception"); + } catch (IllegalArgumentException e) { + assertEquals("[query] unknown field [bad_fieldname], parser not found", e.getMessage()); + } + + rescoreElement = "{\n" + + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" + + "}\n"; + prepareContext(context, rescoreElement); + try { + RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + fail("expected a parsing exception"); + } catch (ParsingException e) { + assertEquals("[query] failed to parse field [rescore_query]", e.getMessage()); + } + + rescoreElement = "{\n" + + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" + + "}\n"; + prepareContext(context, rescoreElement); + RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + } + + /** + * create a new parser from the rescorer string representation and reset context with it + */ + private static void prepareContext(QueryParseContext context, String rescoreElement) throws IOException { + XContentParser parser = 
XContentFactory.xContent(rescoreElement).createParser(rescoreElement); + context.reset(parser); + // move to first token, this is where the internal fromXContent + assertTrue(parser.nextToken() == XContentParser.Token.START_OBJECT); + } + + private static RescoreBaseBuilder mutate(RescoreBaseBuilder original) throws IOException { + RescoreBaseBuilder mutation = serializedCopy(original); if (randomBoolean()) { Integer windowSize = original.windowSize(); if (windowSize != null) { @@ -109,7 +297,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { mutation.windowSize(randomIntBetween(0, 100)); } } else { - QueryRescorer queryRescorer = (QueryRescorer) mutation.rescorer(); + QueryRescorerBuilder queryRescorer = (QueryRescorerBuilder) mutation.rescorer(); switch (randomIntBetween(0, 3)) { case 0: queryRescorer.setQueryWeight(queryRescorer.getQueryWeight() + 0.1f); @@ -138,10 +326,10 @@ public class QueryRescoreBuilderTests extends ESTestCase { /** * create random shape that is put under test */ - private static RescoreBuilder randomRescoreBuilder() { + private static RescoreBaseBuilder randomRescoreBuilder() { QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()).queryName(randomAsciiOfLength(20)); - org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer rescorer = new - org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer(queryBuilder); + org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new + org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder); if (randomBoolean()) { rescorer.setQueryWeight(randomFloat()); } @@ -151,18 +339,18 @@ public class QueryRescoreBuilderTests extends ESTestCase { if (randomBoolean()) { rescorer.setScoreMode(randomFrom(QueryRescoreMode.values())); } - RescoreBuilder builder = new RescoreBuilder(rescorer); + RescoreBaseBuilder builder = new RescoreBaseBuilder(rescorer); if (randomBoolean()) { builder.windowSize(randomIntBetween(0, 100)); } return builder; } - private 
static RescoreBuilder serializedCopy(RescoreBuilder original) throws IOException { + private static RescoreBaseBuilder serializedCopy(RescoreBaseBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { - return RescoreBuilder.PROTOYPE.readFrom(in); + return RescoreBaseBuilder.PROTOTYPE.readFrom(in); } } } From 1550d0f013f2f3c97bf6d17e5ec32172fedfc17b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 15 Jan 2016 14:54:38 +0100 Subject: [PATCH 257/347] Switch to using RescoreBuilder in SearchSourceBuilder --- .../elasticsearch/search/SearchModule.java | 23 ++++++++----- .../elasticsearch/search/SearchService.java | 31 ++++------------- .../search/builder/SearchSourceBuilder.java | 34 +++++++------------ .../search/rescore/RescoreBaseBuilder.java | 2 ++ .../builder/SearchSourceBuilderTests.java | 17 ++++------ .../highlight/HighlightBuilderTests.java | 14 ++++---- .../rescore/QueryRescoreBuilderTests.java | 17 +++------- 7 files changed, 53 insertions(+), 85 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index c33471f4432..739b97034bf 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -19,14 +19,6 @@ package org.elasticsearch.search; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Supplier; - import org.apache.lucene.search.BooleanQuery; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.builders.CircleBuilder; @@ -227,9 +219,19 @@ import org.elasticsearch.search.highlight.HighlightPhase; 
import org.elasticsearch.search.highlight.Highlighter; import org.elasticsearch.search.highlight.Highlighters; import org.elasticsearch.search.query.QueryPhase; +import org.elasticsearch.search.rescore.QueryRescorerBuilder; +import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggesters; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + /** * */ @@ -327,6 +329,7 @@ public class SearchModule extends AbstractModule { bind(IndicesQueriesRegistry.class).toInstance(buildQueryParserRegistry()); configureFetchSubPhase(); configureShapes(); + configureRescorers(); } protected void configureFetchSubPhase() { @@ -467,6 +470,10 @@ public class SearchModule extends AbstractModule { } } + private void configureRescorers() { + namedWriteableRegistry.registerPrototype(RescoreBuilder.class, QueryRescorerBuilder.PROTOTYPE); + } + private void registerBuiltinFunctionScoreParsers() { registerFunctionScoreParser(new ScriptScoreFunctionParser()); registerFunctionScoreParser(new GaussDecayFunctionParser()); diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 6a84bb44ae7..a5511277bc5 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.ObjectSet; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; @@ -100,6 +101,7 @@ import org.elasticsearch.search.query.QuerySearchRequest; import 
org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.query.ScrollQuerySearchResult; +import org.elasticsearch.search.rescore.RescoreBaseBuilder; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -772,33 +774,12 @@ public class SearchService extends AbstractLifecycleComponent imp } } if (source.rescores() != null) { - XContentParser completeRescoreParser = null; try { - XContentBuilder completeRescoreBuilder = XContentFactory.jsonBuilder(); - completeRescoreBuilder.startObject(); - completeRescoreBuilder.startArray("rescore"); - for (BytesReference rescore : source.rescores()) { - XContentParser parser = XContentFactory.xContent(rescore).createParser(rescore); - parser.nextToken(); - completeRescoreBuilder.copyCurrentStructure(parser); + for (RescoreBaseBuilder rescore : source.rescores()) { + context.addRescore(rescore.build(context.indexShard().getQueryShardContext())); } - completeRescoreBuilder.endArray(); - completeRescoreBuilder.endObject(); - BytesReference completeRescoreBytes = completeRescoreBuilder.bytes(); - completeRescoreParser = XContentFactory.xContent(completeRescoreBytes).createParser(completeRescoreBytes); - completeRescoreParser.nextToken(); - completeRescoreParser.nextToken(); - completeRescoreParser.nextToken(); - this.elementParsers.get("rescore").parse(completeRescoreParser, context); - } catch (Exception e) { - String sSource = "_na_"; - try { - sSource = source.toString(); - } catch (Throwable e1) { - // ignore - } - XContentLocation location = completeRescoreParser != null ? 
completeRescoreParser.getTokenLocation() : null; - throw new SearchParseException(context, "failed to parse rescore source [" + sSource + "]", location, e); + } catch (IOException e) { + throw new SearchContextException(context, "failed to create RescoreSearchContext", e); } } if (source.fields() != null) { diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 777e32ee565..4db3a0583bb 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.builder; import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; @@ -151,7 +152,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private BytesReference innerHitsBuilder; - private List rescoreBuilders; + private List rescoreBuilders; private ObjectFloatHashMap indexBoost = null; @@ -459,19 +460,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } public SearchSourceBuilder addRescorer(RescoreBaseBuilder rescoreBuilder) { - try { if (rescoreBuilders == null) { rescoreBuilders = new ArrayList<>(); } - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - rescoreBuilder.toXContent(builder, EMPTY_PARAMS); - builder.endObject(); - rescoreBuilders.add(builder.bytes()); + rescoreBuilders.add(rescoreBuilder); return this; - } catch (IOException e) { - throw new RuntimeException(e); - } } public SearchSourceBuilder clearRescorers() { @@ -498,7 +491,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Gets the bytes representing the 
rescore builders for this request. */ - public List rescores() { + public List rescores() { return rescoreBuilders; } @@ -878,10 +871,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } builder.sorts = sorts; } else if (context.parseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) { - List rescoreBuilders = new ArrayList<>(); + List rescoreBuilders = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); - rescoreBuilders.add(xContentBuilder.bytes()); + rescoreBuilders.add(RescoreBaseBuilder.PROTOTYPE.fromXContent(context)); } builder.rescoreBuilders = rescoreBuilders; } else if (context.parseFieldMatcher().match(currentFieldName, STATS_FIELD)) { @@ -1048,10 +1040,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (rescoreBuilders != null) { builder.startArray(RESCORE_FIELD.getPreferredName()); - for (BytesReference rescoreBuilder : rescoreBuilders) { - XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(rescoreBuilder); - parser.nextToken(); - builder.copyCurrentStructure(parser); + for (RescoreBaseBuilder rescoreBuilder : rescoreBuilders) { + rescoreBuilder.toXContent(builder, params); } builder.endArray(); } @@ -1197,9 +1187,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } if (in.readBoolean()) { int size = in.readVInt(); - List rescoreBuilders = new ArrayList<>(); + List rescoreBuilders = new ArrayList<>(); for (int i = 0; i < size; i++) { - rescoreBuilders.add(in.readBytesReference()); + rescoreBuilders.add(RescoreBaseBuilder.PROTOTYPE.readFrom(in)); } builder.rescoreBuilders = rescoreBuilders; } @@ -1313,8 +1303,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ out.writeBoolean(hasRescoreBuilders); if (hasRescoreBuilders) { 
out.writeVInt(rescoreBuilders.size()); - for (BytesReference rescoreBuilder : rescoreBuilders) { - out.writeBytesReference(rescoreBuilder); + for (RescoreBaseBuilder rescoreBuilder : rescoreBuilders) { + rescoreBuilder.writeTo(out); } } boolean hasScriptFields = scriptFields != null; diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java index ddcabf9ada5..a0201ea8362 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java @@ -115,10 +115,12 @@ public class RescoreBaseBuilder implements ToXContent, Writeable injectedQueryParsers = new HashSet<>(); - injectedQueryParsers.add(new MatchAllQueryParser()); - indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry); + namedWriteableRegistry.registerPrototype(RescoreBuilder.class, QueryRescorerBuilder.PROTOTYPE); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); } @AfterClass @@ -154,10 +148,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { if (randomBoolean()) { builder.prettyPrint(); } - builder.startObject(); rescoreBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - return XContentHelper.createParser(builder.bytes()); } @@ -326,7 +317,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { /** * create random shape that is put under test */ - private static RescoreBaseBuilder randomRescoreBuilder() { + public static RescoreBaseBuilder randomRescoreBuilder() { QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()).queryName(randomAsciiOfLength(20)); org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder); From 
437fe6c056dd729d05215157b203de9ea3bfdc1e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 20 Jan 2016 12:12:40 -0800 Subject: [PATCH 258/347] make DeDotProcessor's constructor package-private --- .../java/org/elasticsearch/ingest/processor/DeDotProcessor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java index 6e0610d90d6..7ee146d70f1 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java @@ -41,7 +41,7 @@ public class DeDotProcessor implements Processor { private final String processorTag; private final String separator; - public DeDotProcessor(String processorTag, String separator) { + DeDotProcessor(String processorTag, String separator) { this.processorTag = processorTag; this.separator = separator; } From f1204cb1ab5ca428e593603bd1eecf6015c9fde5 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 20 Jan 2016 12:28:22 -0800 Subject: [PATCH 259/347] add an AbstractProcessor to help hold the re-used processorTag variable --- .../ingest/core/AbstractProcessor.java | 38 +++++++++++++++++++ .../processor/AbstractStringProcessor.java | 14 ++----- .../ingest/processor/AppendProcessor.java | 14 ++----- .../ingest/processor/ConvertProcessor.java | 14 ++----- .../ingest/processor/DateProcessor.java | 14 ++----- .../ingest/processor/DeDotProcessor.java | 13 ++----- .../ingest/processor/FailProcessor.java | 13 ++----- .../ingest/processor/GsubProcessor.java | 13 ++----- .../ingest/processor/JoinProcessor.java | 13 ++----- .../ingest/processor/LowercaseProcessor.java | 4 +- .../ingest/processor/RemoveProcessor.java | 12 ++---- .../ingest/processor/RenameProcessor.java | 13 ++----- .../ingest/processor/SetProcessor.java | 14 ++----- .../ingest/processor/SplitProcessor.java | 14 ++----- 
.../ingest/processor/TrimProcessor.java | 4 +- .../ingest/processor/UppercaseProcessor.java | 4 +- .../ingest/grok/GrokProcessor.java | 14 ++----- .../ingest/geoip/GeoIpProcessor.java | 14 ++----- 18 files changed, 100 insertions(+), 139 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessor.java diff --git a/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessor.java new file mode 100644 index 00000000000..e709ae395cb --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessor.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +package org.elasticsearch.ingest.core; + +/** + * An Abstract Processor that holds a processorTag field to be used + * by other processors. 
+ */ +public abstract class AbstractProcessor implements Processor { + protected final String tag; + + protected AbstractProcessor(String tag) { + this.tag = tag; + } + + @Override + public String getTag() { + return tag; + } +} diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java index c5dbef4b363..32e54765b18 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; @@ -30,13 +31,11 @@ import java.util.Map; * Base class for processors that manipulate strings and require a single "fields" array config value, which * holds a list of field names in string format. 
*/ -public abstract class AbstractStringProcessor implements Processor { - - private final String processorTag; +public abstract class AbstractStringProcessor extends AbstractProcessor { private final String field; - protected AbstractStringProcessor(String processorTag, String field) { - this.processorTag = processorTag; + protected AbstractStringProcessor(String tag, String field) { + super(tag); this.field = field; } @@ -53,11 +52,6 @@ public abstract class AbstractStringProcessor implements Processor { document.setFieldValue(field, process(val)); } - @Override - public String getTag() { - return processorTag; - } - protected abstract String process(String value); public static abstract class Factory extends AbstractProcessorFactory { diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java index 5917d5c5da2..deff384cf92 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java @@ -19,12 +19,12 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.core.ValueSource; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.Map; @@ -33,16 +33,15 @@ import java.util.Map; * provided values will be added. If the field is a scalar it will be converted to a single item list and the provided * values will be added to the newly created list. 
*/ -public class AppendProcessor implements Processor { +public class AppendProcessor extends AbstractProcessor { public static final String TYPE = "append"; - private final String processorTag; private final TemplateService.Template field; private final ValueSource value; - AppendProcessor(String processorTag, TemplateService.Template field, ValueSource value) { - this.processorTag = processorTag; + AppendProcessor(String tag, TemplateService.Template field, ValueSource value) { + super(tag); this.field = field; this.value = value; } @@ -65,11 +64,6 @@ public class AppendProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static final class Factory extends AbstractProcessorFactory { private final TemplateService templateService; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java index 58f5bc57d16..5b6bacf2ed1 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java @@ -19,10 +19,10 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.ArrayList; import java.util.List; @@ -33,7 +33,7 @@ import java.util.Map; * Processor that converts fields content to a different type. Supported types are: integer, float, boolean and string. * Throws exception if the field is not there or the conversion fails. 
*/ -public class ConvertProcessor implements Processor { +public class ConvertProcessor extends AbstractProcessor { enum Type { INTEGER { @@ -91,12 +91,11 @@ public class ConvertProcessor implements Processor { public static final String TYPE = "convert"; - private final String processorTag; private final String field; private final Type convertType; - ConvertProcessor(String processorTag, String field, Type convertType) { - this.processorTag = processorTag; + ConvertProcessor(String tag, String field, Type convertType) { + super(tag); this.field = field; this.convertType = convertType; } @@ -135,11 +134,6 @@ public class ConvertProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static class Factory extends AbstractProcessorFactory { @Override public ConvertProcessor doCreate(String processorTag, Map config) throws Exception { diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 61a4d142043..230871d5081 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -20,10 +20,10 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; @@ -36,12 +36,11 @@ import java.util.Map; import java.util.Optional; import java.util.function.Function; -public final class DateProcessor implements Processor { +public final class DateProcessor extends AbstractProcessor { public 
static final String TYPE = "date"; static final String DEFAULT_TARGET_FIELD = "@timestamp"; - private final String processorTag; private final DateTimeZone timezone; private final Locale locale; private final String matchField; @@ -49,8 +48,8 @@ public final class DateProcessor implements Processor { private final List matchFormats; private final List> dateParsers; - DateProcessor(String processorTag, DateTimeZone timezone, Locale locale, String matchField, List matchFormats, String targetField) { - this.processorTag = processorTag; + DateProcessor(String tag, DateTimeZone timezone, Locale locale, String matchField, List matchFormats, String targetField) { + super(tag); this.timezone = timezone; this.locale = locale; this.matchField = matchField; @@ -97,11 +96,6 @@ public final class DateProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - DateTimeZone getTimezone() { return timezone; } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java index 7ee146d70f1..295a9884997 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; @@ -33,16 +34,15 @@ import java.util.Map; * Processor that replaces dots in document field names with a * specified separator. 
*/ -public class DeDotProcessor implements Processor { +public class DeDotProcessor extends AbstractProcessor { public static final String TYPE = "dedot"; static final String DEFAULT_SEPARATOR = "_"; - private final String processorTag; private final String separator; - DeDotProcessor(String processorTag, String separator) { - this.processorTag = processorTag; + DeDotProcessor(String tag, String separator) { + super(tag); this.separator = separator; } @@ -60,11 +60,6 @@ public class DeDotProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - /** * Recursively iterates through Maps and Lists in search of map entries with * keys containing dots. The dots in these fields are replaced with {@link #separator}. diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java index 1af1a5f9a62..65b4b602bd0 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; @@ -31,15 +32,14 @@ import java.util.Map; * Processor that raises a runtime exception with a provided * error message. 
*/ -public class FailProcessor implements Processor { +public class FailProcessor extends AbstractProcessor { public static final String TYPE = "fail"; - private final String processorTag; private final TemplateService.Template message; - FailProcessor(String processorTag, TemplateService.Template message) { - this.processorTag = processorTag; + FailProcessor(String tag, TemplateService.Template message) { + super(tag); this.message = message; } @@ -57,11 +57,6 @@ public class FailProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static class Factory extends AbstractProcessorFactory { private final TemplateService templateService; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java index 763aefead8b..3dc4b3f0cad 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; @@ -32,17 +33,16 @@ import java.util.regex.Pattern; * Processor that allows to search for patterns in field content and replace them with corresponding string replacement. * Support fields of string type only, throws exception if a field is of a different type. 
*/ -public class GsubProcessor implements Processor { +public class GsubProcessor extends AbstractProcessor { public static final String TYPE = "gsub"; - private final String processorTag; private final String field; private final Pattern pattern; private final String replacement; - GsubProcessor(String processorTag, String field, Pattern pattern, String replacement) { - this.processorTag = processorTag; + GsubProcessor(String tag, String field, Pattern pattern, String replacement) { + super(tag); this.field = field; this.pattern = pattern; this.replacement = replacement; @@ -77,11 +77,6 @@ public class GsubProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static class Factory extends AbstractProcessorFactory { @Override public GsubProcessor doCreate(String processorTag, Map config) throws Exception { diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java index a2f50867c0d..3516929e26f 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; @@ -32,16 +33,15 @@ import java.util.stream.Collectors; * Processor that joins the different items of an array into a single string value using a separator between each item. * Throws exception is the specified field is not an array. 
*/ -public class JoinProcessor implements Processor { +public class JoinProcessor extends AbstractProcessor { public static final String TYPE = "join"; - private final String processorTag; private final String field; private final String separator; - JoinProcessor(String processorTag, String field, String separator) { - this.processorTag = processorTag; + JoinProcessor(String tag, String field, String separator) { + super(tag); this.field = field; this.separator = separator; } @@ -71,11 +71,6 @@ public class JoinProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static class Factory extends AbstractProcessorFactory { @Override public JoinProcessor doCreate(String processorTag, Map config) throws Exception { diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java index 282d1e29716..617efd9b480 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java @@ -46,8 +46,8 @@ public class LowercaseProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected LowercaseProcessor newProcessor(String processorTag, String field) { - return new LowercaseProcessor(processorTag, field); + protected LowercaseProcessor newProcessor(String tag, String field) { + return new LowercaseProcessor(tag, field); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java index 1a9ad7323bb..e994954a034 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java @@ -19,6 +19,7 @@ package 
org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.TemplateService; @@ -30,15 +31,14 @@ import java.util.Map; /** * Processor that removes existing fields. Nothing happens if the field is not present. */ -public class RemoveProcessor implements Processor { +public class RemoveProcessor extends AbstractProcessor { public static final String TYPE = "remove"; - private final String processorTag; private final TemplateService.Template field; - RemoveProcessor(String processorTag, TemplateService.Template field) { - this.processorTag = processorTag; + RemoveProcessor(String tag, TemplateService.Template field) { + super(tag); this.field = field; } @@ -56,10 +56,6 @@ public class RemoveProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } public static class Factory extends AbstractProcessorFactory { private final TemplateService templateService; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java index 5528d2f5210..7726a720b49 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; @@ -29,16 +30,15 @@ import java.util.Map; /** * Processor that allows to rename existing fields. Will throw exception if the field is not present. 
*/ -public class RenameProcessor implements Processor { +public class RenameProcessor extends AbstractProcessor { public static final String TYPE = "rename"; - private final String processorTag; private final String oldFieldName; private final String newFieldName; - RenameProcessor(String processorTag, String oldFieldName, String newFieldName) { - this.processorTag = processorTag; + RenameProcessor(String tag, String oldFieldName, String newFieldName) { + super(tag); this.oldFieldName = oldFieldName; this.newFieldName = newFieldName; } @@ -76,11 +76,6 @@ public class RenameProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static class Factory extends AbstractProcessorFactory { @Override public RenameProcessor doCreate(String processorTag, Map config) throws Exception { diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java index 8f706bd8893..e046a5f3bdb 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java @@ -19,12 +19,12 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.core.ValueSource; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.Map; @@ -32,16 +32,15 @@ import java.util.Map; * Processor that adds new fields with their corresponding values. If the field is already present, its value * will be replaced with the provided one. 
*/ -public class SetProcessor implements Processor { +public class SetProcessor extends AbstractProcessor { public static final String TYPE = "set"; - private final String processorTag; private final TemplateService.Template field; private final ValueSource value; - SetProcessor(String processorTag, TemplateService.Template field, ValueSource value) { - this.processorTag = processorTag; + SetProcessor(String tag, TemplateService.Template field, ValueSource value) { + super(tag); this.field = field; this.value = value; } @@ -64,11 +63,6 @@ public class SetProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static final class Factory extends AbstractProcessorFactory { private final TemplateService templateService; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java index a0f62769802..bfcfa47f7e2 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java @@ -19,10 +19,10 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.Arrays; import java.util.Map; @@ -32,16 +32,15 @@ import java.util.Map; * New field value will be an array containing all of the different extracted items. * Throws exception if the field is null or a type other than string. 
*/ -public class SplitProcessor implements Processor { +public class SplitProcessor extends AbstractProcessor { public static final String TYPE = "split"; - private final String processorTag; private final String field; private final String separator; - SplitProcessor(String processorTag, String field, String separator) { - this.processorTag = processorTag; + SplitProcessor(String tag, String field, String separator) { + super(tag); this.field = field; this.separator = separator; } @@ -68,11 +67,6 @@ public class SplitProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - public static class Factory extends AbstractProcessorFactory { @Override public SplitProcessor doCreate(String processorTag, Map config) throws Exception { diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java index 8a75ed24b43..c66cc848933 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java @@ -43,8 +43,8 @@ public class TrimProcessor extends AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected TrimProcessor newProcessor(String processorTag, String field) { - return new TrimProcessor(processorTag, field); + protected TrimProcessor newProcessor(String tag, String field) { + return new TrimProcessor(tag, field); } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java index 600365e5afe..e6a1f77cb86 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java @@ -45,8 +45,8 @@ public class UppercaseProcessor extends 
AbstractStringProcessor { public static class Factory extends AbstractStringProcessor.Factory { @Override - protected UppercaseProcessor newProcessor(String processorTag, String field) { - return new UppercaseProcessor(processorTag, field); + protected UppercaseProcessor newProcessor(String tag, String field) { + return new UppercaseProcessor(tag, field); } } } diff --git a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java index 3a40f9fa0df..4df8d673072 100644 --- a/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java +++ b/modules/ingest-grok/src/main/java/org/elasticsearch/ingest/grok/GrokProcessor.java @@ -19,24 +19,23 @@ package org.elasticsearch.ingest.grok; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; -import org.elasticsearch.ingest.core.Processor; import java.util.HashMap; import java.util.Map; -public final class GrokProcessor implements Processor { +public final class GrokProcessor extends AbstractProcessor { public static final String TYPE = "grok"; - private final String processorTag; private final String matchField; private final Grok grok; - public GrokProcessor(String processorTag, Grok grok, String matchField) { - this.processorTag = processorTag; + public GrokProcessor(String tag, Grok grok, String matchField) { + super(tag); this.matchField = matchField; this.grok = grok; } @@ -57,11 +56,6 @@ public final class GrokProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - String getMatchField() { return matchField; } diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java 
b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index c1eb6830d49..b6ec723e1f3 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -32,9 +32,9 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; -import org.elasticsearch.ingest.core.Processor; import java.io.Closeable; import java.io.IOException; @@ -53,18 +53,17 @@ import java.util.Set; import static org.elasticsearch.ingest.core.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.core.ConfigurationUtils.readStringProperty; -public final class GeoIpProcessor implements Processor { +public final class GeoIpProcessor extends AbstractProcessor { public static final String TYPE = "geoip"; - private final String processorTag; private final String sourceField; private final String targetField; private final DatabaseReader dbReader; private final Set fields; - GeoIpProcessor(String processorTag, String sourceField, DatabaseReader dbReader, String targetField, Set fields) throws IOException { - this.processorTag = processorTag; + GeoIpProcessor(String tag, String sourceField, DatabaseReader dbReader, String targetField, Set fields) throws IOException { + super(tag); this.sourceField = sourceField; this.targetField = targetField; this.dbReader = dbReader; @@ -103,11 +102,6 @@ public final class GeoIpProcessor implements Processor { return TYPE; } - @Override - public String getTag() { - return processorTag; - } - String getSourceField() { return sourceField; } From b61dc6d69c7dd519f825e59624b8f6cbdeb8725c Mon 
Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 21 Jan 2016 12:01:10 +0100 Subject: [PATCH 260/347] cleanup generics for the ingest action filters --- .../org/elasticsearch/action/ingest/IngestActionFilter.java | 4 ++-- .../elasticsearch/action/ingest/IngestProxyActionFilter.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 2a10570cff7..b35e24c51f0 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -55,7 +55,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } @Override - public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public , Response extends ActionResponse> void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { switch (action) { case IndexAction.NAME: IndexRequest indexRequest = (IndexRequest) request; @@ -82,7 +82,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio } @Override - public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void apply(String action, Response response, ActionListener listener, ActionFilterChain chain) { chain.proceed(action, response, listener); } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 4eec053b8f3..fef7a37bd69 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -61,7 +61,7 @@ public final class 
IngestProxyActionFilter implements ActionFilter { } @Override - public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public , Response extends ActionResponse> void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { Action ingestAction; switch (action) { case IndexAction.NAME: @@ -100,7 +100,7 @@ public final class IngestProxyActionFilter implements ActionFilter { } @Override - public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void apply(String action, Response response, ActionListener listener, ActionFilterChain chain) { chain.proceed(action, response, listener); } From ae48422072d29987f32e88b18bc12b561f4feced Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 21 Jan 2016 13:59:00 +0100 Subject: [PATCH 261/347] Fix IngestMetadata parsing and add unittest --- .../common/xcontent/ObjectParser.java | 8 ++- .../elasticsearch/ingest/IngestMetadata.java | 28 ++++---- .../ingest/PipelineConfiguration.java | 36 +++++------ .../ingest/IngestMetadataTests.java | 64 +++++++++++++++++++ .../suggest/CompletionSuggestSearchIT.java | 2 +- 5 files changed, 103 insertions(+), 35 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java index 979a1f2522c..395dcad8221 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java @@ -223,7 +223,7 @@ public final class ObjectParser implements BiFunction implements BiFunction consumer.accept(v, objectParser.apply(p, c)), field, ValueType.OBJECT); } + public void declareObjectArray(BiConsumer> consumer, BiFunction objectParser, ParseField field) { + 
declareField((p, v, c) -> consumer.accept(v, parseArray(p, () -> objectParser.apply(p, c))), field, ValueType.OBJECT_ARRAY); + } + + public void declareObjectOrDefault(BiConsumer consumer, BiFunction objectParser, Supplier defaultValue, ParseField field) { declareField((p, v, c) -> { if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { @@ -333,6 +338,7 @@ public final class ObjectParser implements BiFunction impl public final static String TYPE = "ingest"; public final static IngestMetadata PROTO = new IngestMetadata(); - private final ParseField PIPELINES_FIELD = new ParseField("pipeline"); + private static final ParseField PIPELINES_FIELD = new ParseField("pipeline"); + private static final ObjectParser, Void> INGEST_METADATA_PARSER = new ObjectParser<>("ingest_metadata", ArrayList::new); + + static { + INGEST_METADATA_PARSER.declareObjectArray(List::addAll , PipelineConfiguration.getParser(), PIPELINES_FIELD); + } + // We can't use Pipeline class directly in cluster state, because we don't have the processor factories around when // IngestMetadata is registered as custom metadata. 
@@ -86,20 +95,11 @@ public final class IngestMetadata extends AbstractDiffable impl @Override public MetaData.Custom fromXContent(XContentParser parser) throws IOException { - ObjectParser ingestMetaDataParser = new ObjectParser<>("ingest_metadata", null); - Map pipelines = new HashMap<>(); - ingestMetaDataParser.declareField((parser1, aVoid, aVoid2) -> { - XContentParser.Token token; - while ((token = parser1.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.START_OBJECT) { - PipelineConfiguration pipeline = new PipelineConfiguration.Builder(parser1).build(); - pipelines.put(pipeline.getId(), pipeline); - } - } - }, PIPELINES_FIELD, ObjectParser.ValueType.OBJECT); - ingestMetaDataParser.parse(parser); - + List configs = INGEST_METADATA_PARSER.parse(parser); + for (PipelineConfiguration pipeline : configs) { + pipelines.put(pipeline.getId(), pipeline); + } return new IngestMetadata(pipelines); } diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index 628cf2446cb..90ab2a76c2e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Map; import java.util.Objects; +import java.util.function.BiFunction; /** * Encapsulates a pipeline's id and configuration as a blob @@ -46,36 +47,33 @@ public final class PipelineConfiguration implements Writeable PARSER = new ObjectParser<>("pipeline_config", Builder::new); + static { + PARSER.declareString(Builder::setId, new ParseField("id")); + PARSER.declareField((parser, builder, aVoid) -> { + XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); + XContentHelper.copyCurrentStructure(contentBuilder.generator(), parser); + 
builder.setConfig(contentBuilder.bytes()); + }, new ParseField("config"), ObjectParser.ValueType.OBJECT); + } - public static class Builder { - - private final static ObjectParser PARSER = new ObjectParser<>("pipeline_config", null); - - static { - PARSER.declareString(Builder::setId, new ParseField("id")); - PARSER.declareField((parser, builder, aVoid) -> { - XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); - XContentHelper.copyCurrentEvent(contentBuilder.generator(), parser); - builder.setConfig(contentBuilder.bytes()); - }, new ParseField("config"), ObjectParser.ValueType.OBJECT); - } + public static BiFunction getParser() { + return (p, c) -> PARSER.apply(p ,c).build(); + } + private static class Builder { private String id; private BytesReference config; - public Builder(XContentParser parser) throws IOException { - PARSER.parse(parser, this); - } - - public void setId(String id) { + void setId(String id) { this.id = id; } - public void setConfig(BytesReference config) { + void setConfig(BytesReference config) { this.config = config; } - public PipelineConfiguration build() { + PipelineConfiguration build() { return new PipelineConfiguration(id, config); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java new file mode 100644 index 00000000000..a6cf12389a0 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class IngestMetadataTests extends ESTestCase { + + public void testFromXContent() throws IOException { + PipelineConfiguration pipeline = new PipelineConfiguration( + "1",new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}") + ); + PipelineConfiguration pipeline2 = new PipelineConfiguration( + "2",new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field1\", \"value\": \"_value1\"}}]}") + ); + Map map = new HashMap<>(); + map.put(pipeline.getId(), pipeline); + map.put(pipeline2.getId(), pipeline2); + IngestMetadata ingestMetadata = new IngestMetadata(map); + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.prettyPrint(); + builder.startObject(); + ingestMetadata.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + String string = builder.string(); + final XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(string); + MetaData.Custom custom = 
ingestMetadata.fromXContent(parser); + assertTrue(custom instanceof IngestMetadata); + IngestMetadata m = (IngestMetadata) custom; + assertEquals(2, m.getPipelines().size()); + assertEquals("1", m.getPipelines().get("1").getId()); + assertEquals("2", m.getPipelines().get("2").getId()); + assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap()); + assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap()); + } +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index fac7f71446a..1543433be32 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -230,7 +230,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { SuggestResponse suggestResponse = client().suggest(request).get(); assertThat(suggestResponse.getSuccessfulShards(), equalTo(0)); for (ShardOperationFailedException exception : suggestResponse.getShardFailures()) { - assertThat(exception.reason(), containsString("ParsingException[[completion] failed to parse field [payload]]; nested: IllegalStateException[expected value but got [START_OBJECT]]")); + assertThat(exception.reason(), containsString("ParsingException[[completion] failed to parse field [payload]]; nested: IllegalStateException[Can't get text on a START_OBJECT")); } } From 086633a5ca0633aeaab81c6d1f2ebe04790335d4 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 20 Jan 2016 15:10:22 +0100 Subject: [PATCH 262/347] Make sort order enum writable. 
Related to #15178 --- .../elasticsearch/search/sort/SortOrder.java | 34 +++++++++++- .../search/sort/SortOrderTests.java | 52 +++++++++++++++++++ 2 files changed, 85 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java index cb2bca28fb3..001924d1bdf 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java @@ -19,12 +19,19 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Locale; + /** * A sorting order. * * */ -public enum SortOrder { +public enum SortOrder implements Writeable { /** * Ascending order. 
*/ @@ -42,5 +49,30 @@ public enum SortOrder { public String toString() { return "desc"; } + }; + + public static final SortOrder DEFAULT = DESC; + private static final SortOrder PROTOTYPE = DEFAULT; + + @Override + public SortOrder readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown SortOrder ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static SortOrder readOrderFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + public static SortOrder fromString(String op) { + return valueOf(op.toUpperCase(Locale.ROOT)); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java new file mode 100644 index 00000000000..e505ec68e6a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class SortOrderTests extends ESTestCase { + + /** Check that ordinals remain stable as we rely on them for serialisation. */ + public void testDistanceUnitNames() { + assertEquals(0, SortOrder.ASC.ordinal()); + assertEquals(1, SortOrder.DESC.ordinal()); + } + + public void testReadWrite() throws Exception { + for (SortOrder unit : SortOrder.values()) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + unit.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat("Roundtrip serialisation failed.", SortOrder.readOrderFrom(in), equalTo(unit)); + } + } + } + } + + public void testFromString() { + for (SortOrder unit : SortOrder.values()) { + assertThat("Roundtrip string parsing failed.", SortOrder.fromString(unit.toString()), equalTo(unit)); + } + } +} From b1afde257bd5d7d737bd47880e03e8d1def290f7 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 21 Jan 2016 15:57:52 +0100 Subject: [PATCH 263/347] Drop multi data path upgrade tool Since we require upgrades to 2.x we don't need this anymore on master. 
--- .../common/util/MultiDataPathUpgrader.java | 383 ------------------ .../gateway/GatewayMetaState.java | 2 - .../OldIndexBackwardsCompatibilityIT.java | 1 - .../util/MultiDataPathUpgraderTests.java | 297 -------------- 4 files changed, 683 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java delete mode 100644 core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java deleted file mode 100644 index 8d049003824..00000000000 --- a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java +++ /dev/null @@ -1,383 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.common.util; - -import org.apache.lucene.index.CheckIndex; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.Lock; -import org.apache.lucene.store.LockObtainFailedException; -import org.apache.lucene.store.SimpleFSDirectory; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.env.ShardLock; -import org.elasticsearch.gateway.MetaDataStateFormat; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardPath; -import org.elasticsearch.index.shard.ShardStateMetaData; - -import java.io.IOException; -import java.io.PrintStream; -import java.nio.charset.StandardCharsets; -import java.nio.file.DirectoryStream; -import java.nio.file.FileStore; -import java.nio.file.FileVisitResult; -import java.nio.file.FileVisitor; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/** - */ -public class MultiDataPathUpgrader { - - private final NodeEnvironment nodeEnvironment; - private final ESLogger logger = Loggers.getLogger(getClass()); - - - /** - * Creates a new upgrader instance - * @param nodeEnvironment the node env to operate on. 
- * - */ - public MultiDataPathUpgrader(NodeEnvironment nodeEnvironment) { - this.nodeEnvironment = nodeEnvironment; - } - - - /** - * Upgrades the given shard Id from multiple shard paths into the given target path. - * - * @see #pickShardPath(org.elasticsearch.index.shard.ShardId) - */ - public void upgrade(ShardId shard, ShardPath targetPath) throws IOException { - final Path[] paths = nodeEnvironment.availableShardPaths(shard); // custom data path doesn't need upgrading - if (isTargetPathConfigured(paths, targetPath) == false) { - throw new IllegalArgumentException("shard path must be one of the shards data paths"); - } - assert needsUpgrading(shard) : "Should not upgrade a path that needs no upgrading"; - logger.info("{} upgrading multi data dir to {}", shard, targetPath.getDataPath()); - final ShardStateMetaData loaded = ShardStateMetaData.FORMAT.loadLatestState(logger, paths); - if (loaded == null) { - throw new IllegalStateException(shard + " no shard state found in any of: " + Arrays.toString(paths) + " please check and remove them if possible"); - } - logger.info("{} loaded shard state {}", shard, loaded); - - ShardStateMetaData.FORMAT.write(loaded, loaded.version, targetPath.getShardStatePath()); - Files.createDirectories(targetPath.resolveIndex()); - try (SimpleFSDirectory directory = new SimpleFSDirectory(targetPath.resolveIndex())) { - try (final Lock lock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { - upgradeFiles(shard, targetPath, targetPath.resolveIndex(), ShardPath.INDEX_FOLDER_NAME, paths); - } catch (LockObtainFailedException ex) { - throw new IllegalStateException("Can't obtain lock on " + targetPath.resolveIndex(), ex); - } - - } - - - upgradeFiles(shard, targetPath, targetPath.resolveTranslog(), ShardPath.TRANSLOG_FOLDER_NAME, paths); - - logger.info("{} wipe upgraded directories", shard); - for (Path path : paths) { - if (path.equals(targetPath.getShardStatePath()) == false) { - logger.info("{} wipe shard directories: [{}]", 
shard, path); - IOUtils.rm(path); - } - } - - if (FileSystemUtils.files(targetPath.resolveIndex()).length == 0) { - throw new IllegalStateException("index folder [" + targetPath.resolveIndex() + "] is empty"); - } - - if (FileSystemUtils.files(targetPath.resolveTranslog()).length == 0) { - throw new IllegalStateException("translog folder [" + targetPath.resolveTranslog() + "] is empty"); - } - } - - /** - * Runs check-index on the target shard and throws an exception if it failed - */ - public void checkIndex(ShardPath targetPath) throws IOException { - BytesStreamOutput os = new BytesStreamOutput(); - PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name()); - try (Directory directory = new SimpleFSDirectory(targetPath.resolveIndex()); - final CheckIndex checkIndex = new CheckIndex(directory)) { - checkIndex.setInfoStream(out); - CheckIndex.Status status = checkIndex.checkIndex(); - out.flush(); - if (!status.clean) { - logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); - throw new IllegalStateException("index check failure"); - } - } - } - - /** - * Returns true iff the given shard needs upgrading. - */ - public boolean needsUpgrading(ShardId shard) { - final Path[] paths = nodeEnvironment.availableShardPaths(shard); - // custom data path doesn't need upgrading neither single path envs - if (paths.length > 1) { - int numPathsExist = 0; - for (Path path : paths) { - if (Files.exists(path.resolve(MetaDataStateFormat.STATE_DIR_NAME))) { - numPathsExist++; - if (numPathsExist > 1) { - return true; - } - } - } - } - return false; - } - - /** - * Picks a target ShardPath to allocate and upgrade the given shard to. It picks the target based on a simple - * heuristic: - *
    - *
  • if the smallest datapath has 2x more space available that the shards total size the datapath with the most bytes for that shard is picked to minimize the amount of bytes to copy
  • - *
  • otherwise the largest available datapath is used as the target no matter how big of a slice of the shard it already holds.
  • - *
- */ - public ShardPath pickShardPath(ShardId shard) throws IOException { - if (needsUpgrading(shard) == false) { - throw new IllegalStateException("Shard doesn't need upgrading"); - } - final NodeEnvironment.NodePath[] paths = nodeEnvironment.nodePaths(); - - // if we need upgrading make sure we have all paths. - for (NodeEnvironment.NodePath path : paths) { - Files.createDirectories(path.resolve(shard)); - } - final ShardFileInfo[] shardFileInfo = getShardFileInfo(shard, paths); - long totalBytesUsedByShard = 0; - long leastUsableSpace = Long.MAX_VALUE; - long mostUsableSpace = Long.MIN_VALUE; - assert shardFileInfo.length == nodeEnvironment.availableShardPaths(shard).length; - for (ShardFileInfo info : shardFileInfo) { - totalBytesUsedByShard += info.spaceUsedByShard; - leastUsableSpace = Math.min(leastUsableSpace, info.usableSpace + info.spaceUsedByShard); - mostUsableSpace = Math.max(mostUsableSpace, info.usableSpace + info.spaceUsedByShard); - } - - if (mostUsableSpace < totalBytesUsedByShard) { - throw new IllegalStateException("Can't upgrade path available space: " + new ByteSizeValue(mostUsableSpace) + " required space: " + new ByteSizeValue(totalBytesUsedByShard)); - } - ShardFileInfo target = shardFileInfo[0]; - if (leastUsableSpace >= (2 * totalBytesUsedByShard)) { - for (ShardFileInfo info : shardFileInfo) { - if (info.spaceUsedByShard > target.spaceUsedByShard) { - target = info; - } - } - } else { - for (ShardFileInfo info : shardFileInfo) { - if (info.usableSpace > target.usableSpace) { - target = info; - } - } - } - return new ShardPath(false, target.path, target.path, IndexMetaData.INDEX_UUID_NA_VALUE /* we don't know */, shard); - } - - private ShardFileInfo[] getShardFileInfo(ShardId shard, NodeEnvironment.NodePath[] paths) throws IOException { - final ShardFileInfo[] info = new ShardFileInfo[paths.length]; - for (int i = 0; i < info.length; i++) { - Path path = paths[i].resolve(shard); - final long usabelSpace = getUsabelSpace(paths[i]); - 
info[i] = new ShardFileInfo(path, usabelSpace, getSpaceUsedByShard(path)); - } - return info; - } - - protected long getSpaceUsedByShard(Path path) throws IOException { - final long[] spaceUsedByShard = new long[] {0}; - if (Files.exists(path)) { - Files.walkFileTree(path, new FileVisitor() { - @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - if (attrs.isRegularFile()) { - spaceUsedByShard[0] += attrs.size(); - } - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } - }); - } - return spaceUsedByShard[0]; - } - - protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException { - FileStore fileStore = path.fileStore; - return fileStore.getUsableSpace(); - } - - static class ShardFileInfo { - final Path path; - final long usableSpace; - final long spaceUsedByShard; - - ShardFileInfo(Path path, long usableSpace, long spaceUsedByShard) { - this.path = path; - this.usableSpace = usableSpace; - this.spaceUsedByShard = spaceUsedByShard; - } - } - - - - private void upgradeFiles(ShardId shard, ShardPath targetPath, final Path targetDir, String folderName, Path[] paths) throws IOException { - List movedFiles = new ArrayList<>(); - for (Path path : paths) { - if (path.equals(targetPath.getDataPath()) == false) { - final Path sourceDir = path.resolve(folderName); - if (Files.exists(sourceDir)) { - logger.info("{} upgrading [{}] from [{}] to [{}]", shard, folderName, sourceDir, targetDir); - try (DirectoryStream stream = Files.newDirectoryStream(sourceDir)) { - 
Files.createDirectories(targetDir); - for (Path file : stream) { - if (IndexWriter.WRITE_LOCK_NAME.equals(file.getFileName().toString()) || Files.isDirectory(file)) { - continue; // skip write.lock - } - logger.info("{} move file [{}] size: [{}]", shard, file.getFileName(), Files.size(file)); - final Path targetFile = targetDir.resolve(file.getFileName()); - /* We are pessimistic and do a copy first to the other path and then and atomic move to rename it such that - in the worst case the file exists twice but is never lost or half written.*/ - final Path targetTempFile = Files.createTempFile(targetDir, "upgrade_", "_" + file.getFileName().toString()); - Files.copy(file, targetTempFile, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING); - Files.move(targetTempFile, targetFile, StandardCopyOption.ATOMIC_MOVE); // we are on the same FS - this must work otherwise all bets are off - Files.delete(file); - movedFiles.add(targetFile); - } - } - } - } - } - if (movedFiles.isEmpty() == false) { - // fsync later it might be on disk already - logger.info("{} fsync files", shard); - for (Path moved : movedFiles) { - logger.info("{} syncing [{}]", shard, moved.getFileName()); - IOUtils.fsync(moved, false); - } - logger.info("{} syncing directory [{}]", shard, targetDir); - IOUtils.fsync(targetDir, true); - } - } - - - /** - * Returns true iff the target path is one of the given paths. - */ - private boolean isTargetPathConfigured(final Path[] paths, ShardPath targetPath) { - for (Path path : paths) { - if (path.equals(targetPath.getDataPath())) { - return true; - } - } - return false; - } - - /** - * Runs an upgrade on all shards located under the given node environment if there is more than 1 data.path configured - * otherwise this method will return immediately. 
- */ - public static void upgradeMultiDataPath(NodeEnvironment nodeEnv, ESLogger logger) throws IOException { - if (nodeEnv.nodeDataPaths().length > 1) { - final MultiDataPathUpgrader upgrader = new MultiDataPathUpgrader(nodeEnv); - final Set allIndices = nodeEnv.findAllIndices(); - - for (String index : allIndices) { - for (ShardId shardId : findAllShardIds(nodeEnv.indexPaths(new Index(index)))) { - try (ShardLock lock = nodeEnv.shardLock(shardId, 0)) { - if (upgrader.needsUpgrading(shardId)) { - final ShardPath shardPath = upgrader.pickShardPath(shardId); - upgrader.upgrade(shardId, shardPath); - // we have to check if the index path exists since we might - // have only upgraded the shard state that is written under /indexname/shardid/_state - // in the case we upgraded a dedicated index directory index - if (Files.exists(shardPath.resolveIndex())) { - upgrader.checkIndex(shardPath); - } - } else { - logger.debug("{} no upgrade needed - already upgraded"); - } - } - } - } - } - } - - private static Set findAllShardIds(Path... 
locations) throws IOException { - final Set shardIds = new HashSet<>(); - for (final Path location : locations) { - if (Files.isDirectory(location)) { - shardIds.addAll(findAllShardsForIndex(location)); - } - } - return shardIds; - } - - private static Set findAllShardsForIndex(Path indexPath) throws IOException { - Set shardIds = new HashSet<>(); - if (Files.isDirectory(indexPath)) { - try (DirectoryStream stream = Files.newDirectoryStream(indexPath)) { - String currentIndex = indexPath.getFileName().toString(); - for (Path shardPath : stream) { - String fileName = shardPath.getFileName().toString(); - if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) { - int shardId = Integer.parseInt(fileName); - ShardId id = new ShardId(currentIndex, shardId); - shardIds.add(id); - } - } - } - } - return shardIds; - } - -} diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 117a0c6959b..c6a65ff082c 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.MultiDataPathUpgrader; import org.elasticsearch.env.NodeEnvironment; import java.nio.file.DirectoryStream; @@ -77,7 +76,6 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL if (DiscoveryNode.dataNode(settings)) { ensureNoPre019ShardState(nodeEnv); - MultiDataPathUpgrader.upgradeMultiDataPath(nodeEnv, logger); } if (DiscoveryNode.masterNode(settings) || DiscoveryNode.dataNode(settings)) { diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java 
b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 8a18b728200..481276fc292 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.MultiDataPathUpgrader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java deleted file mode 100644 index 25c765e6480..00000000000 --- a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.common.util; - -import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestUtil; -import org.elasticsearch.bwcompat.OldIndexBackwardsCompatibilityIT; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.routing.AllocationId; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.gateway.MetaDataStateFormat; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardPath; -import org.elasticsearch.index.shard.ShardStateMetaData; -import org.elasticsearch.test.ESTestCase; - -import java.io.BufferedWriter; -import java.io.IOException; -import java.io.InputStream; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.DirectoryStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -/** - */ -@LuceneTestCase.SuppressFileSystems("ExtrasFS") -public class MultiDataPathUpgraderTests extends ESTestCase { - - public void testUpgradeRandomPaths() throws IOException { - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - final String uuid = Strings.base64UUID(); - final ShardId shardId = new ShardId("foo", 0); - final Path[] shardDataPaths = nodeEnvironment.availableShardPaths(shardId); - if (nodeEnvironment.nodeDataPaths().length == 1) { - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - assertFalse(helper.needsUpgrading(shardId)); - return; - } - int numIdxFiles = 0; - int numTranslogFiles = 0; - int metaStateVersion = 0; 
- for (Path shardPath : shardDataPaths) { - final Path translog = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); - final Path idx = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME); - Files.createDirectories(translog); - Files.createDirectories(idx); - int numFiles = randomIntBetween(1, 10); - for (int i = 0; i < numFiles; i++, numIdxFiles++) { - String filename = Integer.toString(numIdxFiles); - try (BufferedWriter w = Files.newBufferedWriter(idx.resolve(filename + ".tst"), StandardCharsets.UTF_8)) { - w.write(filename); - } - } - numFiles = randomIntBetween(1, 10); - for (int i = 0; i < numFiles; i++, numTranslogFiles++) { - String filename = Integer.toString(numTranslogFiles); - try (BufferedWriter w = Files.newBufferedWriter(translog.resolve(filename + ".translog"), StandardCharsets.UTF_8)) { - w.write(filename); - } - } - ++metaStateVersion; - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(metaStateVersion, true, uuid, AllocationId.newInitializing()), metaStateVersion, shardDataPaths); - } - final Path path = randomFrom(shardDataPaths); - ShardPath targetPath = new ShardPath(false, path, path, uuid, new ShardId("foo", 0)); - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - helper.upgrade(shardId, targetPath); - assertFalse(helper.needsUpgrading(shardId)); - if (shardDataPaths.length > 1) { - for (Path shardPath : shardDataPaths) { - if (shardPath.equals(targetPath.getDataPath())) { - continue; - } - final Path translog = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); - final Path idx = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME); - final Path state = shardPath.resolve(MetaDataStateFormat.STATE_DIR_NAME); - assertFalse(Files.exists(translog)); - assertFalse(Files.exists(idx)); - assertFalse(Files.exists(state)); - assertFalse(Files.exists(shardPath)); - } - } - - final ShardStateMetaData stateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger, targetPath.getShardStatePath()); - assertEquals(metaStateVersion, 
stateMetaData.version); - assertTrue(stateMetaData.primary); - assertEquals(uuid, stateMetaData.indexUUID); - final Path translog = targetPath.getDataPath().resolve(ShardPath.TRANSLOG_FOLDER_NAME); - final Path idx = targetPath.getDataPath().resolve(ShardPath.INDEX_FOLDER_NAME); - Files.deleteIfExists(idx.resolve("write.lock")); - assertEquals(numTranslogFiles, FileSystemUtils.files(translog).length); - assertEquals(numIdxFiles, FileSystemUtils.files(idx).length); - final HashSet translogFiles = Sets.newHashSet(FileSystemUtils.files(translog)); - for (int i = 0; i < numTranslogFiles; i++) { - final String name = Integer.toString(i); - translogFiles.contains(translog.resolve(name + ".translog")); - byte[] content = Files.readAllBytes(translog.resolve(name + ".translog")); - assertEquals(name , new String(content, StandardCharsets.UTF_8)); - } - final HashSet idxFiles = Sets.newHashSet(FileSystemUtils.files(idx)); - for (int i = 0; i < numIdxFiles; i++) { - final String name = Integer.toString(i); - idxFiles.contains(idx.resolve(name + ".tst")); - byte[] content = Files.readAllBytes(idx.resolve(name + ".tst")); - assertEquals(name , new String(content, StandardCharsets.UTF_8)); - } - } - } - - /** - * Run upgrade on a real bwc index - */ - public void testUpgradeRealIndex() throws IOException, URISyntaxException { - List indexes = new ArrayList<>(); - try (DirectoryStream stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) { - for (Path path : stream) { - indexes.add(path); - } - } - CollectionUtil.introSort(indexes, new Comparator() { - @Override - public int compare(Path o1, Path o2) { - return o1.getFileName().compareTo(o2.getFileName()); - } - }); - final ShardId shardId = new ShardId("test", 0); - final Path path = randomFrom(indexes); - final Path indexFile = path; - final String indexName = indexFile.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT); - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - if 
(nodeEnvironment.nodeDataPaths().length == 1) { - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - assertFalse(helper.needsUpgrading(shardId)); - return; - } - Path unzipDir = createTempDir(); - Path unzipDataDir = unzipDir.resolve("data"); - // decompress the index - try (InputStream stream = Files.newInputStream(indexFile)) { - TestUtil.unzip(stream, unzipDir); - } - // check it is unique - assertTrue(Files.exists(unzipDataDir)); - Path[] list = FileSystemUtils.files(unzipDataDir); - if (list.length != 1) { - throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length); - } - // the bwc scripts packs the indices under this path - Path src = list[0].resolve("nodes/0/indices/" + indexName); - assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src)); - Path[] multiDataPath = new Path[nodeEnvironment.nodeDataPaths().length]; - int i = 0; - for (NodeEnvironment.NodePath nodePath : nodeEnvironment.nodePaths()) { - multiDataPath[i++] = nodePath.indicesPath; - } - logger.info("--> injecting index [{}] into multiple data paths", indexName); - OldIndexBackwardsCompatibilityIT.copyIndex(logger, src, indexName, multiDataPath); - final ShardPath shardPath = new ShardPath(false, nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], IndexMetaData.INDEX_UUID_NA_VALUE, new ShardId(indexName, 0)); - - logger.info("{}", (Object)FileSystemUtils.files(shardPath.resolveIndex())); - - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - helper.upgrade(new ShardId(indexName, 0), shardPath); - helper.checkIndex(shardPath); - assertFalse(helper.needsUpgrading(new ShardId(indexName, 0))); - } - } - - public void testNeedsUpgrade() throws IOException { - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - String uuid = Strings.randomBase64UUID(); - final ShardId shardId 
= new ShardId("foo", 0); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(1, true, uuid, AllocationId.newInitializing()), 1, nodeEnvironment.availableShardPaths(shardId)); - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - boolean multiDataPaths = nodeEnvironment.nodeDataPaths().length > 1; - boolean needsUpgrading = helper.needsUpgrading(shardId); - if (multiDataPaths) { - assertTrue(needsUpgrading); - } else { - assertFalse(needsUpgrading); - } - } - } - - public void testPickTargetShardPath() throws IOException { - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - final ShardId shard = new ShardId("foo", 0); - final Path[] paths = nodeEnvironment.availableShardPaths(shard); - if (paths.length == 1) { - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - try { - helper.pickShardPath(new ShardId("foo", 0)); - fail("one path needs no upgrading"); - } catch (IllegalStateException ex) { - // only one path - } - } else { - final Map> pathToSpace = new HashMap<>(); - final Path expectedPath; - if (randomBoolean()) { // path with most of the file bytes - expectedPath = randomFrom(paths); - long[] used = new long[paths.length]; - long sumSpaceUsed = 0; - for (int i = 0; i < used.length; i++) { - long spaceUsed = paths[i] == expectedPath ? 
randomIntBetween(101, 200) : randomIntBetween(10, 100); - sumSpaceUsed += spaceUsed; - used[i] = spaceUsed; - } - for (int i = 0; i < used.length; i++) { - long availalbe = randomIntBetween((int)(2*sumSpaceUsed-used[i]), 4 * (int)sumSpaceUsed); - pathToSpace.put(paths[i], new Tuple<>(availalbe, used[i])); - } - } else { // path with largest available space - expectedPath = randomFrom(paths); - long[] used = new long[paths.length]; - long sumSpaceUsed = 0; - for (int i = 0; i < used.length; i++) { - long spaceUsed = randomIntBetween(10, 100); - sumSpaceUsed += spaceUsed; - used[i] = spaceUsed; - } - - for (int i = 0; i < used.length; i++) { - long availalbe = paths[i] == expectedPath ? randomIntBetween((int)(sumSpaceUsed), (int)(2*sumSpaceUsed)) : randomIntBetween(0, (int)(sumSpaceUsed) - 1) ; - pathToSpace.put(paths[i], new Tuple<>(availalbe, used[i])); - } - - } - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment) { - @Override - protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException { - return pathToSpace.get(path.resolve(shard)).v1(); - } - - @Override - protected long getSpaceUsedByShard(Path path) throws IOException { - return pathToSpace.get(path).v2(); - } - }; - String uuid = Strings.randomBase64UUID(); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(1, true, uuid, AllocationId.newInitializing()), 1, paths); - final ShardPath shardPath = helper.pickShardPath(new ShardId("foo", 0)); - assertEquals(expectedPath, shardPath.getDataPath()); - assertEquals(expectedPath, shardPath.getShardStatePath()); - } - - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment) { - @Override - protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException { - return randomIntBetween(0, 10); - } - - @Override - protected long getSpaceUsedByShard(Path path) throws IOException { - return randomIntBetween(11, 20); - } - }; - - try { - helper.pickShardPath(new ShardId("foo", 0)); - fail("not 
enough space"); - } catch (IllegalStateException ex) { - // not enough space - } - } - } -} From 8f063b0a0773d1ad05d86c3361860fc1c785c193 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 21 Jan 2016 17:22:15 +0100 Subject: [PATCH 264/347] Do not apply minimum_should_match on auto generated boolean query if the coordination factor is disabled. Affects match, multi_match, query_string and simple_query_string queries. Direct bool queries are not affected anymore (minimum_should_match is applied even if the coord factor is disabled). --- .../org/elasticsearch/common/lucene/search/Queries.java | 6 ------ .../org/elasticsearch/index/query/MatchQueryBuilder.java | 5 ++++- .../elasticsearch/index/query/QueryStringQueryBuilder.java | 5 ++++- .../elasticsearch/index/query/SimpleQueryStringBuilder.java | 5 ++++- .../org/elasticsearch/index/search/MultiMatchQuery.java | 5 ++++- .../elasticsearch/index/query/BoolQueryBuilderTests.java | 2 +- 6 files changed, 17 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 01184d1cffb..73c3fc9400d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -117,12 +117,6 @@ public class Queries { if (minimumShouldMatch == null) { return query; } - // Queries with a single word expanded with synonyms - // have their coordination factor disabled (@see org.apache.lucene.util.QueryBuilder#analyzeBoolean()). - // minimumShouldMatch should not be applicable in such case. 
- if (query.isCoordDisabled()) { - return query; - } int optionalClauses = 0; for (BooleanClause c : query.clauses()) { if (c.getOccur() == BooleanClause.Occur.SHOULD) { diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index d9a99cc50cb..9184281607d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -372,7 +372,10 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { return null; } - if (query instanceof BooleanQuery) { + // If the coordination factor is disabled on a boolean query we don't apply the minimum should match. + // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word + // and multiple variations of the same word in the query (synonyms for instance). + if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) { query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); } else if (query instanceof ExtendedCommonTermsQuery) { ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 59e04e40951..fcab39b96e0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -735,7 +735,10 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder Date: Thu, 21 Jan 2016 17:22:33 +0100 Subject: [PATCH 265/347] Also remove upgrade from OldIndexBackwardsCompatibilityIT --- .../bwcompat/OldIndexBackwardsCompatibilityIT.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git 
a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 481276fc292..22d93f024e1 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -207,10 +207,6 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { } void importIndex(String indexName) throws IOException { - final Iterable instances = internalCluster().getInstances(NodeEnvironment.class); - for (NodeEnvironment nodeEnv : instances) { // upgrade multidata path - MultiDataPathUpgrader.upgradeMultiDataPath(nodeEnv, logger); - } // force reloading dangling indices with a cluster state republish client().admin().cluster().prepareReroute().get(); ensureGreen(indexName); @@ -218,6 +214,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // randomly distribute the files from src over dests paths public static void copyIndex(final ESLogger logger, final Path src, final String indexName, final Path... 
dests) throws IOException { + Path destinationDataPath = dests[randomInt(dests.length - 1)]; for (Path dest : dests) { Path indexDir = dest.resolve(indexName); assertFalse(Files.exists(indexDir)); @@ -243,7 +240,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { } Path relativeFile = src.relativize(file); - Path destFile = dests[randomInt(dests.length - 1)].resolve(indexName).resolve(relativeFile); + Path destFile = destinationDataPath.resolve(indexName).resolve(relativeFile); logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString()); Files.move(file, destFile); assertFalse(Files.exists(file)); From 34b5d37c742c52d36f8672230155d0da2ecea20b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 21 Jan 2016 16:57:41 +0100 Subject: [PATCH 266/347] Check for invalid index settings on metadata upgrade this change allows us to open existing IndexMetaData that contains invalid, removed settings or settings with invalid values and instead of filling up the users disks with exceptions we _archive_ the settings with and `archive.` prefix. This allows us to warn the user via logs (once it's archived) as well as via external tools like the upgrade validation tool since those archived settings will be preserved even over restarts etc. It will prevent indices from failing during the allocaiton phase but instead will print a prominent warning on index metadata recovery from disk. 
--- .../metadata/MetaDataIndexUpgradeService.java | 50 +++++++++++-- .../common/settings/IndexScopedSettings.java | 13 ++++ .../MetaDataIndexUpgradeServiceTests.java | 70 +++++++++++++++++++ 3 files changed, 129 insertions(+), 4 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index c6ec2a4376d..fb077a532a3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -20,11 +20,15 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.misc.IndexMergeTool; import org.elasticsearch.Version; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.MapperService; @@ -32,6 +36,7 @@ import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.mapper.MapperRegistry; import java.util.Collections; +import java.util.Map; import java.util.Set; import static java.util.Collections.unmodifiableSet; @@ -48,11 +53,13 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; public class MetaDataIndexUpgradeService extends AbstractComponent { private final MapperRegistry 
mapperRegistry; + private final IndexScopedSettings indexScopedSettigns; @Inject - public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry) { + public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings) { super(settings); this.mapperRegistry = mapperRegistry; + this.indexScopedSettigns = indexScopedSettings; } /** @@ -65,13 +72,13 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { // Throws an exception if there are too-old segments: if (isUpgraded(indexMetaData)) { - return indexMetaData; + return archiveBrokenIndexSettings(indexMetaData); } checkSupportedVersion(indexMetaData); IndexMetaData newMetaData = indexMetaData; checkMappingsCompatibility(newMetaData); newMetaData = markAsUpgraded(newMetaData); - return newMetaData; + return archiveBrokenIndexSettings(newMetaData); } @@ -79,7 +86,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { * Checks if the index was already opened by this version of Elasticsearch and doesn't require any additional checks. */ private boolean isUpgraded(IndexMetaData indexMetaData) { - return indexMetaData.getUpgradedVersion().onOrAfter(Version.V_3_0_0); + return indexMetaData.getUpgradedVersion().onOrAfter(Version.V_3_0_0); // TODO should this be Version.CURRENT? 
} /** @@ -171,4 +178,39 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { } } + private static final String ARCHIVED_SETTINGS_PREFIX = "archived."; + + IndexMetaData archiveBrokenIndexSettings(IndexMetaData indexMetaData) { + Settings settings = indexMetaData.getSettings(); + Settings.Builder builder = Settings.builder(); + boolean changed = false; + for (Map.Entry entry : settings.getAsMap().entrySet()) { + try { + Setting setting = indexScopedSettigns.get(entry.getKey()); + if (setting != null) { + setting.get(settings); + builder.put(entry.getKey(), entry.getValue()); + } else { + if (indexScopedSettigns.isPrivateSetting(entry.getKey()) || entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX)) { + builder.put(entry.getKey(), entry.getValue()); + } else { + changed = true; + logger.warn("[{}] found unknown index setting: {} value: {} - archiving", indexMetaData.getIndex(), entry.getKey(), entry.getValue()); + // we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there + // but we want users to be aware that some of their setting are broken and they can research why and what they need to do to replace them. + builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue()); + } + } + } catch (IllegalArgumentException ex) { + changed = true; + logger.warn("[{}] found invalid index setting: {} value: {} - archiving",ex, indexMetaData.getIndex(), entry.getKey(), entry.getValue()); + // we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there + // but we want users to be aware that some of their setting sare broken and they can research why and what they need to do to replace them. + builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue()); + } + } + + return changed ? 
IndexMetaData.builder(indexMetaData).settings(builder.build()).build() : indexMetaData; + } + } diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 997ea794c99..86ead8c7ff9 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -152,4 +152,17 @@ public final class IndexScopedSettings extends AbstractScopedSettings { public IndexScopedSettings copy(Settings settings, IndexMetaData metaData) { return new IndexScopedSettings(settings, this, metaData); } + + public boolean isPrivateSetting(String key) { + switch (key) { + case IndexMetaData.SETTING_CREATION_DATE: + case IndexMetaData.SETTING_INDEX_UUID: + case IndexMetaData.SETTING_VERSION_CREATED: + case IndexMetaData.SETTING_VERSION_UPGRADED: + case MergePolicyConfig.INDEX_MERGE_ENABLED: + return true; + default: + return false; + } + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java new file mode 100644 index 00000000000..6e892339960 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; + +public class MetaDataIndexUpgradeServiceTests extends ESTestCase { + + public void testArchiveBrokenIndexSettings() { + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + IndexMetaData src = newIndexMeta("foo", Settings.EMPTY); + IndexMetaData indexMetaData = service.archiveBrokenIndexSettings(src); + assertSame(indexMetaData, src); + + src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); + indexMetaData = service.archiveBrokenIndexSettings(src); + assertNotSame(indexMetaData, src); + assertEquals("-200", indexMetaData.getSettings().get("archived.index.refresh_interval")); + + src = newIndexMeta("foo", Settings.builder().put("index.codec", "best_compression1").build()); + indexMetaData = service.archiveBrokenIndexSettings(src); + assertNotSame(indexMetaData, src); + assertEquals("best_compression1", indexMetaData.getSettings().get("archived.index.codec")); + + src = newIndexMeta("foo", Settings.builder().put("index.refresh.interval", "-1").build()); + indexMetaData = service.archiveBrokenIndexSettings(src); + 
assertNotSame(indexMetaData, src); + assertEquals("-1", indexMetaData.getSettings().get("archived.index.refresh.interval")); + + src = newIndexMeta("foo", indexMetaData.getSettings()); // double archive? + indexMetaData = service.archiveBrokenIndexSettings(src); + assertSame(indexMetaData, src); + } + + public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_CREATION_DATE, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_0_18_1_ID) + .put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } + +} From 2c4e13bf7cb30df24e882d3368e9b4799df8394b Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 19 Jan 2016 20:58:40 -0800 Subject: [PATCH 267/347] split string into ArrayList so it can be appended to Fixes #16109. 
--- .../ingest/processor/SplitProcessor.java | 7 +++++- .../ingest/core/CompoundProcessorTests.java | 6 +++++ .../ingest/processor/SplitProcessorTests.java | 24 +++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java index bfcfa47f7e2..425c3a58b2a 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java @@ -24,7 +24,10 @@ import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.List; import java.util.Map; /** @@ -59,7 +62,9 @@ public class SplitProcessor extends AbstractProcessor { if (oldVal == null) { throw new IllegalArgumentException("field [" + field + "] is null, cannot split."); } - document.setFieldValue(field, Arrays.asList(oldVal.split(separator))); + List splitList = new ArrayList<>(); + Collections.addAll(splitList, oldVal.split(separator)); + document.setFieldValue(field, splitList); } @Override diff --git a/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java index f19500ab186..f21644e6005 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java @@ -20,11 +20,17 @@ package org.elasticsearch.ingest.core; import org.elasticsearch.ingest.TestProcessor; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.processor.AppendProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; +import 
org.elasticsearch.ingest.processor.SplitProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java index dbf164c89aa..08a26c4bcb3 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java @@ -19,15 +19,20 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.core.CompoundProcessor; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.CoreMatchers; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -77,4 +82,23 @@ public class SplitProcessorTests extends ESTestCase { assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); } } + + public void testSplitAppendable() throws Exception { + TemplateService templateService = TestTemplateService.instance(); + Map splitConfig = new HashMap<>(); + splitConfig.put("field", "flags"); + splitConfig.put("separator", "\\|"); + Processor splitProcessor = (new SplitProcessor.Factory()).create(splitConfig); + Map appendConfig = new HashMap<>(); + appendConfig.put("field", "flags"); + appendConfig.put("value", 
Collections.singletonList("additional_flag")); + Processor appendProcessor = (new AppendProcessor.Factory(templateService)).create(appendConfig); + CompoundProcessor compoundProcessor = new CompoundProcessor(splitProcessor, appendProcessor); + Map source = new HashMap<>(); + source.put("flags", "new|hot|super|fun|interesting"); + IngestDocument ingestDocument = new IngestDocument(source, new HashMap<>()); + compoundProcessor.execute(ingestDocument); + List expectedFlags = Arrays.asList("new", "hot", "super", "fun", "interesting", "additional_flag"); + assertThat(ingestDocument.getFieldValue("flags", List.class), CoreMatchers.equalTo(expectedFlags)); + } } From 50d184066f123dfd80eedbec09870f1bf7ba2d44 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 21 Jan 2016 10:00:46 -0700 Subject: [PATCH 268/347] [DOCS] Document the `string_distance` parameter for term suggestor --- .../search/suggesters/term-suggest.asciidoc | 76 +++++++++++-------- 1 file changed, 44 insertions(+), 32 deletions(-) diff --git a/docs/reference/search/suggesters/term-suggest.asciidoc b/docs/reference/search/suggesters/term-suggest.asciidoc index 55fce633481..965a487e293 100644 --- a/docs/reference/search/suggesters/term-suggest.asciidoc +++ b/docs/reference/search/suggesters/term-suggest.asciidoc @@ -9,70 +9,70 @@ suggest text is analyzed before terms are suggested. The suggested terms are provided per analyzed suggest text token. The `term` suggester doesn't take the query into account that is part of request. -==== Common suggest options: +==== Common suggest options: [horizontal] -`text`:: +`text`:: The suggest text. The suggest text is a required option that needs to be set globally or per suggestion. -`field`:: +`field`:: The field to fetch the candidate suggestions from. This is an required option that either needs to be set globally or per - suggestion. + suggestion. -`analyzer`:: +`analyzer`:: The analyzer to analyse the suggest text with. 
Defaults - to the search analyzer of the suggest field. + to the search analyzer of the suggest field. -`size`:: +`size`:: The maximum corrections to be returned per suggest text - token. + token. -`sort`:: +`sort`:: Defines how suggestions should be sorted per suggest text term. Two possible values: + - ** `score`: Sort by score first, then document frequency and - then the term itself. + ** `score`: Sort by score first, then document frequency and + then the term itself. ** `frequency`: Sort by document frequency first, then similarity - score and then the term itself. + score and then the term itself. + -`suggest_mode`:: +`suggest_mode`:: The suggest mode controls what suggestions are included or controls for what suggest text terms, suggestions should be - suggested. Three possible values can be specified: -+ + suggested. Three possible values can be specified: ++ ** `missing`: Only provide suggestions for suggest text terms that are - not in the index. This is the default. + not in the index. This is the default. ** `popular`: Only suggest suggestions that occur in more docs then - the original suggest text term. + the original suggest text term. ** `always`: Suggest any matching suggestions based on terms in the suggest text. -==== Other term suggest options: +==== Other term suggest options: [horizontal] -`lowercase_terms`:: - Lower cases the suggest text terms after text analysis. +`lowercase_terms`:: + Lower cases the suggest text terms after text analysis. -`max_edits`:: +`max_edits`:: The maximum edit distance candidate suggestions can have in order to be considered as a suggestion. Can only be a value between 1 and 2. Any other value result in an bad request error being - thrown. Defaults to 2. + thrown. Defaults to 2. -`prefix_length`:: +`prefix_length`:: The number of minimal prefix characters that must match in order be a candidate suggestions. Defaults to 1. Increasing this number improves spellcheck performance. 
Usually misspellings don't - occur in the beginning of terms. (Old name "prefix_len" is deprecated) + occur in the beginning of terms. (Old name "prefix_len" is deprecated) -`min_word_length`:: +`min_word_length`:: The minimum length a suggest text term must have in order to be included. Defaults to 4. (Old name "min_word_len" is deprecated) -`shard_size`:: +`shard_size`:: Sets the maximum number of suggestions to be retrieved from each individual shard. During the reduce phase only the top N suggestions are returned based on the `size` option. Defaults to the @@ -81,24 +81,24 @@ doesn't take the query into account that is part of request. corrections at the cost of performance. Due to the fact that terms are partitioned amongst shards, the shard level document frequencies of spelling corrections may not be precise. Increasing this will make these - document frequencies more precise. + document frequencies more precise. -`max_inspections`:: +`max_inspections`:: A factor that is used to multiply with the `shards_size` in order to inspect more candidate spell corrections on the shard level. Can improve accuracy at the cost of performance. - Defaults to 5. + Defaults to 5. -`min_doc_freq`:: +`min_doc_freq`:: The minimal threshold in number of documents a suggestion should appear in. This can be specified as an absolute number or as a relative percentage of number of documents. This can improve quality by only suggesting high frequency terms. Defaults to 0f and is not enabled. If a value higher than 1 is specified then the number cannot be fractional. The shard level document frequencies are used for - this option. + this option. -`max_term_freq`:: +`max_term_freq`:: The maximum threshold in number of documents a suggest text token can exist in order to be included. Can be a relative percentage number (e.g 0.4) or an absolute number to represent document @@ -108,3 +108,15 @@ doesn't take the query into account that is part of request. 
usually spelled correctly on top of this also improves the spellcheck performance. The shard level document frequencies are used for this option. + +`string_distance`:: + Which string distance implementation to use for comparing how similar + suggested terms are. Five possible values can be specified: + `internal` - The default based on damerau_levenshtein but highly optimized + for comparing string distance for terms inside the index. + `damerau_levenshtein` - String distance algorithm based on + Damerau-Levenshtein algorithm. + `levenstein` - String distance algorithm based on Levenstein edit distance + algorithm. + `jarowinkler` - String distance algorithm based on Jaro-Winkler algorithm. + `ngram` - String distance algorithm based on character n-grams. From 20f634952ccbe86591376cdefa39561587acb43b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 24 Dec 2015 12:18:07 +0100 Subject: [PATCH 269/347] mustache: Improve the mustache script engine * Added a `content_type` option at compile time to decide how variable values are encoded. Possible values are `application/json` and `text/plain`. Defaults to `application/json`. * Added support for variable placeholders to lookup values from specific slots in arrays/lists.
--- .../CustomReflectionObjectHandler.java | 153 ++++++++++++++++++ .../mustache/JsonEscapingMustacheFactory.java | 7 +- .../mustache/MustacheScriptEngineService.java | 24 ++- .../mustache/NoneEscapingMustacheFactory.java | 40 +++++ .../mustache/MustacheScriptEngineTests.java | 5 +- .../script/mustache/MustacheTests.java | 144 ++++++++++++++--- 6 files changed, 345 insertions(+), 28 deletions(-) create mode 100644 modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java create mode 100644 modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java new file mode 100644 index 00000000000..30983395d93 --- /dev/null +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java @@ -0,0 +1,153 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script.mustache; + +import com.github.mustachejava.reflect.ReflectionObjectHandler; +import org.elasticsearch.common.util.iterable.Iterables; + +import java.lang.reflect.Array; +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Set; +import java.util.Iterator; +import java.util.Map; +import java.util.HashMap; + +final class CustomReflectionObjectHandler extends ReflectionObjectHandler { + + @Override + public Object coerce(Object object) { + if (object == null) { + return null; + } + + if (object.getClass().isArray()) { + return new ArrayMap(object); + } else if (object instanceof Collection) { + @SuppressWarnings("unchecked") + Collection collection = (Collection) object; + return new CollectionMap(collection); + } else { + return super.coerce(object); + } + } + + final static class ArrayMap extends AbstractMap implements Iterable { + + private final Object array; + private final int length; + + public ArrayMap(Object array) { + this.array = array; + this.length = Array.getLength(array); + } + + @Override + public Object get(Object key) { + if (key instanceof Number) { + return Array.get(array, ((Number) key).intValue()); + } + try { + int index = Integer.parseInt(key.toString()); + return Array.get(array, index); + } catch (NumberFormatException nfe) { + // if it's not a number it is as if the key doesn't exist + return null; + } + } + + @Override + public boolean containsKey(Object key) { + return get(key) != null; + } + + @Override + public Set> entrySet() { + Map map = new HashMap<>(length); + for (int i = 0; i < length; i++) { + map.put(i, Array.get(array, i)); + } + return map.entrySet(); + } + + @Override + public Iterator iterator() { + return new Iterator() { + + int index = 0; + + @Override + public boolean hasNext() { + return index < length; + } + + @Override + public Object next() { + return Array.get(array, index++); + } + }; + } + + } + + final static class CollectionMap extends 
AbstractMap implements Iterable { + + private final Collection col; + + public CollectionMap(Collection col) { + this.col = col; + } + + @Override + public Object get(Object key) { + if (key instanceof Number) { + return Iterables.get(col, ((Number) key).intValue()); + } + try { + int index = Integer.parseInt(key.toString()); + return Iterables.get(col, index); + } catch (NumberFormatException nfe) { + // if it's not a number it is as if the key doesn't exist + return null; + } + } + + @Override + public boolean containsKey(Object key) { + return get(key) != null; + } + + @Override + public Set> entrySet() { + Map map = new HashMap<>(col.size()); + int i = 0; + for (Object item : col) { + map.put(i++, item); + } + return map.entrySet(); + } + + @Override + public Iterator iterator() { + return col.iterator(); + } + } + +} diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java index 7734d0334bf..38d48b98f4e 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java @@ -28,13 +28,12 @@ import java.io.Writer; /** * A MustacheFactory that does simple JSON escaping. 
*/ -public final class JsonEscapingMustacheFactory extends DefaultMustacheFactory { - +final class JsonEscapingMustacheFactory extends DefaultMustacheFactory { + @Override public void encode(String value, Writer writer) { try { - JsonStringEncoder utils = new JsonStringEncoder(); - writer.write(utils.quoteAsString(value));; + writer.write(JsonStringEncoder.getInstance().quoteAsString(value)); } catch (IOException e) { throw new MustacheException("Failed to encode value: " + value); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java index 41f8924595b..685ba6a4aae 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java @@ -22,6 +22,7 @@ import java.lang.ref.SoftReference; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Collections; +import java.io.Reader; import java.util.Map; import org.elasticsearch.SpecialPermission; @@ -40,6 +41,7 @@ import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; import com.github.mustachejava.Mustache; +import com.github.mustachejava.DefaultMustacheFactory; /** * Main entry point handling template registration, compilation and @@ -49,9 +51,12 @@ import com.github.mustachejava.Mustache; * process: First compile the string representing the template, the resulting * {@link Mustache} object can then be re-used for subsequent executions. 
*/ -public class MustacheScriptEngineService extends AbstractComponent implements ScriptEngineService { +public final class MustacheScriptEngineService extends AbstractComponent implements ScriptEngineService { public static final String NAME = "mustache"; + static final String CONTENT_TYPE_PARAM = "content_type"; + static final String JSON_CONTENT_TYPE = "application/json"; + static final String PLAIN_TEXT_CONTENT_TYPE = "text/plain"; /** Thread local UTF8StreamWriter to store template execution results in, thread local to save object creation.*/ private static ThreadLocal> utf8StreamWriter = new ThreadLocal<>(); @@ -86,8 +91,21 @@ public class MustacheScriptEngineService extends AbstractComponent implements Sc * */ @Override public Object compile(String template, Map params) { - /** Factory to generate Mustache objects from. */ - return (new JsonEscapingMustacheFactory()).compile(new FastStringReader(template), "query-template"); + String contentType = params.getOrDefault(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE); + final DefaultMustacheFactory mustacheFactory; + switch (contentType){ + case PLAIN_TEXT_CONTENT_TYPE: + mustacheFactory = new NoneEscapingMustacheFactory(); + break; + case JSON_CONTENT_TYPE: + default: + // assume that the default is json encoding: + mustacheFactory = new JsonEscapingMustacheFactory(); + break; + } + mustacheFactory.setObjectHandler(new CustomReflectionObjectHandler()); + Reader reader = new FastStringReader(template); + return mustacheFactory.compile(reader, "query-template"); } @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java new file mode 100644 index 00000000000..3539402df98 --- /dev/null +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under 
one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.script.mustache; + +import com.github.mustachejava.DefaultMustacheFactory; +import com.github.mustachejava.MustacheException; + +import java.io.IOException; +import java.io.Writer; + +/** + * A MustacheFactory that does no string escaping. 
+ */ +final class NoneEscapingMustacheFactory extends DefaultMustacheFactory { + + @Override + public void encode(String value, Writer writer) { + try { + writer.write(value); + } catch (IOException e) { + throw new MustacheException("Failed to encode value: " + value); + } + } +} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index 8e8c8981493..b388f8a3ad5 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -48,12 +48,13 @@ public class MustacheScriptEngineTests extends ESTestCase { } public void testSimpleParameterReplace() { + Map compileParams = Collections.singletonMap("content_type", "application/json"); { String template = "GET _search {\"query\": " + "{\"boosting\": {" + "\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); @@ -64,7 +65,7 @@ public class MustacheScriptEngineTests extends ESTestCase { Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); 
vars.put("body_val", "\"quick brown\""); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index d8cf7732378..9c560210e2c 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -18,36 +18,142 @@ */ package org.elasticsearch.script.mustache; -import com.github.mustachejava.DefaultMustacheFactory; import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; -import java.io.StringReader; -import java.io.StringWriter; + +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.HashSet; + +import static java.util.Collections.singleton; +import static java.util.Collections.singletonMap; +import static 
org.elasticsearch.script.mustache.MustacheScriptEngineService.CONTENT_TYPE_PARAM; +import static org.elasticsearch.script.mustache.MustacheScriptEngineService.JSON_CONTENT_TYPE; +import static org.elasticsearch.script.mustache.MustacheScriptEngineService.PLAIN_TEXT_CONTENT_TYPE; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; -/** - * Figure out how Mustache works for the simplest use case. Leaving in here for now for reference. - * */ public class MustacheTests extends ESTestCase { - public void test() { - HashMap scopes = new HashMap<>(); - scopes.put("boost_val", "0.2"); + private ScriptEngineService engine = new MustacheScriptEngineService(Settings.EMPTY); + + public void testBasics() { String template = "GET _search {\"query\": " + "{\"boosting\": {" - + "\"positive\": {\"match\": {\"body\": \"gift\"}}," - + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" - + "}}, \"negative_boost\": {{boost_val}} } }}"; - MustacheFactory f = new DefaultMustacheFactory(); - Mustache mustache = f.compile(new StringReader(template), "example"); - StringWriter writer = new StringWriter(); - mustache.execute(writer, scopes); - writer.flush(); + + "\"positive\": {\"match\": {\"body\": \"gift\"}}," + + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + + "}}, \"negative_boost\": {{boost_val}} } }}"; + Map params = Collections.singletonMap("boost_val", "0.2"); + + Mustache mustache = (Mustache) engine.compile(template, Collections.emptyMap()); + CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "my-name", "mustache", mustache); + ExecutableScript result = engine.executable(compiledScript, params); assertEquals( "Mustache templating broken", "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + 
"\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.2 } }}", - writer.toString()); + ((BytesReference) result.run()).toUtf8() + ); } + + public void testArrayAccess() throws Exception { + String template = "{{data.0}} {{data.1}}"; + CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(template, Collections.emptyMap())); + Map vars = new HashMap<>(); + Object data = randomFrom( + new String[] { "foo", "bar" }, + Arrays.asList("foo", "bar")); + vars.put("data", data); + Object output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + BytesReference bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), equalTo("foo bar")); + + // Sets can come out in any order + Set setData = new HashSet<>(); + setData.add("foo"); + setData.add("bar"); + vars.put("data", setData); + output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + } + + public void testArrayInArrayAccess() throws Exception { + String template = "{{data.0.0}} {{data.0.1}}"; + CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(template, Collections.emptyMap())); + Map vars = new HashMap<>(); + Object data = randomFrom( + new String[][] { new String[] { "foo", "bar" }}, + Collections.singletonList(new String[] { "foo", "bar" }), + singleton(new String[] { "foo", "bar" }) + ); + vars.put("data", data); + Object output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + BytesReference bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), equalTo("foo bar")); + } + + 
public void testMapInArrayAccess() throws Exception { + String template = "{{data.0.key}} {{data.1.key}}"; + CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(template, Collections.emptyMap())); + Map vars = new HashMap<>(); + Object data = randomFrom( + new Object[] { singletonMap("key", "foo"), singletonMap("key", "bar") }, + Arrays.asList(singletonMap("key", "foo"), singletonMap("key", "bar"))); + vars.put("data", data); + Object output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + BytesReference bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), equalTo("foo bar")); + + // HashSet iteration order isn't fixed + Set setData = new HashSet<>(); + setData.add(singletonMap("key", "foo")); + setData.add(singletonMap("key", "bar")); + vars.put("data", setData); + output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + } + + public void testEscaping() { + // json string escaping enabled: + Map params = randomBoolean() ? 
Collections.emptyMap() : Collections.singletonMap(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE); + Mustache mustache = (Mustache) engine.compile("{ \"field1\": \"{{value}}\"}", Collections.emptyMap()); + CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "name", "mustache", mustache); + ExecutableScript executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); + BytesReference rawResult = (BytesReference) executableScript.run(); + String result = rawResult.toUtf8(); + assertThat(result, equalTo("{ \"field1\": \"a \\\"value\\\"\"}")); + + // json string escaping disabled: + mustache = (Mustache) engine.compile("{ \"field1\": \"{{value}}\"}", Collections.singletonMap(CONTENT_TYPE_PARAM, PLAIN_TEXT_CONTENT_TYPE)); + compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "name", "mustache", mustache); + executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); + rawResult = (BytesReference) executableScript.run(); + result = rawResult.toUtf8(); + assertThat(result, equalTo("{ \"field1\": \"a \"value\"\"}")); + } + } From 3a6c2d008e77142f4545d613ab134cd56366a1b6 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 21 Jan 2016 08:53:47 -0800 Subject: [PATCH 270/347] rename processor_tag to tag --- .../ingest/SimulateProcessorResult.java | 3 ++- .../ingest/core/AbstractProcessorFactory.java | 4 ++-- .../ingest/core/PipelineFactoryTests.java | 4 +--- .../AppendProcessorFactoryTests.java | 3 ++- .../ConvertProcessorFactoryTests.java | 4 ++-- .../processor/DateProcessorFactoryTests.java | 3 ++- .../processor/DeDotProcessorFactoryTests.java | 3 ++- .../processor/FailProcessorFactoryTests.java | 4 ++-- .../processor/GsubProcessorFactoryTests.java | 4 ++-- .../processor/JoinProcessorFactoryTests.java | 3 ++- .../LowercaseProcessorFactoryTests.java | 3 ++- .../RemoveProcessorFactoryTests.java | 3 ++- .../RenameProcessorFactoryTests.java | 3 ++- 
.../processor/SetProcessorFactoryTests.java | 3 ++- .../processor/SplitProcessorFactoryTests.java | 3 ++- .../processor/TrimProcessorFactoryTests.java | 3 ++- .../UppercaseProcessorFactoryTests.java | 3 ++- .../grok/GrokProcessorFactoryTests.java | 3 ++- .../geoip/GeoIpProcessorFactoryTests.java | 3 ++- .../test/ingest/40_simulate.yaml | 20 +++++++++---------- 20 files changed, 47 insertions(+), 35 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 664925b3640..6a38434d4c0 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import java.io.IOException; @@ -92,7 +93,7 @@ public class SimulateProcessorResult implements Writeable implements Processor.Factory

{ - static final String PROCESSOR_TAG_KEY = "processor_tag"; + public static final String TAG_KEY = "tag"; @Override public P create(Map config) throws Exception { - String tag = ConfigurationUtils.readOptionalStringProperty(config, PROCESSOR_TAG_KEY); + String tag = ConfigurationUtils.readOptionalStringProperty(config, TAG_KEY); return doCreate(tag, config); } diff --git a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java index f195f8ef045..229290372b6 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.ingest.core; import org.elasticsearch.ingest.TestProcessor; -import org.elasticsearch.ingest.core.Pipeline; -import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -37,7 +35,7 @@ public class PipelineFactoryTests extends ESTestCase { public void testCreate() throws Exception { Map processorConfig0 = new HashMap<>(); Map processorConfig1 = new HashMap<>(); - processorConfig0.put(AbstractProcessorFactory.PROCESSOR_TAG_KEY, "first-processor"); + processorConfig0.put(AbstractProcessorFactory.TAG_KEY, "first-processor"); Map pipelineConfig = new HashMap<>(); pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description"); pipelineConfig.put(Pipeline.PROCESSORS_KEY, Arrays.asList(Collections.singletonMap("test", processorConfig0), Collections.singletonMap("test", processorConfig1))); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java index e236abe12f0..b72c144605f 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java +++ 
b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -50,7 +51,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { } config.put("value", value); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); AppendProcessor appendProcessor = factory.create(config); assertThat(appendProcessor.getTag(), equalTo(processorTag)); assertThat(appendProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java index 280be970044..706433141d4 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.processor.ConvertProcessor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -37,7 +37,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("type", type.toString()); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); ConvertProcessor convertProcessor = factory.create(config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); diff --git 
a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java index 41b9e0f9258..a145a7c5149 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; @@ -42,7 +43,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("match_field", sourceField); config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); DateProcessor processor = factory.create(config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getMatchField(), equalTo(sourceField)); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java index bca16060be1..63eee56cc68 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -40,7 +41,7 @@ public class DeDotProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("separator", "_"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, 
processorTag); DeDotProcessor deDotProcessor = factory.create(config); assertThat(deDotProcessor.getSeparator(), equalTo("_")); assertThat(deDotProcessor.getTag(), equalTo(processorTag)); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java index 55ad338efca..993c7ccd904 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.processor.FailProcessor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -43,7 +43,7 @@ public class FailProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("message", "error"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); FailProcessor failProcessor = factory.create(config); assertThat(failProcessor.getTag(), equalTo(processorTag)); assertThat(failProcessor.getMessage().execute(Collections.emptyMap()), equalTo("error")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java index 9ce74293ed3..fd62f6cdeac 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.processor.GsubProcessor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import 
org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -36,7 +36,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("pattern", "\\."); config.put("replacement", "-"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); GsubProcessor gsubProcessor = factory.create(config); assertThat(gsubProcessor.getTag(), equalTo(processorTag)); assertThat(gsubProcessor.getField(), equalTo("field1")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java index fffeca51770..2af2b096417 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -34,7 +35,7 @@ public class JoinProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("separator", "-"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); JoinProcessor joinProcessor = factory.create(config); assertThat(joinProcessor.getTag(), equalTo(processorTag)); assertThat(joinProcessor.getField(), equalTo("field1")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java index c22ea6641ea..6a4a67e40cf 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java +++ 
b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -33,7 +34,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); LowercaseProcessor uppercaseProcessor = factory.create(config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java index 6522b565b21..0b03150adb6 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -42,7 +43,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); RemoveProcessor removeProcessor = factory.create(config); assertThat(removeProcessor.getTag(), equalTo(processorTag)); assertThat(removeProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); diff --git 
a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java index 0a5d709f06d..21f5c663671 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -34,7 +35,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { config.put("field", "old_field"); config.put("to", "new_field"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); RenameProcessor renameProcessor = factory.create(config); assertThat(renameProcessor.getTag(), equalTo(processorTag)); assertThat(renameProcessor.getOldFieldName(), equalTo("old_field")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java index ffc98379955..a58ee491a7c 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -43,7 +44,7 @@ public class SetProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("value", "value1"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + 
config.put(AbstractProcessorFactory.TAG_KEY, processorTag); SetProcessor setProcessor = factory.create(config); assertThat(setProcessor.getTag(), equalTo(processorTag)); assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java index 9954336030a..7267544c1ff 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -34,7 +35,7 @@ public class SplitProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("separator", "\\."); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); SplitProcessor splitProcessor = factory.create(config); assertThat(splitProcessor.getTag(), equalTo(processorTag)); assertThat(splitProcessor.getField(), equalTo("field1")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java index cb5e1ed838f..350aaa66e6d 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -33,7 +34,7 @@ public class TrimProcessorFactoryTests extends 
ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); TrimProcessor uppercaseProcessor = factory.create(config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java index a3569bd2ee0..2220438c75f 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -33,7 +34,7 @@ public class UppercaseProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); UppercaseProcessor uppercaseProcessor = factory.create(config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java index 9eb3100729d..f6bed139552 100644 --- a/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java +++ b/modules/ingest-grok/src/test/java/org/elasticsearch/ingest/grok/GrokProcessorFactoryTests.java @@ -19,6 +19,7 @@ package 
org.elasticsearch.ingest.grok; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -37,7 +38,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("pattern", "(?\\w+)"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); GrokProcessor processor = factory.create(config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getMatchField(), equalTo("_field")); diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 7653e978127..b59242ece84 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.junit.BeforeClass; @@ -61,7 +62,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("source_field", "_field"); String processorTag = randomAsciiOfLength(10); - config.put("processor_tag", processorTag); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); GeoIpProcessor processor = factory.create(config); assertThat(processor.getTag(), equalTo(processorTag)); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml index 67623a0763d..3153ba85a59 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml @@ -206,7 +206,7 @@ "processors": [ { "set" : { - "processor_tag" : "processor[set]-0", + "tag" : "processor[set]-0", "field" : "field2", "value" : "_value" } @@ -232,7 +232,7 @@ } - length: { docs: 1 } - length: { docs.0.processor_results: 2 } - - match: { docs.0.processor_results.0.processor_tag: "processor[set]-0" } + - match: { docs.0.processor_results.0.tag: "processor[set]-0" } - length: { docs.0.processor_results.0.doc._source: 2 } - match: { docs.0.processor_results.0.doc._source.foo: "bar" } - match: { docs.0.processor_results.0.doc._source.field2: "_value" } @@ -356,20 +356,20 @@ "processors": [ { "set" : { - "processor_tag" : "setstatus-1", + "tag" : "setstatus-1", "field" : "status", "value" : 200 } }, { "rename" : { - "processor_tag" : "rename-1", + "tag" : "rename-1", "field" : "foofield", "to" : "field1", "on_failure" : [ { "set" : { - "processor_tag" : "set on_failure rename", + "tag" : "set on_failure rename", "field" : "foofield", "value" : "exists" } @@ -406,15 +406,15 @@ } - length: { docs: 1 } - length: { docs.0.processor_results: 5 } - - match: { docs.0.processor_results.0.processor_tag: "setstatus-1" } + - match: { docs.0.processor_results.0.tag: "setstatus-1" } - match: { docs.0.processor_results.0.doc._source.field1: "123.42 400 " } - match: { docs.0.processor_results.0.doc._source.status: 200 } - - match: { docs.0.processor_results.1.processor_tag: "rename-1" } + - match: { docs.0.processor_results.1.tag: "rename-1" } - match: { docs.0.processor_results.1.error.type: "illegal_argument_exception" } - match: { docs.0.processor_results.1.error.reason: "field [foofield] doesn't exist" } - - match: { docs.0.processor_results.2.processor_tag: "set on_failure rename" } - - is_false: docs.0.processor_results.3.processor_tag - - is_false: docs.0.processor_results.4.processor_tag + - match: { 
docs.0.processor_results.2.tag: "set on_failure rename" } + - is_false: docs.0.processor_results.3.tag + - is_false: docs.0.processor_results.4.tag - match: { docs.0.processor_results.4.doc._source.foofield: "exists" } - match: { docs.0.processor_results.4.doc._source.foofield2: "ran" } - match: { docs.0.processor_results.4.doc._source.field1: "123.42 400 " } From 24ac9506bd719a17e5b37f74c980ba7090bd847a Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 21 Jan 2016 16:47:35 +0100 Subject: [PATCH 271/347] Reverted #16048 --- .../org/elasticsearch/index/engine/Engine.java | 2 +- .../bucket/geogrid/GeoHashGridParser.java | 4 +--- .../bucket/histogram/DateHistogramParser.java | 5 ++--- .../bucket/histogram/HistogramParser.java | 7 +++---- .../bucket/missing/MissingAggregator.java | 4 ++-- .../bucket/missing/MissingParser.java | 4 ++-- .../bucket/sampler/SamplerAggregator.java | 3 +-- .../bucket/sampler/SamplerParser.java | 7 +++---- .../SignificantTermsAggregatorFactory.java | 10 +++++++--- .../significant/SignificantTermsParser.java | 3 +-- .../terms/AbstractTermsParametersParser.java | 8 ++++---- .../bucket/terms/TermsAggregatorFactory.java | 2 +- .../aggregations/bucket/terms/TermsParser.java | 4 +--- .../ValuesSourceMetricsAggregationBuilder.java | 1 - .../CardinalityAggregatorFactory.java | 2 +- .../metrics/cardinality/CardinalityParser.java | 3 +-- .../metrics/valuecount/ValueCountParser.java | 4 ++-- .../aggregations/support/ValuesSource.java | 4 ---- .../support/ValuesSourceAggregatorFactory.java | 9 ++++----- .../aggregations/support/ValuesSourceParser.java | 7 ++----- .../search/profile/CollectorResult.java | 6 +++--- .../pipeline/PipelineAggregationHelperTests.java | 2 +- .../pipeline/moving/avg/MovAvgIT.java | 16 ++++++++++------ .../pipeline/serialdiff/SerialDiffIT.java | 6 +++--- .../watcher/ResourceWatcherServiceTests.java | 2 +- 25 files changed, 57 insertions(+), 68 deletions(-) diff --git 
a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 79610906b14..0e112118da8 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1065,7 +1065,7 @@ public abstract class Engine implements Closeable { } } - public static class CommitId implements Writeable { + public static class CommitId implements Writeable { private final byte[] id; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java index 60302f2da42..6473b5ae7f2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java @@ -62,8 +62,7 @@ public class GeoHashGridParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context) - .build(); + ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context).build(); int precision = GeoHashGridParams.DEFAULT_PRECISION; int requiredSize = GeoHashGridParams.DEFAULT_MAX_NUM_CELLS; @@ -132,7 +131,6 @@ public class GeoHashGridParser implements Aggregator.Parser { final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize, Collections. 
emptyList(), pipelineAggregators, metaData); return new NonCollectingAggregator(name, aggregationContext, parent, pipelineAggregators, metaData) { - @Override public InternalAggregation buildEmptyAggregation() { return aggregation; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java index 52d77e1594c..694abf26e66 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -79,7 +78,7 @@ public class DateHistogramParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context) .targetValueType(ValueType.DATE) .formattable(true) .timezoneAware(true) @@ -191,7 +190,7 @@ public class DateHistogramParser implements Aggregator.Parser { .timeZone(vsParser.input().timezone()) .offset(offset).build(); - ValuesSourceConfig config = vsParser.config(); + ValuesSourceConfig config = vsParser.config(); return new 
HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, new InternalDateHistogram.Factory()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java index 31ee6681d4e..c738251d0e0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java @@ -25,7 +25,6 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.aggregations.support.format.ValueParser; import org.elasticsearch.search.internal.SearchContext; @@ -47,7 +46,7 @@ public class HistogramParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context) .targetValueType(ValueType.NUMERIC) .formattable(true) .build(); @@ -128,7 +127,7 @@ public class HistogramParser implements Aggregator.Parser { Rounding rounding = new Rounding.Interval(interval); if (offset != 0) { - rounding = new Rounding.OffsetRounding(rounding, offset); + rounding = new Rounding.OffsetRounding((Rounding.Interval) rounding, offset); } if (extendedBounds != null) { @@ -137,7 +136,7 @@ public class HistogramParser 
implements Aggregator.Parser { } return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds, - new InternalHistogram.Factory<>()); + new InternalHistogram.Factory()); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java index 38e15e216c2..1ae7341f611 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java @@ -81,9 +81,9 @@ public class MissingAggregator extends SingleBucketAggregator { return new InternalMissing(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData()); } - public static class Factory extends ValuesSourceAggregatorFactory { + public static class Factory extends ValuesSourceAggregatorFactory { - public Factory(String name, ValuesSourceConfig valueSourceConfig) { + public Factory(String name, ValuesSourceConfig valueSourceConfig) { super(name, InternalMissing.TYPE.name(), valueSourceConfig); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java index 4210e020d8c..6ecdc129dd0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.support.ValuesSource; import 
org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -40,7 +39,8 @@ public class MissingParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context) + + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context) .scriptable(false) .build(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 4541aa9d142..8cb980954cb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -203,8 +203,7 @@ public class SamplerAggregator extends SingleBucketAggregator { private int maxDocsPerValue; private String executionHint; - public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig vsConfig, - int maxDocsPerValue) { + public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig vsConfig, int maxDocsPerValue) { super(name, InternalSampler.TYPE.name(), vsConfig); this.shardSize = shardSize; this.maxDocsPerValue = maxDocsPerValue; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java index d51f43657f7..498a7cb5c66 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java @@ -23,7 +23,6 @@ import 
org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -56,10 +55,10 @@ public class SamplerParser implements Aggregator.Parser { String executionHint = null; int shardSize = DEFAULT_SHARD_SAMPLE_SIZE; int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT; + ValuesSourceParser vsParser = null; boolean diversityChoiceMade = false; - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true) - .formattable(false).build(); + vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true).formattable(false).build(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -89,7 +88,7 @@ public class SamplerParser implements Aggregator.Parser { } } - ValuesSourceConfig vsConfig = vsParser.config(); + ValuesSourceConfig vsConfig = vsParser.config(); if (vsConfig.valid()) { return new SamplerAggregator.DiversifiedFactory(aggregationName, shardSize, executionHint, vsConfig, maxDocsPerValue); } else { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 9b66fe01b67..399e85728af 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.significant; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; @@ -79,6 +80,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, Map metaData) throws IOException { + ValuesSource.Bytes.WithOrdinals valueSourceWithOrdinals = (ValuesSource.Bytes.WithOrdinals) valuesSource; + IndexSearcher indexSearcher = aggregationContext.searchContext().searcher(); final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(); return new GlobalOrdinalsSignificantTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, @@ -95,8 +98,9 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac List pipelineAggregators, Map metaData) throws IOException { final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? 
null : includeExclude.convertToOrdinalsFilter(); return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories, - (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, parent, - termsAggregatorFactory, pipelineAggregators, metaData); + (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, + aggregationContext, + parent, termsAggregatorFactory, pipelineAggregators, metaData); } }; @@ -139,7 +143,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac return new TermsAggregator.BucketCountThresholds(bucketCountThresholds); } - public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, + public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, String executionHint, Query filter, SignificanceHeuristic significanceHeuristic) { super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java index b4b89c29c33..28e0fb5a812 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import 
org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -54,7 +53,7 @@ public class SignificantTermsParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { SignificantTermsParametersParser aggParser = new SignificantTermsParametersParser(significanceHeuristicParserMapper); - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context) .scriptable(false) .formattable(true) .build(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java index ecd9d3bc693..891526c33c1 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java @@ -36,13 +36,13 @@ public abstract class AbstractTermsParametersParser { public static final ParseField SHARD_MIN_DOC_COUNT_FIELD_NAME = new ParseField("shard_min_doc_count"); public static final ParseField REQUIRED_SIZE_FIELD_NAME = new ParseField("size"); public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error"); - + //These are the results of the parsing. 
private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(); private String executionHint = null; - + private SubAggCollectionMode collectMode = SubAggCollectionMode.DEPTH_FIRST; @@ -59,12 +59,12 @@ public abstract class AbstractTermsParametersParser { public IncludeExclude getIncludeExclude() { return includeExclude; } - + public SubAggCollectionMode getCollectionMode() { return collectMode; } - public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser vsParser, IncludeExclude.Parser incExcParser) throws IOException { + public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser vsParser, IncludeExclude.Parser incExcParser) throws IOException { bucketCountThresholds = getDefaultBucketCountThresholds(); XContentParser.Token token; String currentFieldName = null; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 04f7adf27b7..270dc009af2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -165,7 +165,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory config, Terms.Order order, + public TermsAggregatorFactory(String name, ValuesSourceConfig config, Terms.Order order, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, String executionHint, SubAggCollectionMode executionMode, boolean showTermDocCountError) { super(name, StringTerms.TYPE.name(), config); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java index a7b60e930ff..478309d1bc0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java @@ -25,7 +25,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; import org.elasticsearch.search.aggregations.bucket.terms.TermsParametersParser.OrderElement; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -46,8 +45,7 @@ public class TermsParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { TermsParametersParser aggParser = new TermsParametersParser(); - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true) - .formattable(true).build(); + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true).formattable(true).build(); IncludeExclude.Parser incExcParser = new IncludeExclude.Parser(); aggParser.parse(aggregationName, parser, context, vsParser, incExcParser); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java index 81933144a5a..e6755486225 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java @@ -62,7 +62,6 @@ public abstract class ValuesSourceMetricsAggregationBuilder config, long precisionThreshold) { + CardinalityAggregatorFactory(String name, ValuesSourceConfig config, long precisionThreshold) { super(name, InternalCardinality.TYPE.name(), config); this.precisionThreshold = precisionThreshold; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java index 31552326450..68339457fe7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -44,7 +43,7 @@ public class CardinalityParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String name, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build(); + ValuesSourceParser vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build(); long precisionThreshold = -1; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java index 0a9ea4a9072..764f6ce9384 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java @@ -40,7 +40,7 @@ public class ValueCountParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context) .build(); XContentParser.Token token; @@ -54,6 +54,6 @@ public class ValueCountParser implements Aggregator.Parser { } } - return new ValueCountAggregator.Factory<>(aggregationName, vsParser.config()); + return new ValueCountAggregator.Factory(aggregationName, vsParser.config()); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index d9fe3ad66c0..b03bc8d6833 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -53,9 +53,6 @@ import org.elasticsearch.search.aggregations.support.values.ScriptLongValues; import java.io.IOException; -/** - * How to load values for an aggregation. 
- */ public abstract class ValuesSource { /** @@ -531,7 +528,6 @@ public abstract class ValuesSource { return indexFieldData.load(context).getBytesValues(); } - @Override public org.elasticsearch.index.fielddata.MultiGeoPointValues geoPointValues(LeafReaderContext context) { return indexFieldData.load(context).getGeoPointValues(); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java index 3f56162a2f2..d0eaec2d8bc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java @@ -78,20 +78,19 @@ public abstract class ValuesSourceAggregatorFactory ext boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException; - @SuppressWarnings("unchecked") // Safe because we check the types with isAssignableFrom private void resolveValuesSourceConfigFromAncestors(String aggName, AggregatorFactory parent, Class requiredValuesSourceType) { - ValuesSourceConfig config; + ValuesSourceConfig config; while (parent != null) { if (parent instanceof ValuesSourceAggregatorFactory) { - config = ((ValuesSourceAggregatorFactory) parent).config; + config = ((ValuesSourceAggregatorFactory) parent).config; if (config != null && config.valid()) { if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType)) { ValueFormat format = config.format; - this.config = (ValuesSourceConfig) config; + this.config = config; // if the user explicitly defined a format pattern, we'll do our best to keep it even when we inherit the // value source form one of the ancestor aggregations if (this.config.formatPattern != null && format != null && format instanceof ValueFormat.Patternable) { - this.config.format = 
((ValueFormat.Patternable) format).create(this.config.formatPattern); + this.config.format = ((ValueFormat.Patternable) format).create(this.config.formatPattern); } return; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java index 7c260619173..fced5fdc913 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java @@ -48,16 +48,13 @@ import java.util.HashMap; import java.util.Map; /** - * Parses a description of where to load the value sent by a user into a - * ValuesSourceConfig which can be used to work with the values in various ways, - * one of which is to create an actual ValueSource (done with the help of - * AggregationContext). + * */ public class ValuesSourceParser { static final ParseField TIME_ZONE = new ParseField("time_zone"); - public static Builder any(String aggName, InternalAggregation.Type aggType, SearchContext context) { + public static Builder any(String aggName, InternalAggregation.Type aggType, SearchContext context) { return new Builder<>(aggName, aggType, context, ValuesSource.class); } diff --git a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java index d0c006eb5dc..8da14d23d96 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java +++ b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java @@ -36,7 +36,7 @@ import java.util.Locale; * Collectors used in the search. 
Children CollectorResult's may be * embedded inside of a parent CollectorResult */ -public class CollectorResult implements ToXContent, Writeable { +public class CollectorResult implements ToXContent, Writeable { public static final String REASON_SEARCH_COUNT = "search_count"; public static final String REASON_SEARCH_TOP_HITS = "search_top_hits"; @@ -125,7 +125,7 @@ public class CollectorResult implements ToXContent, Writeable { builder = builder.startObject() .field(NAME.getPreferredName(), getName()) .field(REASON.getPreferredName(), getReason()) - .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", getTime() / 1000000.0)); + .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0))); if (!children.isEmpty()) { builder = builder.startArray(CHILDREN.getPreferredName()); @@ -150,7 +150,7 @@ public class CollectorResult implements ToXContent, Writeable { } @Override - public CollectorResult readFrom(StreamInput in) throws IOException { + public Object readFrom(StreamInput in) throws IOException { return new CollectorResult(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java index 65d5fbafbea..e962e90830f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java @@ -109,7 +109,7 @@ public class PipelineAggregationHelperTests extends ESTestCase { * @param values Array of values to compute metric for * @param metric A metric builder which defines what kind of metric should be returned for the values */ - public static double calculateMetric(double[] values, ValuesSourceMetricsAggregationBuilder metric) { + public static double calculateMetric(double[] values, 
ValuesSourceMetricsAggregationBuilder metric) { if (metric instanceof MinBuilder) { double accumulator = Double.POSITIVE_INFINITY; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 6184cb9fd68..90d4437fcea 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -77,7 +77,7 @@ public class MovAvgIT extends ESIntegTestCase { static int period; static HoltWintersModel.SeasonalityType seasonalityType; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceMetricsAggregationBuilder metric; + static ValuesSourceMetricsAggregationBuilder metric; static List mockHisto; static Map> testValues; @@ -864,7 +864,7 @@ public class MovAvgIT extends ESIntegTestCase { public void testHoltWintersNotEnoughData() { try { - client() + SearchResponse response = client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) @@ -1003,7 +1003,7 @@ public class MovAvgIT extends ESIntegTestCase { public void testBadModelParams() { try { - client() + SearchResponse response = client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) @@ -1248,7 +1248,7 @@ public class MovAvgIT extends ESIntegTestCase { for (MovAvgModelBuilder builder : builders) { try { - client() + SearchResponse response = client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) @@ -1265,10 +1265,14 @@ public class MovAvgIT extends ESIntegTestCase { // All good } } + + + + } - private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { + private void 
assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { if (!expectedBucketIter.hasNext()) { fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch"); } @@ -1351,7 +1355,7 @@ public class MovAvgIT extends ESIntegTestCase { } } - private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) { + private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java index 145587a4b27..aebd6a7e780 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java @@ -60,7 +60,7 @@ public class SerialDiffIT extends ESIntegTestCase { static int numBuckets; static int lag; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceMetricsAggregationBuilder metric; + static ValuesSourceMetricsAggregationBuilder metric; static List mockHisto; static Map> testValues; @@ -80,7 +80,7 @@ public class SerialDiffIT extends ESIntegTestCase { } } - private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) { + private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { @@ -95,7 +95,7 @@ public class SerialDiffIT extends ESIntegTestCase { } } - private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { + private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { if (!expectedBucketIter.hasNext()) { fail("`expectedBucketIter` iterator ended 
before `actual` iterator, size mismatch"); } diff --git a/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java b/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java index fe36b749625..6c6c45e9cfd 100644 --- a/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java +++ b/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java @@ -79,7 +79,7 @@ public class ResourceWatcherServiceTests extends ESTestCase { }; // checking default freq - WatcherHandle handle = service.add(watcher); + WatcherHandle handle = service.add(watcher); assertThat(handle, notNullValue()); assertThat(handle.frequency(), equalTo(ResourceWatcherService.Frequency.MEDIUM)); assertThat(service.lowMonitor.watchers.size(), is(0)); From 7de8d2881b5433448fa10157bd537abe79d13f95 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 21 Jan 2016 13:51:27 -0500 Subject: [PATCH 272/347] Fix shard ID logging in DWSDIT#testAckedIndexing This commit fixes a minor issue with the shard ID that is logged while indexing in DiscoveryWithServiceDisruptionsIT#testAckedIndexing. The issue is that the operation routing hash could lead to a negative remainder modulo the number of primaries (if the hash itself is negative) but should instead be the normalized positive remainder. This issue only impacts the logging of the shard ID as the actual shard ID used during indexing is computed elsewhere but would cause the shard ID in the affected logging statement to not match shard IDs that are logged elsewhere. 
--- .../discovery/DiscoveryWithServiceDisruptionsIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index e9fa8e495d9..268d1064f64 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.math.MathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.ZenDiscovery; @@ -465,7 +466,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("[{}] Acquired semaphore and it has {} permits left", name, semaphore.availablePermits()); try { id = Integer.toString(idGenerator.incrementAndGet()); - int shard = Murmur3HashFunction.hash(id) % numPrimaries; + int shard = MathUtils.mod(Murmur3HashFunction.hash(id), numPrimaries); logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard); IndexResponse response = client.prepareIndex("test", "type", id).setSource("{}").setTimeout("1s").get(); assertThat(response.getVersion(), equalTo(1l)); From 26e5cd651ee95a714486fbe9550e56fe75b01a2f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 21 Jan 2016 14:35:41 -0500 Subject: [PATCH 273/347] Default standard output to the journal in systemd This commit modifies the default setting for standard output in the systemd configuration to the journal instead of /dev/null. 
This is to address a user pain point where Elasticsearch would fail to start but the error message would be sent to standard output and therefore /dev/null leading to difficult-to-debug situations. --- .../src/main/packaging/systemd/elasticsearch.service | 7 ++----- docs/reference/migration/migrate_3_0.asciidoc | 11 +++++++++++ 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index 4a280a09d38..301586c1038 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -26,11 +26,8 @@ ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ -Des.default.path.data=${DATA_DIR} \ -Des.default.path.conf=${CONF_DIR} -# Connects standard output to /dev/null -StandardOutput=null - -# Connects standard error to journal -StandardError=journal +StandardOutput=journal +StandardError=inherit # Specifies the maximum file descriptor number that can be opened by this process LimitNOFILE=65535 diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 2b6daac4123..058995727ae 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -644,3 +644,14 @@ When percolating an existing document then specifying a document in the source o any more. Percolator documents are no longer excluded from the search response. + +=== Default logging using systemd + +In previous versions of Elasticsearch, the default logging +configuration routed standard output to /dev/null and standard error to +the journal. However, there are often critical error messages at +startup that are logged to standard output rather than standard error +and these error messages would be lost to the nether. 
The default has +changed to now route standard output to the journal and standard error +to inherit this setting (these are the defaults for systemd). These +settings can be modified by editing the elasticsearch.service file. From fcdb24bc9dd5f884d6f99eadfb5004ea5734b260 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jan 2016 09:09:28 +0100 Subject: [PATCH 274/347] Run Metadata upgrade tool on every version Today we run the metadata upgrade only on the current major version but this should run on every upgrade at least once to ensure we don't miss an important check or upgrade. --- .../metadata/MetaDataIndexUpgradeService.java | 14 +++++++---- .../MetaDataIndexUpgradeServiceTests.java | 23 +++++++++++++++++++ 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index fb077a532a3..8bbd6f09d7e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -72,21 +72,25 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { // Throws an exception if there are too-old segments: if (isUpgraded(indexMetaData)) { - return archiveBrokenIndexSettings(indexMetaData); + assert indexMetaData == archiveBrokenIndexSettings(indexMetaData) : "all settings must have been upgraded before"; + return indexMetaData; } checkSupportedVersion(indexMetaData); IndexMetaData newMetaData = indexMetaData; + // we have to run this first otherwise in we try to create IndexSettings + // with broken settings and fail in checkMappingsCompatibility + newMetaData = archiveBrokenIndexSettings(newMetaData); + // only run the check with the upgraded settings!! 
checkMappingsCompatibility(newMetaData); - newMetaData = markAsUpgraded(newMetaData); - return archiveBrokenIndexSettings(newMetaData); + return markAsUpgraded(newMetaData); } /** * Checks if the index was already opened by this version of Elasticsearch and doesn't require any additional checks. */ - private boolean isUpgraded(IndexMetaData indexMetaData) { - return indexMetaData.getUpgradedVersion().onOrAfter(Version.V_3_0_0); // TODO should this be Version.CURRENT? + boolean isUpgraded(IndexMetaData indexMetaData) { + return indexMetaData.getUpgradedVersion().onOrAfter(Version.CURRENT); } /** diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index 6e892339960..0f6b5e25437 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import java.util.Collections; @@ -54,6 +55,28 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { assertSame(indexMetaData, src); } + public void testUpgrad() { + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); + assertFalse(service.isUpgraded(src)); + src = service.upgradeIndexMetaData(src); + assertTrue(service.isUpgraded(src)); + assertEquals("-200", 
src.getSettings().get("archived.index.refresh_interval")); + assertNull(src.getSettings().get("index.refresh_interval")); + assertSame(src, service.upgradeIndexMetaData(src)); // no double upgrade + } + + public void testIsUpgraded() { + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); + assertFalse(service.isUpgraded(src)); + Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion()); + src = newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_VERSION_UPGRADED, version).build()); + assertFalse(service.isUpgraded(src)); + src = newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT).build()); + assertTrue(service.isUpgraded(src)); + } + public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) From dbff74308bc4013bd915600230f0c8a8b9748940 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jan 2016 09:48:41 +0100 Subject: [PATCH 275/347] fix typo --- .../cluster/metadata/MetaDataIndexUpgradeServiceTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index 0f6b5e25437..a43da9e53fa 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -55,7 +55,7 @@ 
public class MetaDataIndexUpgradeServiceTests extends ESTestCase { assertSame(indexMetaData, src); } - public void testUpgrad() { + public void testUpgrade() { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); assertFalse(service.isUpgraded(src)); From fbca68a7c4affcf0a3f03f49abee834e27c2677d Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 22 Jan 2016 10:15:40 +0100 Subject: [PATCH 276/347] Translog: close channel on failures while converting a writer to a reader TranslogWriter.closeIntoReader transfers the file ownership from a writer to a reader and closes the writer. If the transfer fails, we need to make sure we closed the underlying channel as the writer is already closed. See: http://build-us-00.elastic.co/job/es_core_master_regression/4355 Relates to #16142 --- .../org/elasticsearch/index/translog/TranslogWriter.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index f7d0cd571e8..3ead27f3111 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -192,7 +192,13 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { throw e; } if (closed.compareAndSet(false, true)) { - return new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter); + try { + return new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter); + } catch (Throwable t) { + // close the channel, as we are closed and failed to create a new
reader + IOUtils.closeWhileHandlingException(channel); + throw t; + } } else { throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy); } From cbd0a8b067e71c7be2f301f8f8d379576eef827c Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jan 2016 10:17:13 +0100 Subject: [PATCH 277/347] Add a testcase for #16142 that fails every time we use HandleTrackingFS This commit also uses a try/finally with success pattern instead of catching an exception. TranslogTests reproduce with `-Dtests.seed=DF6A38BAE739227A` every time. Closes #16142 --- .../index/translog/TranslogWriter.java | 14 +++++--- .../index/translog/TranslogTests.java | 36 +++++++++++++++++++ 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 3ead27f3111..a1fc708ddaf 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -192,12 +192,16 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { throw e; } if (closed.compareAndSet(false, true)) { + boolean success = false; try { - return new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter); - } catch (Throwable t) { - // close the channel, as we are closed and failed to create a new reader - IOUtils.closeWhileHandlingException(channel); - throw t; + final TranslogReader reader = new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter); + success = true; + return reader; + } finally { + if (success == false) { + // close the channel, as we are closed and failed to create a new reader + IOUtils.closeWhileHandlingException(channel); + } } } else { throw new AlreadyClosedException("translog [" +
getGeneration() + "] is already closed (path [" + path + "]", tragedy); diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index e1935328232..d0ef2331d9d 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -896,6 +896,42 @@ public class TranslogTests extends ESTestCase { IOUtils.close(writer); } + public void testFailWriterWhileClosing() throws IOException { + Path tempDir = createTempDir(); + final FailSwitch fail = new FailSwitch(); + fail.failNever(); + TranslogConfig config = getTranslogConfig(tempDir); + try (Translog translog = getFailableTranslog(fail, config)) { + final TranslogWriter writer = translog.createWriter(0); + final int numOps = randomIntBetween(10, 100); + byte[] bytes = new byte[4]; + ByteArrayDataOutput out = new ByteArrayDataOutput(bytes); + for (int i = 0; i < numOps; i++) { + out.reset(bytes); + out.writeInt(i); + writer.add(new BytesArray(bytes)); + } + writer.sync(); + try { + fail.failAlways(); + writer.closeIntoReader(); + fail(); + } catch (MockDirectoryWrapper.FakeIOException ex) { + } + try (TranslogReader reader = translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME)))) { + for (int i = 0; i < numOps; i++) { + ByteBuffer buffer = ByteBuffer.allocate(4); + reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * i); + buffer.flip(); + final int value = buffer.getInt(); + assertEquals(i, value); + } + } + + } + + } + public void testBasicRecovery() throws IOException { List locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); From 1fb2f22f321b5b0e78e7e9202a73f92d1740e45e Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jan 2016 11:25:26 +0100 Subject: [PATCH 278/347] Convert to the new settings infra --- 
.../org/elasticsearch/common/settings/ClusterSettings.java | 4 +++- .../main/java/org/elasticsearch/script/ScriptService.java | 6 +++--- .../java/org/elasticsearch/script/ScriptServiceTests.java | 4 ++-- .../java/org/elasticsearch/test/InternalTestCluster.java | 2 +- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 1e764dce42c..9262b22dcef 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -158,5 +159,6 @@ public final class ClusterSettings extends AbstractScopedSettings { Transport.TRANSPORT_TCP_COMPRESS, IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, - PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING))); + PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING, + ScriptService.SCRIPT_CACHE_SIZE_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index c9e9f9a873d..9883d62c987 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -84,8 +85,7 @@ public class ScriptService extends AbstractComponent implements Closeable { static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic"; public static final String DEFAULT_SCRIPTING_LANGUAGE_SETTING = "script.default_lang"; - public static final String SCRIPT_CACHE_SIZE_SETTING = "script.cache.max_size"; - public static final int SCRIPT_CACHE_SIZE_DEFAULT = 100; + public static final Setting SCRIPT_CACHE_SIZE_SETTING = Setting.intSetting("script.cache.max_size", 100, 0, false, Setting.Scope.CLUSTER); public static final String SCRIPT_CACHE_EXPIRE_SETTING = "script.cache.expire"; public static final String SCRIPT_INDEX = ".scripts"; public static final String DEFAULT_LANG = "groovy"; @@ -148,7 +148,7 @@ public class ScriptService extends AbstractComponent implements Closeable { this.scriptEngines = scriptEngines; this.scriptContextRegistry = scriptContextRegistry; - int cacheMaxSize = settings.getAsInt(SCRIPT_CACHE_SIZE_SETTING, SCRIPT_CACHE_SIZE_DEFAULT); + int cacheMaxSize = SCRIPT_CACHE_SIZE_SETTING.get(settings); TimeValue cacheExpire = settings.getAsTime(SCRIPT_CACHE_EXPIRE_SETTING, null); logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 3c939e7e91a..caad93c68dd 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -378,7 +378,7 @@ public class ScriptServiceTests extends ESTestCase { public void testCompilationStatsOnCacheHit() throws IOException { 
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); - builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); + builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); buildScriptService(builder.build()); scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); @@ -403,7 +403,7 @@ public class ScriptServiceTests extends ESTestCase { public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); - builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); + builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); buildScriptService(builder.build()); scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 01988f61558..b8cdbb4236b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -457,7 +457,7 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, RandomInts.randomIntBetween(random, -100, 2000)); + builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 
RandomInts.randomIntBetween(random, -100, 2000)); } if (random.nextBoolean()) { builder.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING, TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 750, 10000000))); From 20c3cbf98d68d3a2004a74e7f45bb627e0bc5dd5 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jan 2016 11:42:18 +0100 Subject: [PATCH 279/347] [TEST] only pass valid random settings --- .../main/java/org/elasticsearch/test/InternalTestCluster.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index b8cdbb4236b..3b772b4ef33 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -457,7 +457,7 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), RandomInts.randomIntBetween(random, -100, 2000)); + builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), RandomInts.randomIntBetween(random, 0, 2000)); } if (random.nextBoolean()) { builder.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING, TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 750, 10000000))); From e860f9ddda2232f70d02a2dc6b30b4892e41aed7 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jan 2016 12:02:40 +0100 Subject: [PATCH 280/347] Split processor: specify known size at list creation --- .../elasticsearch/ingest/processor/SplitProcessor.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java index 425c3a58b2a..ad0bffb061a 100644 --- 
a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java @@ -21,11 +21,10 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; -import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -62,8 +61,9 @@ public class SplitProcessor extends AbstractProcessor { if (oldVal == null) { throw new IllegalArgumentException("field [" + field + "] is null, cannot split."); } - List splitList = new ArrayList<>(); - Collections.addAll(splitList, oldVal.split(separator)); + String[] strings = oldVal.split(separator); + List splitList = new ArrayList<>(strings.length); + Collections.addAll(splitList, strings); document.setFieldValue(field, splitList); } From 17a36aecffb6254d35b0df311bdea31ae710375c Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jan 2016 12:03:36 +0100 Subject: [PATCH 281/347] [TEST] Simplify SplitProcessorTests#testSplitAppendable to not require compound and append processor --- .../ingest/processor/SplitProcessorTests.java | 23 +++++++------------ 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java index 08a26c4bcb3..e1c8a626a4b 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java @@ -19,14 +19,10 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.TestTemplateService; -import 
org.elasticsearch.ingest.core.CompoundProcessor; -import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.Processor; -import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.CoreMatchers; import java.util.Arrays; import java.util.Collections; @@ -84,21 +80,18 @@ public class SplitProcessorTests extends ESTestCase { } public void testSplitAppendable() throws Exception { - TemplateService templateService = TestTemplateService.instance(); - Map splitConfig = new HashMap<>(); + Map splitConfig = new HashMap<>(); splitConfig.put("field", "flags"); splitConfig.put("separator", "\\|"); Processor splitProcessor = (new SplitProcessor.Factory()).create(splitConfig); - Map appendConfig = new HashMap<>(); - appendConfig.put("field", "flags"); - appendConfig.put("value", Collections.singletonList("additional_flag")); - Processor appendProcessor = (new AppendProcessor.Factory(templateService)).create(appendConfig); - CompoundProcessor compoundProcessor = new CompoundProcessor(splitProcessor, appendProcessor); Map source = new HashMap<>(); source.put("flags", "new|hot|super|fun|interesting"); IngestDocument ingestDocument = new IngestDocument(source, new HashMap<>()); - compoundProcessor.execute(ingestDocument); - List expectedFlags = Arrays.asList("new", "hot", "super", "fun", "interesting", "additional_flag"); - assertThat(ingestDocument.getFieldValue("flags", List.class), CoreMatchers.equalTo(expectedFlags)); + splitProcessor.execute(ingestDocument); + @SuppressWarnings("unchecked") + List flags = (List)ingestDocument.getFieldValue("flags", List.class); + assertThat(flags, equalTo(Arrays.asList("new", "hot", "super", "fun", "interesting"))); + ingestDocument.appendFieldValue("flags", "additional_flag"); + assertThat(ingestDocument.getFieldValue("flags", List.class), 
equalTo(Arrays.asList("new", "hot", "super", "fun", "interesting", "additional_flag"))); } } From 4ad5e67433397755fac7687d081a7625ef57f85c Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jan 2016 12:26:39 +0100 Subject: [PATCH 282/347] Geoip processor: remove redundant latitude and longitude fields and make location an object with lat and lon subfields --- .../elasticsearch/ingest/geoip/GeoIpProcessor.java | 13 ++++--------- .../ingest/geoip/GeoIpProcessorTests.java | 12 ++++++------ .../test/ingest_geoip/20_geoip_processor.yaml | 10 +++++----- 3 files changed, 15 insertions(+), 20 deletions(-) diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index b6ec723e1f3..b1c25f5a1ec 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -163,16 +163,11 @@ public final class GeoIpProcessor extends AbstractProcessor { case TIMEZONE: geoData.put("timezone", location.getTimeZone()); break; - case LATITUDE: - geoData.put("latitude", location.getLatitude()); - break; - case LONGITUDE: - geoData.put("longitude", location.getLongitude()); - break; case LOCATION: - if (location.getLatitude() != null && location.getLongitude() != null) { - geoData.put("location", Arrays.asList(location.getLongitude(), location.getLatitude())); - } + Map locationObject = new HashMap<>(); + locationObject.put("lat", location.getLatitude()); + locationObject.put("lon", location.getLongitude()); + geoData.put("location", locationObject); break; } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index aa0d6e6d6af..43517986c8e 100644 --- 
a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -20,12 +20,11 @@ package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; -import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; -import java.util.Arrays; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; @@ -47,7 +46,7 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("82.170.213.79")); @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData.size(), equalTo(10)); + assertThat(geoData.size(), equalTo(8)); assertThat(geoData.get("ip"), equalTo("82.170.213.79")); assertThat(geoData.get("country_iso_code"), equalTo("NL")); assertThat(geoData.get("country_name"), equalTo("Netherlands")); @@ -55,9 +54,10 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("region_name"), equalTo("North Holland")); assertThat(geoData.get("city_name"), equalTo("Amsterdam")); assertThat(geoData.get("timezone"), equalTo("Europe/Amsterdam")); - assertThat(geoData.get("latitude"), equalTo(52.374)); - assertThat(geoData.get("longitude"), equalTo(4.8897)); - assertThat(geoData.get("location"), equalTo(Arrays.asList(4.8897d, 52.374d))); + Map location = new HashMap<>(); + location.put("lat", 52.374d); + location.put("lon", 4.8897d); + assertThat(geoData.get("location"), equalTo(location)); } public void testCountry() throws Exception { diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml 
b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml index e8da23e0edd..704f288646c 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml @@ -33,7 +33,8 @@ - length: { _source.geoip: 5 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - - match: { _source.geoip.location: [-93.2166, 44.9759] } + - match: { _source.geoip.location.lon: -93.2166 } + - match: { _source.geoip.location.lat: 44.9759 } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } @@ -74,13 +75,12 @@ type: test id: 1 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 10 } + - length: { _source.geoip: 8 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.ip: "128.101.101.101" } - - match: { _source.geoip.latitude: 44.9759 } - - match: { _source.geoip.longitude: -93.2166 } - - match: { _source.geoip.location: [-93.2166, 44.9759] } + - match: { _source.geoip.location.lon: -93.2166 } + - match: { _source.geoip.location.lat: 44.9759 } - match: { _source.geoip.timezone: "America/Chicago" } - match: { _source.geoip.country_name: "United States" } - match: { _source.geoip.region_name: "Minnesota" } From fa4e6020cd2441633369413ce683bd4722354c6c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 22 Jan 2016 06:28:13 -0500 Subject: [PATCH 283/347] Break out packaging breaking changes --- docs/reference/migration/migrate_3_0.asciidoc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 058995727ae..76b1ddb417e 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ 
b/docs/reference/migration/migrate_3_0.asciidoc @@ -16,6 +16,7 @@ your application to Elasticsearch 3.0. * <> * <> * <> +* <> [[breaking_30_search_changes]] === Warmers @@ -645,7 +646,10 @@ any more. Percolator documents are no longer excluded from the search response. -=== Default logging using systemd +[[breaking_30_packaging]] +=== Packaging + +==== Default logging using systemd (since Elasticsearch 2.2.0) In previous versions of Elasticsearch, the default logging configuration routed standard output to /dev/null and standard error to From ac174aabb0fd8d2eba4e03669f587f978fc7b2b5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Jan 2016 12:21:30 +0100 Subject: [PATCH 284/347] Migrate gateway settings to the new settings API. --- .../common/settings/ClusterSettings.java | 8 ++++ .../elasticsearch/gateway/GatewayService.java | 44 ++++++++++++++----- 2 files changed, 41 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 9262b22dcef..e220fcf5de0 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStoreConfig; @@ -140,6 +141,13 @@ public final class ClusterSettings extends AbstractScopedSettings { DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING, DiscoverySettings.COMMIT_TIMEOUT_SETTING, DiscoverySettings.NO_MASTER_BLOCK_SETTING, + 
GatewayService.EXPECTED_DATA_NODES_SETTING, + GatewayService.EXPECTED_MASTER_NODES_SETTING, + GatewayService.EXPECTED_NODES_SETTING, + GatewayService.RECOVER_AFTER_DATA_NODES_SETTING, + GatewayService.RECOVER_AFTER_MASTER_NODES_SETTING, + GatewayService.RECOVER_AFTER_NODES_SETTING, + GatewayService.RECOVER_AFTER_TIME_SETTING, HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 80e3be78093..af565a6002b 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoveryService; @@ -49,6 +50,21 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public class GatewayService extends AbstractLifecycleComponent implements ClusterStateListener { + public static final Setting EXPECTED_NODES_SETTING = Setting.intSetting( + "gateway.expected_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting EXPECTED_DATA_NODES_SETTING = Setting.intSetting( + "gateway.expected_data_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting EXPECTED_MASTER_NODES_SETTING = Setting.intSetting( + "gateway.expected_master_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting 
RECOVER_AFTER_TIME_SETTING = Setting.positiveTimeSetting( + "gateway.recover_after_time", TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); + public static final Setting RECOVER_AFTER_NODES_SETTING = Setting.intSetting( + "gateway.recover_after_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting RECOVER_AFTER_DATA_NODES_SETTING = Setting.intSetting( + "gateway.recover_after_data_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting RECOVER_AFTER_MASTER_NODES_SETTING = Setting.intSetting( + "gateway.recover_after_master_nodes", 0, 0, false, Setting.Scope.CLUSTER); + public static final ClusterBlock STATE_NOT_RECOVERED_BLOCK = new ClusterBlock(1, "state not recovered / initialized", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); public static final TimeValue DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET = TimeValue.timeValueMinutes(5); @@ -84,20 +100,26 @@ public class GatewayService extends AbstractLifecycleComponent i this.discoveryService = discoveryService; this.threadPool = threadPool; // allow to control a delay of when indices will get created - this.expectedNodes = this.settings.getAsInt("gateway.expected_nodes", -1); - this.expectedDataNodes = this.settings.getAsInt("gateway.expected_data_nodes", -1); - this.expectedMasterNodes = this.settings.getAsInt("gateway.expected_master_nodes", -1); + this.expectedNodes = EXPECTED_NODES_SETTING.get(this.settings); + this.expectedDataNodes = EXPECTED_DATA_NODES_SETTING.get(this.settings); + this.expectedMasterNodes = EXPECTED_MASTER_NODES_SETTING.get(this.settings); - TimeValue defaultRecoverAfterTime = null; - if (expectedNodes >= 0 || expectedDataNodes >= 0 || expectedMasterNodes >= 0) { - defaultRecoverAfterTime = DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET; + if (RECOVER_AFTER_TIME_SETTING.exists(this.settings)) { + recoverAfterTime = RECOVER_AFTER_TIME_SETTING.get(this.settings); + } else if (expectedNodes >= 0 || 
expectedDataNodes >= 0 || expectedMasterNodes >= 0) { + recoverAfterTime = DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET; + } else { + recoverAfterTime = null; } - - this.recoverAfterTime = this.settings.getAsTime("gateway.recover_after_time", defaultRecoverAfterTime); - this.recoverAfterNodes = this.settings.getAsInt("gateway.recover_after_nodes", -1); - this.recoverAfterDataNodes = this.settings.getAsInt("gateway.recover_after_data_nodes", -1); + this.recoverAfterNodes = RECOVER_AFTER_NODES_SETTING.get(this.settings); + this.recoverAfterDataNodes = RECOVER_AFTER_DATA_NODES_SETTING.get(this.settings); // default the recover after master nodes to the minimum master nodes in the discovery - this.recoverAfterMasterNodes = this.settings.getAsInt("gateway.recover_after_master_nodes", settings.getAsInt("discovery.zen.minimum_master_nodes", -1)); + if (RECOVER_AFTER_MASTER_NODES_SETTING.exists(this.settings)) { + recoverAfterMasterNodes = RECOVER_AFTER_MASTER_NODES_SETTING.get(this.settings); + } else { + // TODO: change me once the minimum_master_nodes is changed too + recoverAfterMasterNodes = settings.getAsInt("discovery.zen.minimum_master_nodes", -1); + } // Add the not recovered as initial state block, we don't allow anything until this.clusterService.addInitialStateBlock(STATE_NOT_RECOVERED_BLOCK); From f402f427cc48f7743177bc1d987c691067e9b39d Mon Sep 17 00:00:00 2001 From: Glen Smith Date: Thu, 21 Jan 2016 18:19:47 -0500 Subject: [PATCH 285/347] Update function-score-query.asciidoc Make explanatory text match value used in example body Closes #16165 --- docs/reference/query-dsl/function-score-query.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 08e5e575f20..39fdae80242 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -300,9 
+300,9 @@ location field. You want to compute a decay function depending on how far the hotel is from a given location. You might not immediately see what scale to choose for the gauss function, but you can say something like: "At a distance of 2km from the desired location, the score should -be reduced by one third." +be reduced to one third." The parameter "scale" will then be adjusted automatically to assure that -the score function computes a score of 0.5 for hotels that are 2km away +the score function computes a score of 0.33 for hotels that are 2km away from the desired location. From 9c286b601e8d08a7177b8f989ad959374373d034 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 22 Jan 2016 13:51:06 +0100 Subject: [PATCH 286/347] Convert "indices.*" settings to new infra. --- .../common/settings/ClusterSettings.java | 14 +++++++++++++- .../indices/analysis/HunspellService.java | 13 ++++++++----- .../cache/request/IndicesRequestCache.java | 17 +++++++++-------- .../fielddata/cache/IndicesFieldDataCache.java | 12 ++++++------ .../indices/store/IndicesStore.java | 5 +++-- .../indices/analyze/HunspellServiceIT.java | 12 ++++++------ .../indices/stats/IndexStatsIT.java | 2 +- .../store/IndicesStoreIntegrationIT.java | 2 +- .../org/elasticsearch/test/ESIntegTestCase.java | 2 +- 9 files changed, 48 insertions(+), 31 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 9262b22dcef..9a075bbf98e 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -41,8 +41,12 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStoreConfig; +import 
org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; +import org.elasticsearch.indices.cache.request.IndicesRequestCache; +import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; @@ -160,5 +164,13 @@ public final class ClusterSettings extends AbstractScopedSettings { IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING, - ScriptService.SCRIPT_CACHE_SIZE_SETTING))); + ScriptService.SCRIPT_CACHE_SIZE_SETTING, + IndicesFieldDataCache.INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING, + IndicesFieldDataCache.INDICES_FIELDDATA_CACHE_SIZE_KEY, + IndicesRequestCache.INDICES_CACHE_QUERY_SIZE, + IndicesRequestCache.INDICES_CACHE_QUERY_EXPIRE, + HunspellService.HUNSPELL_LAZY_LOAD, + HunspellService.HUNSPELL_IGNORE_CASE, + HunspellService.HUNSPELL_DICTIONARY_OPTIONS, + IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT))); } diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 3e63b6fba65..429dc5614c4 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -70,8 
+71,9 @@ import java.util.function.Function; */ public class HunspellService extends AbstractComponent { - public final static String HUNSPELL_LAZY_LOAD = "indices.analysis.hunspell.dictionary.lazy"; - public final static String HUNSPELL_IGNORE_CASE = "indices.analysis.hunspell.dictionary.ignore_case"; + public final static Setting HUNSPELL_LAZY_LOAD = Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, false, Setting.Scope.CLUSTER); + public final static Setting HUNSPELL_IGNORE_CASE = Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, false, Setting.Scope.CLUSTER); + public final static Setting HUNSPELL_DICTIONARY_OPTIONS = Setting.groupSetting("indices.analysis.hunspell.dictionary.", false, Setting.Scope.CLUSTER); private final static String OLD_HUNSPELL_LOCATION = "indices.analysis.hunspell.dictionary.location"; private final ConcurrentHashMap dictionaries = new ConcurrentHashMap<>(); private final Map knownDictionaries; @@ -83,7 +85,7 @@ public class HunspellService extends AbstractComponent { super(settings); this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); this.hunspellDir = resolveHunspellDirectory(settings, env); - this.defaultIgnoreCase = settings.getAsBoolean(HUNSPELL_IGNORE_CASE, false); + this.defaultIgnoreCase = HUNSPELL_IGNORE_CASE.get(settings); this.loadingFunction = (locale) -> { try { return loadDictionary(locale, settings, env); @@ -91,7 +93,7 @@ public class HunspellService extends AbstractComponent { throw new IllegalStateException("failed to load hunspell dictionary for locale: " + locale, e); } }; - if (!settings.getAsBoolean(HUNSPELL_LAZY_LOAD, false)) { + if (!HUNSPELL_LAZY_LOAD.get(settings)) { scanAndLoadDictionaries(); } @@ -162,7 +164,8 @@ public class HunspellService extends AbstractComponent { } // merging node settings with hunspell dictionary specific settings - nodeSettings = loadDictionarySettings(dicDir, 
nodeSettings.getByPrefix("indices.analysis.hunspell.dictionary." + locale + ".")); + Settings dictSettings = HUNSPELL_DICTIONARY_OPTIONS.get(nodeSettings); + nodeSettings = loadDictionarySettings(dicDir, dictSettings.getByPrefix(locale)); boolean ignoreCase = nodeSettings.getAsBoolean("ignore_case", defaultIgnoreCase); diff --git a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java index 6a252178d4d..57ece4045d1 100644 --- a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -80,10 +81,10 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis * since we are checking on the cluster state IndexMetaData always. 
*/ public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = Setting.boolSetting("index.requests.cache.enable", true, true, Setting.Scope.INDEX); - public static final String INDICES_CACHE_REQUEST_CLEAN_INTERVAL = "indices.requests.cache.clean_interval"; + public static final Setting INDICES_CACHE_REQUEST_CLEAN_INTERVAL = Setting.positiveTimeSetting("indices.requests.cache.clean_interval", new TimeValue(0), false, Setting.Scope.CLUSTER); - public static final String INDICES_CACHE_QUERY_SIZE = "indices.requests.cache.size"; - public static final String INDICES_CACHE_QUERY_EXPIRE = "indices.requests.cache.expire"; + public static final Setting INDICES_CACHE_QUERY_SIZE = Setting.byteSizeSetting("indices.requests.cache.size", "1%", false, Setting.Scope.CLUSTER); + public static final Setting INDICES_CACHE_QUERY_EXPIRE = Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), false, Setting.Scope.CLUSTER); private static final Set CACHEABLE_SEARCH_TYPES = EnumSet.of(SearchType.QUERY_THEN_FETCH, SearchType.QUERY_AND_FETCH); @@ -98,7 +99,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis //TODO make these changes configurable on the cluster level - private final String size; + private final ByteSizeValue size; private final TimeValue expire; private volatile Cache cache; @@ -108,11 +109,11 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis super(settings); this.clusterService = clusterService; this.threadPool = threadPool; - this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL, TimeValue.timeValueSeconds(60)); + this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL.getKey(), TimeValue.timeValueSeconds(60)); - this.size = settings.get(INDICES_CACHE_QUERY_SIZE, "1%"); + this.size = INDICES_CACHE_QUERY_SIZE.get(settings); - this.expire = settings.getAsTime(INDICES_CACHE_QUERY_EXPIRE, null); + this.expire = 
INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? INDICES_CACHE_QUERY_EXPIRE.get(settings) : null; buildCache(); this.reaper = new Reaper(); @@ -121,7 +122,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis private void buildCache() { - long sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size, INDICES_CACHE_QUERY_SIZE).bytes(); + long sizeInBytes = size.bytes(); CacheBuilder cacheBuilder = CacheBuilder.builder() .setMaximumWeight(sizeInBytes).weigher((k, v) -> k.ramBytesUsed() + v.ramBytesUsed()).removalListener(this); diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 9181c6248ea..57e8cc8974d 100644 --- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -53,8 +54,8 @@ import java.util.function.ToLongBiFunction; */ public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener { - public static final String FIELDDATA_CLEAN_INTERVAL_SETTING = "indices.fielddata.cache.cleanup_interval"; - public static final String INDICES_FIELDDATA_CACHE_SIZE_KEY = "indices.fielddata.cache.size"; + public static final Setting INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.fielddata.cache.cleanup_interval", TimeValue.timeValueMinutes(1), false, 
Setting.Scope.CLUSTER); + public static final Setting INDICES_FIELDDATA_CACHE_SIZE_KEY = Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); private final IndicesFieldDataCacheListener indicesFieldDataCacheListener; @@ -68,18 +69,17 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL super(settings); this.threadPool = threadPool; this.indicesFieldDataCacheListener = indicesFieldDataCacheListener; - final String size = settings.get(INDICES_FIELDDATA_CACHE_SIZE_KEY, "-1"); - final long sizeInBytes = settings.getAsMemory(INDICES_FIELDDATA_CACHE_SIZE_KEY, "-1").bytes(); + final long sizeInBytes = INDICES_FIELDDATA_CACHE_SIZE_KEY.get(settings).bytes(); CacheBuilder cacheBuilder = CacheBuilder.builder() .removalListener(this); if (sizeInBytes > 0) { cacheBuilder.setMaximumWeight(sizeInBytes).weigher(new FieldDataWeigher()); } - logger.debug("using size [{}] [{}]", size, new ByteSizeValue(sizeInBytes)); + logger.debug("using size [{}]", new ByteSizeValue(sizeInBytes)); cache = cacheBuilder.build(); - this.cleanInterval = settings.getAsTime(FIELDDATA_CLEAN_INTERVAL_SETTING, TimeValue.timeValueMinutes(1)); + this.cleanInterval = INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING.get(settings); // Start thread that will manage cleaning the field data cache periodically threadPool.schedule(this.cleanInterval, ThreadPool.Names.SAME, new FieldDataCacheCleaner(this.cache, this.logger, this.threadPool, this.cleanInterval)); diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index d1075031ee1..49851180ec7 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -68,7 +69,7 @@ import java.util.concurrent.atomic.AtomicInteger; public class IndicesStore extends AbstractComponent implements ClusterStateListener, Closeable { // TODO this class can be foled into either IndicesService and partially into IndicesClusterStateService there is no need for a seperate public service - public static final String INDICES_STORE_DELETE_SHARD_TIMEOUT = "indices.store.delete.shard.timeout"; + public static final Setting INDICES_STORE_DELETE_SHARD_TIMEOUT = Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER); public static final String ACTION_SHARD_EXISTS = "internal:index/shard/exists"; private static final EnumSet ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED); private final IndicesService indicesService; @@ -85,7 +86,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe this.clusterService = clusterService; this.transportService = transportService; transportService.registerRequestHandler(ACTION_SHARD_EXISTS, ShardActiveRequest::new, ThreadPool.Names.SAME, new ShardActiveRequestHandler()); - this.deleteShardTimeout = settings.getAsTime(INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS)); + this.deleteShardTimeout = INDICES_STORE_DELETE_SHARD_TIMEOUT.get(settings); clusterService.addLast(this); } diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java index 722a4ebde8a..b9bf5becba9 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java @@ -40,8 +40,8 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testLocaleDirectoryWithNodeLevelConfig() throws Exception { Settings settings = Settings.settingsBuilder() .put("path.conf", getDataPath("/indices/analyze/conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) - .put(HUNSPELL_IGNORE_CASE, true) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(HUNSPELL_IGNORE_CASE.getKey(), true) .build(); internalCluster().startNode(settings); @@ -53,8 +53,8 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testLocaleDirectoryWithLocaleSpecificConfig() throws Exception { Settings settings = Settings.settingsBuilder() .put("path.conf", getDataPath("/indices/analyze/conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) - .put(HUNSPELL_IGNORE_CASE, true) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(HUNSPELL_IGNORE_CASE.getKey(), true) .put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false) .put("indices.analysis.hunspell.dictionary.en_US.ignore_case", false) .build(); @@ -75,7 +75,7 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testDicWithNoAff() throws Exception { Settings settings = Settings.settingsBuilder() .put("path.conf", getDataPath("/indices/analyze/no_aff_conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) .build(); Dictionary dictionary = null; @@ -93,7 +93,7 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testDicWithTwoAffs() throws Exception { Settings settings = Settings.settingsBuilder() .put("path.conf", getDataPath("/indices/analyze/two_aff_conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) .build(); Dictionary dictionary = null; diff --git 
a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 18c03e3739a..756a9af43b1 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -78,7 +78,7 @@ public class IndexStatsIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { //Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. Thread.sleep for 60s is bad return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL, "1ms") + .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL.getKey(), "1ms") .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) .build(); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 18d56eee88f..9d35445aaa9 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -86,7 +86,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // by default this value is 1 sec in tests (30 sec in practice) but we adding disruption here // which is between 1 and 2 sec can cause each of the shard deletion requests to timeout. // to prevent this we are setting the timeout here to something highish ie. 
the default in practice - .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS)) + .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(30, TimeUnit.SECONDS)) .build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index ff09ba0e8f6..a184dc86d75 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1685,7 +1685,7 @@ public abstract class ESIntegTestCase extends ESTestCase { .put("script.indexed", "on") .put("script.inline", "on") // wait short time for other active shards before actually deleting, default 30s not needed in tests - .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(1, TimeUnit.SECONDS)); + .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(1, TimeUnit.SECONDS)); return builder.build(); } From c6956a9dc9a827ba92d8dc16548b50a72c28bfe2 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 22 Jan 2016 14:00:42 +0100 Subject: [PATCH 287/347] Merge pull request #16171 from dmydlarz/patch-1 Fix missing break line before unordered item list --- docs/reference/docs/index_.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 3aacdc7ed10..5f79efbcc60 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -33,6 +33,7 @@ The result of the above index operation is: -------------------------------------------------- The `_shards` header provides information about the replication process of the index operation. + * `total` - Indicates to how many shard copies (primary and replica shards) the index operation should be executed on. 
* `successful`- Indicates the number of shard copies the index operation succeeded on. * `failures` - An array that contains replication related errors in the case an index operation failed on a replica shard. From dc69d857bfeac58f8215a1dcffc50483d120bcb7 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Jan 2016 14:12:55 +0100 Subject: [PATCH 288/347] Make disabled fielddata loading fail earlier. Currently this fails when loading data from a segment, which means that it will never fail on an empty index since it does not have segments. Closes #16135 --- .../fielddata/IndexFieldDataService.java | 31 +++-- .../plain/DisabledIndexFieldData.java | 72 ----------- .../DisabledFieldDataFormatTests.java | 115 ------------------ .../fielddata/IndexFieldDataServiceTests.java | 19 +++ 4 files changed, 38 insertions(+), 199 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java delete mode 100644 core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index b898f3ffd2a..f02f924bc39 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData; import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData; -import org.elasticsearch.index.fielddata.plain.DisabledIndexFieldData; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.fielddata.plain.GeoPointArrayIndexFieldData; import 
org.elasticsearch.index.fielddata.plain.IndexIndexFieldData; @@ -79,6 +78,14 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo private static final String DOC_VALUES_FORMAT = "doc_values"; private static final String PAGED_BYTES_FORMAT = "paged_bytes"; + private static final IndexFieldData.Builder DISABLED_BUILDER = new IndexFieldData.Builder() { + @Override + public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, + CircuitBreakerService breakerService, MapperService mapperService) { + throw new IllegalStateException("Field data loading is forbidden on [" + fieldType.name() + "]"); + } + }; + private final static Map buildersByType; private final static Map docValuesBuildersByType; private final static Map, IndexFieldData.Builder> buildersByTypeAndFormat; @@ -96,7 +103,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo buildersByTypeBuilder.put("geo_point", new GeoPointArrayIndexFieldData.Builder()); buildersByTypeBuilder.put(ParentFieldMapper.NAME, new ParentChildIndexFieldData.Builder()); buildersByTypeBuilder.put(IndexFieldMapper.NAME, new IndexIndexFieldData.Builder()); - buildersByTypeBuilder.put("binary", new DisabledIndexFieldData.Builder()); + buildersByTypeBuilder.put("binary", DISABLED_BUILDER); buildersByTypeBuilder.put(BooleanFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER); buildersByType = unmodifiableMap(buildersByTypeBuilder); @@ -117,35 +124,35 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo buildersByTypeAndFormat = MapBuilder., IndexFieldData.Builder>newMapBuilder() .put(Tuple.tuple("string", PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder()) .put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder()) - .put(Tuple.tuple("string", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("string", DISABLED_FORMAT), DISABLED_BUILDER) 
.put(Tuple.tuple("float", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT)) - .put(Tuple.tuple("float", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("float", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple("double", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE)) - .put(Tuple.tuple("double", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("double", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple("byte", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE)) - .put(Tuple.tuple("byte", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("byte", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple("short", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT)) - .put(Tuple.tuple("short", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("short", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple("int", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT)) - .put(Tuple.tuple("int", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("int", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple("long", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG)) - .put(Tuple.tuple("long", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("long", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointArrayIndexFieldData.Builder()) .put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new AbstractGeoPointDVIndexFieldData.Builder()) - .put(Tuple.tuple("geo_point", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + 
.put(Tuple.tuple("geo_point", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple("binary", DOC_VALUES_FORMAT), new BytesBinaryDVIndexFieldData.Builder()) - .put(Tuple.tuple("binary", DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple("binary", DISABLED_FORMAT), DISABLED_BUILDER) .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN)) - .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), new DisabledIndexFieldData.Builder()) + .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER) .immutableMap(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java deleted file mode 100644 index 86daaf1a252..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.AtomicFieldData; -import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.search.MultiValueMode; - -/** - * A field data implementation that forbids loading and will throw an {@link IllegalStateException} if you try to load - * {@link AtomicFieldData} instances. - */ -public final class DisabledIndexFieldData extends AbstractIndexFieldData { - - public static class Builder implements IndexFieldData.Builder { - @Override - public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, - IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - // Ignore Circuit Breaker - return new DisabledIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache); - } - } - - public DisabledIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) { - super(indexSettings, fieldName, fieldDataType, cache); - } - - @Override - public AtomicFieldData loadDirect(LeafReaderContext context) throws Exception { - throw fail(); - } - - @Override - protected AtomicFieldData empty(int maxDoc) { - throw fail(); - } - - @Override - public IndexFieldData.XFieldComparatorSource comparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) { - throw fail(); - } - - private IllegalStateException fail() { - return new 
IllegalStateException("Field data loading is forbidden on " + getFieldName()); - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java deleted file mode 100644 index 0601a302428..00000000000 --- a/core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata; - -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.test.ESSingleNodeTestCase; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; - -public class DisabledFieldDataFormatTests extends ESSingleNodeTestCase { - - public void test() throws Exception { - createIndex("test", Settings.EMPTY, "type", "s", "type=string"); - logger.info("indexing data start"); - for (int i = 0; i < 10; ++i) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("s", "value" + i).execute().actionGet(); - } - logger.info("indexing data end"); - - final int searchCycles = 1; - - client().admin().indices().prepareRefresh().execute().actionGet(); - - // disable field data - updateFormat("disabled"); - - SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values()); - SearchResponse resp = null; - // try to run something that relies on field data and make sure that it fails - for (int i = 0; i < searchCycles; i++) { - try { - resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s") - .collectMode(aggCollectionMode)).execute().actionGet(); - assertFailures(resp); - } catch (SearchPhaseExecutionException e) { - // expected - } - } - - // enable it again - updateFormat("paged_bytes"); - - // try to run something that relies on field data and make sure that it works - for (int i = 
0; i < searchCycles; i++) { - resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s") - .collectMode(aggCollectionMode)).execute().actionGet(); - assertNoFailures(resp); - } - - // disable it again - updateFormat("disabled"); - - // this time, it should work because segments are already loaded - for (int i = 0; i < searchCycles; i++) { - resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s") - .collectMode(aggCollectionMode)).execute().actionGet(); - assertNoFailures(resp); - } - - // but add more docs and the new segment won't be loaded - client().prepareIndex("test", "type", "-1").setSource("s", "value").execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); - for (int i = 0; i < searchCycles; i++) { - try { - resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s") - .collectMode(aggCollectionMode)).execute().actionGet(); - assertFailures(resp); - } catch (SearchPhaseExecutionException e) { - // expected - } - } - } - - private void updateFormat(final String format) throws Exception { - logger.info(">> put mapping start {}", format); - assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource( - XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("s") - .field("type", "string") - .startObject("fielddata") - .field("format", format) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject()).get()); - logger.info(">> put mapping end {}", format); - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 3d4f63daa33..e8b0d03b049 100644 --- 
a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -234,4 +234,23 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { public void testRequireDocValuesOnBools() { doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType()); } + + public void testDisabled() { + ThreadPool threadPool = new ThreadPool("random_threadpool_name"); + StringFieldMapper.StringFieldType ft = new StringFieldMapper.StringFieldType(); + try { + IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool); + IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), cache, null, null); + ft.setName("some_str"); + ft.setFieldDataType(new FieldDataType("string", Settings.builder().put(FieldDataType.FORMAT_KEY, "disabled").build())); + try { + ifds.getForField(ft); + fail(); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("Field data loading is forbidden on [some_str]")); + } + } finally { + threadPool.shutdown(); + } + } } From 768d171f778bea9c76603f849770352d8e41385c Mon Sep 17 00:00:00 2001 From: Kevin Adams Date: Thu, 21 Jan 2016 07:35:07 -0700 Subject: [PATCH 289/347] Timezone: use forward slash Using a backslash causes errors when querying elasticsearch, but changing the back slash to forward slash on the timezone fixes it. 
Closes #16148 --- .../aggregations/bucket/datehistogram-aggregation.asciidoc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 5afff0cb89f..29ba5e49626 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -105,8 +105,7 @@ that bucketing should use a different time zone. Time zones may either be specified as an ISO 8601 UTC offset (e.g. `+01:00` or `-08:00`) or as a timezone id, an identifier used in the TZ database like -`America\Los_Angeles` (which would need to be escaped in JSON as -`"America\\Los_Angeles"`). +`America/Los_Angeles`. Consider the following example: From 37d02505aa9b0e2fa70d1939f0518a23fb271e54 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 22 Jan 2016 14:30:32 +0100 Subject: [PATCH 290/347] Covers Simon's comments --- .../elasticsearch/indices/analysis/HunspellService.java | 9 ++------- .../indices/cache/request/IndicesRequestCache.java | 4 ++-- .../indices/fielddata/cache/IndicesFieldDataCache.java | 1 - 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 429dc5614c4..f99b39ef620 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -74,7 +74,6 @@ public class HunspellService extends AbstractComponent { public final static Setting HUNSPELL_LAZY_LOAD = Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, false, Setting.Scope.CLUSTER); public final static Setting HUNSPELL_IGNORE_CASE = Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", 
Boolean.FALSE, false, Setting.Scope.CLUSTER); public final static Setting HUNSPELL_DICTIONARY_OPTIONS = Setting.groupSetting("indices.analysis.hunspell.dictionary.", false, Setting.Scope.CLUSTER); - private final static String OLD_HUNSPELL_LOCATION = "indices.analysis.hunspell.dictionary.location"; private final ConcurrentHashMap dictionaries = new ConcurrentHashMap<>(); private final Map knownDictionaries; private final boolean defaultIgnoreCase; @@ -84,7 +83,7 @@ public class HunspellService extends AbstractComponent { public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) throws IOException { super(settings); this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); - this.hunspellDir = resolveHunspellDirectory(settings, env); + this.hunspellDir = resolveHunspellDirectory(env); this.defaultIgnoreCase = HUNSPELL_IGNORE_CASE.get(settings); this.loadingFunction = (locale) -> { try { @@ -112,11 +111,7 @@ public class HunspellService extends AbstractComponent { return dictionary; } - private Path resolveHunspellDirectory(Settings settings, Environment env) { - String location = settings.get(OLD_HUNSPELL_LOCATION, null); - if (location != null) { - throw new IllegalArgumentException("please, put your hunspell dictionaries under config/hunspell !"); - } + private Path resolveHunspellDirectory(Environment env) { return env.configFile().resolve("hunspell"); } diff --git a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java index 57ece4045d1..3cdb637d370 100644 --- a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java @@ -81,7 +81,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis * since we are checking on the cluster state IndexMetaData 
always. */ public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = Setting.boolSetting("index.requests.cache.enable", true, true, Setting.Scope.INDEX); - public static final Setting INDICES_CACHE_REQUEST_CLEAN_INTERVAL = Setting.positiveTimeSetting("indices.requests.cache.clean_interval", new TimeValue(0), false, Setting.Scope.CLUSTER); + public static final Setting INDICES_CACHE_REQUEST_CLEAN_INTERVAL = Setting.positiveTimeSetting("indices.requests.cache.clean_interval", TimeValue.timeValueSeconds(60), false, Setting.Scope.CLUSTER); public static final Setting INDICES_CACHE_QUERY_SIZE = Setting.byteSizeSetting("indices.requests.cache.size", "1%", false, Setting.Scope.CLUSTER); public static final Setting INDICES_CACHE_QUERY_EXPIRE = Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), false, Setting.Scope.CLUSTER); @@ -109,7 +109,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis super(settings); this.clusterService = clusterService; this.threadPool = threadPool; - this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL.getKey(), TimeValue.timeValueSeconds(60)); + this.cleanInterval = INDICES_CACHE_REQUEST_CLEAN_INTERVAL.get(settings); this.size = INDICES_CACHE_QUERY_SIZE.get(settings); diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 57e8cc8974d..06d4c219208 100644 --- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -76,7 +76,6 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL cacheBuilder.setMaximumWeight(sizeInBytes).weigher(new FieldDataWeigher()); } - logger.debug("using size [{}]", new ByteSizeValue(sizeInBytes)); cache = cacheBuilder.build(); 
this.cleanInterval = INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING.get(settings); From e9bb3d31a33f45752de25cd646955d07be345aff Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 22 Jan 2016 14:23:38 +0100 Subject: [PATCH 291/347] Convert "path.*" and "pidfile" to new settings infra --- .../org/elasticsearch/bootstrap/Security.java | 22 +++---- .../common/settings/ClusterSettings.java | 13 +++- .../common/settings/Setting.java | 4 ++ .../org/elasticsearch/env/Environment.java | 65 +++++++++++-------- .../internal/InternalSettingsPreparer.java | 2 +- .../org/elasticsearch/tribe/TribeService.java | 3 +- .../indices/TransportAnalyzeActionTests.java | 2 +- .../action/bulk/BulkProcessorIT.java | 3 +- .../search/SearchRequestBuilderTests.java | 3 +- .../OldIndexBackwardsCompatibilityIT.java | 5 +- .../bwcompat/RestoreBackwardsCompatIT.java | 3 +- .../client/AbstractClientHeadersTestCase.java | 3 +- .../TransportClientHeadersTests.java | 3 +- .../client/transport/TransportClientIT.java | 8 ++- .../transport/TransportClientRetryIT.java | 3 +- .../logging/log4j/Log4jESLoggerTests.java | 5 +- .../log4j/LoggingConfigurationTests.java | 28 ++++---- .../elasticsearch/env/EnvironmentTests.java | 6 +- .../env/NodeEnvironmentTests.java | 30 ++++----- .../gateway/RecoveryFromGatewayIT.java | 5 +- .../elasticsearch/index/IndexModuleTests.java | 21 +++--- .../index/IndexWithShadowReplicasIT.java | 5 +- .../ASCIIFoldingTokenFilterFactoryTests.java | 5 +- .../index/analysis/AnalysisModuleTests.java | 20 +++--- .../index/analysis/AnalysisServiceTests.java | 12 ++-- .../index/analysis/AnalysisTestsHelper.java | 2 +- .../AnalyzerBackwardsCompatTests.java | 3 +- .../index/analysis/CharFilterTests.java | 4 +- .../index/analysis/CompoundAnalysisTests.java | 4 +- .../HunspellTokenFilterFactoryTests.java | 9 +-- .../analysis/KeepFilterFactoryTests.java | 5 +- .../analysis/KeepTypesFilterFactoryTests.java | 3 +- .../LimitTokenCountFilterFactoryTests.java | 9 +-- 
.../PatternCaptureTokenFilterTests.java | 2 +- .../StemmerTokenFilterFactoryTests.java | 5 +- .../index/analysis/StopAnalyzerTests.java | 2 +- .../index/analysis/StopTokenFilterTests.java | 9 +-- .../WordDelimiterTokenFilterFactoryTests.java | 19 +++--- .../CommonGramsTokenFilterFactoryTests.java | 25 +++---- .../synonyms/SynonymsAnalysisTests.java | 2 +- .../elasticsearch/index/codec/CodecTests.java | 2 +- .../index/query/AbstractQueryTestCase.java | 4 +- .../index/shard/NewPathForShardTests.java | 4 +- .../index/shard/ShardPathTests.java | 3 +- .../indices/analyze/HunspellServiceIT.java | 9 +-- .../store/IndicesStoreIntegrationIT.java | 3 +- .../InternalSettingsPreparerTests.java | 4 +- .../PercolatorBackwardsCompatibilityIT.java | 3 +- .../plugins/PluginsServiceTests.java | 6 +- .../elasticsearch/script/FileScriptTests.java | 2 +- .../script/NativeScriptTests.java | 4 +- .../script/ScriptContextTests.java | 2 +- .../script/ScriptServiceTests.java | 4 +- .../builder/SearchSourceBuilderTests.java | 3 +- .../threadpool/SimpleThreadPoolIT.java | 3 +- .../NettyTransportMultiPortIntegrationIT.java | 3 +- .../messy/tests/ScriptedMetricTests.java | 3 +- .../tests/RenderSearchTemplateTests.java | 3 +- .../messy/tests/TemplateQueryParserTests.java | 4 +- .../messy/tests/TemplateQueryTests.java | 3 +- .../analysis/SimpleIcuAnalysisTests.java | 3 +- .../SimpleIcuCollationTokenFilterTests.java | 19 +++--- .../SimpleIcuNormalizerCharFilterTests.java | 5 +- .../index/analysis/KuromojiAnalysisTests.java | 2 +- .../analysis/SimplePhoneticAnalysisTests.java | 2 +- .../SimpleSmartChineseAnalysisTests.java | 2 +- .../index/analysis/PolishAnalysisTests.java | 2 +- .../SimplePolishTokenFilterTests.java | 4 +- .../cloud/azure/AbstractAzureTestCase.java | 3 +- .../cloud/aws/AbstractAwsTestCase.java | 3 +- .../attachments/AttachmentUnitTestCase.java | 3 +- .../Murmur3FieldMapperUpgradeTests.java | 3 +- .../size/SizeFieldMapperUpgradeTests.java | 3 +- 
.../cloud/aws/AbstractAwsTestCase.java | 3 +- .../bootstrap/EvilSecurityTests.java | 32 ++++----- .../common/cli/CheckFileCommandTests.java | 6 +- .../EvilInternalSettingsPreparerTests.java | 2 +- .../plugins/PluginManagerPermissionTests.java | 6 +- .../plugins/PluginManagerTests.java | 4 +- .../plugins/PluginManagerUnitTests.java | 4 +- .../elasticsearch/tribe/TribeUnitTests.java | 11 +++- .../smoketest/ESSmokeClientTestCase.java | 3 +- .../elasticsearch/index/MapperTestUtils.java | 2 +- .../elasticsearch/test/ESIntegTestCase.java | 4 +- .../test/ESSingleNodeTestCase.java | 5 +- .../org/elasticsearch/test/ESTestCase.java | 4 +- .../test/ExternalTestCluster.java | 3 +- .../test/InternalTestCluster.java | 13 ++-- 88 files changed, 350 insertions(+), 260 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 43ad73b5dea..dc89ce3cb19 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -241,26 +241,26 @@ final class Security { */ static void addFilePermissions(Permissions policy, Environment environment) { // read-only dirs - addPath(policy, "path.home", environment.binFile(), "read,readlink"); - addPath(policy, "path.home", environment.libFile(), "read,readlink"); - addPath(policy, "path.home", environment.modulesFile(), "read,readlink"); - addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink"); - addPath(policy, "path.conf", environment.configFile(), "read,readlink"); - addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink"); + addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binFile(), "read,readlink"); + addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libFile(), "read,readlink"); + addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesFile(), "read,readlink"); + 
addPath(policy, Environment.PATH_PLUGINS_SETTING.getKey(), environment.pluginsFile(), "read,readlink"); + addPath(policy, Environment.PATH_CONF_SETTING.getKey(), environment.configFile(), "read,readlink"); + addPath(policy, Environment.PATH_SCRIPTS_SETTING.getKey(), environment.scriptsFile(), "read,readlink"); // read-write dirs addPath(policy, "java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete"); - addPath(policy, "path.logs", environment.logsFile(), "read,readlink,write,delete"); + addPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile(), "read,readlink,write,delete"); if (environment.sharedDataFile() != null) { - addPath(policy, "path.shared_data", environment.sharedDataFile(), "read,readlink,write,delete"); + addPath(policy, Environment.PATH_SHARED_DATA_SETTING.getKey(), environment.sharedDataFile(), "read,readlink,write,delete"); } for (Path path : environment.dataFiles()) { - addPath(policy, "path.data", path, "read,readlink,write,delete"); + addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete"); } for (Path path : environment.dataWithClusterFiles()) { - addPath(policy, "path.data", path, "read,readlink,write,delete"); + addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete"); } for (Path path : environment.repoFiles()) { - addPath(policy, "path.repo", path, "read,readlink,write,delete"); + addPath(policy, Environment.PATH_REPO_SETTING.getKey(), path, "read,readlink,write,delete"); } if (environment.pidFile() != null) { // we just need permission to remove the file if its elsewhere. 
diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e220fcf5de0..13a9e880cca 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.IndexSettings; @@ -168,5 +169,15 @@ public final class ClusterSettings extends AbstractScopedSettings { IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING, - ScriptService.SCRIPT_CACHE_SIZE_SETTING))); + ScriptService.SCRIPT_CACHE_SIZE_SETTING, + Environment.PATH_CONF_SETTING, + Environment.PATH_DATA_SETTING, + Environment.PATH_HOME_SETTING, + Environment.PATH_LOGS_SETTING, + Environment.PATH_PLUGINS_SETTING, + Environment.PATH_REPO_SETTING, + Environment.PATH_SCRIPTS_SETTING, + Environment.PATH_SHARED_DATA_SETTING, + Environment.PIDFILE_SETTING + ))); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 0fc5b062e69..b67ba26c093 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -309,6 +309,10 @@ public class Setting extends ToXContentToBytes { return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope); } + public static Setting 
simpleString(String key, boolean dynamic, Scope scope) { + return new Setting<>(key, "", Function.identity(), dynamic, scope); + } + public static int parseInt(String s, int minValue, String key) { int value = Integer.parseInt(s); if (value < minValue) { diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index b6453a4707b..65d62bd9e33 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import java.io.IOException; @@ -33,6 +34,9 @@ import java.nio.file.FileStore; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.function.Function; import static org.elasticsearch.common.Strings.cleanPath; @@ -43,6 +47,15 @@ import static org.elasticsearch.common.Strings.cleanPath; // TODO: move PathUtils to be package-private here instead of // public+forbidden api! 
public class Environment { + public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", false, Setting.Scope.CLUSTER); + public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", false, Setting.Scope.CLUSTER); + public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", false, Setting.Scope.CLUSTER); + public static final Setting> PATH_DATA_SETTING = Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", false, Setting.Scope.CLUSTER); + public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", false, Setting.Scope.CLUSTER); + public static final Setting> PATH_REPO_SETTING = Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting PATH_SHARED_DATA_SETTING = Setting.simpleString("path.shared_data", false, Setting.Scope.CLUSTER); + public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", false, Setting.Scope.CLUSTER); private final Settings settings; @@ -95,64 +108,64 @@ public class Environment { public Environment(Settings settings) { this.settings = settings; final Path homeFile; - if (settings.get("path.home") != null) { - homeFile = PathUtils.get(cleanPath(settings.get("path.home"))); + if (PATH_HOME_SETTING.exists(settings)) { + homeFile = PathUtils.get(cleanPath(PATH_HOME_SETTING.get(settings))); } else { - throw new IllegalStateException("path.home is not configured"); + throw new IllegalStateException(PATH_HOME_SETTING.getKey() + " is not configured"); } - if (settings.get("path.conf") != null) { - configFile = PathUtils.get(cleanPath(settings.get("path.conf"))); + if (PATH_CONF_SETTING.exists(settings)) { + configFile = PathUtils.get(cleanPath(PATH_CONF_SETTING.get(settings))); } else { configFile = 
homeFile.resolve("config"); } - if (settings.get("path.scripts") != null) { - scriptsFile = PathUtils.get(cleanPath(settings.get("path.scripts"))); + if (PATH_SCRIPTS_SETTING.exists(settings)) { + scriptsFile = PathUtils.get(cleanPath(PATH_SCRIPTS_SETTING.get(settings))); } else { scriptsFile = configFile.resolve("scripts"); } - if (settings.get("path.plugins") != null) { - pluginsFile = PathUtils.get(cleanPath(settings.get("path.plugins"))); + if (PATH_PLUGINS_SETTING.exists(settings)) { + pluginsFile = PathUtils.get(cleanPath(PATH_PLUGINS_SETTING.get(settings))); } else { pluginsFile = homeFile.resolve("plugins"); } - String[] dataPaths = settings.getAsArray("path.data"); - if (dataPaths.length > 0) { - dataFiles = new Path[dataPaths.length]; - dataWithClusterFiles = new Path[dataPaths.length]; - for (int i = 0; i < dataPaths.length; i++) { - dataFiles[i] = PathUtils.get(dataPaths[i]); + List dataPaths = PATH_DATA_SETTING.get(settings); + if (dataPaths.isEmpty() == false) { + dataFiles = new Path[dataPaths.size()]; + dataWithClusterFiles = new Path[dataPaths.size()]; + for (int i = 0; i < dataPaths.size(); i++) { + dataFiles[i] = PathUtils.get(dataPaths.get(i)); dataWithClusterFiles[i] = dataFiles[i].resolve(ClusterName.clusterNameFromSettings(settings).value()); } } else { dataFiles = new Path[]{homeFile.resolve("data")}; dataWithClusterFiles = new Path[]{homeFile.resolve("data").resolve(ClusterName.clusterNameFromSettings(settings).value())}; } - if (settings.get("path.shared_data") != null) { - sharedDataFile = PathUtils.get(cleanPath(settings.get("path.shared_data"))); + if (PATH_SHARED_DATA_SETTING.exists(settings)) { + sharedDataFile = PathUtils.get(cleanPath(PATH_SHARED_DATA_SETTING.get(settings))); } else { sharedDataFile = null; } - String[] repoPaths = settings.getAsArray("path.repo"); - if (repoPaths.length > 0) { - repoFiles = new Path[repoPaths.length]; - for (int i = 0; i < repoPaths.length; i++) { - repoFiles[i] = PathUtils.get(repoPaths[i]); + 
List repoPaths = PATH_REPO_SETTING.get(settings); + if (repoPaths.isEmpty() == false) { + repoFiles = new Path[repoPaths.size()]; + for (int i = 0; i < repoPaths.size(); i++) { + repoFiles[i] = PathUtils.get(repoPaths.get(i)); } } else { repoFiles = new Path[0]; } - if (settings.get("path.logs") != null) { - logsFile = PathUtils.get(cleanPath(settings.get("path.logs"))); + if (PATH_LOGS_SETTING.exists(settings)) { + logsFile = PathUtils.get(cleanPath(PATH_LOGS_SETTING.get(settings))); } else { logsFile = homeFile.resolve("logs"); } - if (settings.get("pidfile") != null) { - pidFile = PathUtils.get(cleanPath(settings.get("pidfile"))); + if (PIDFILE_SETTING.exists(settings)) { + pidFile = PathUtils.get(cleanPath(PIDFILE_SETTING.get(settings))); } else { pidFile = null; } diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 1c2ab33e9be..47f262d8d06 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -108,7 +108,7 @@ public class InternalSettingsPreparer { environment = new Environment(output.build()); // we put back the path.logs so we can use it in the logging configuration file - output.put("path.logs", cleanPath(environment.logsFile().toAbsolutePath().toString())); + output.put(Environment.PATH_LOGS_SETTING.getKey(), cleanPath(environment.logsFile().toAbsolutePath().toString())); return new Environment(output.build()); } diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 78453c9eac6..1a89bc2f918 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.regex.Regex; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; @@ -132,7 +133,7 @@ public class TribeService extends AbstractLifecycleComponent { for (Map.Entry entry : nodesSettings.entrySet()) { Settings.Builder sb = Settings.builder().put(entry.getValue()); sb.put("name", settings.get("name") + "/" + entry.getKey()); - sb.put("path.home", settings.get("path.home")); // pass through ES home dir + sb.put(Environment.PATH_HOME_SETTING.getKey(), Environment.PATH_HOME_SETTING.get(settings)); // pass through ES home dir sb.put(TRIBE_NAME, entry.getKey()); if (sb.get("http.enabled") == null) { sb.put("http.enabled", false); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index cb0e0fa0f78..4d53d6cd1e5 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -47,7 +47,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index 04b58e6b9fc..70d78e78f23 100644 --- 
a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -156,7 +157,7 @@ public class BulkProcessorIT extends ESIntegTestCase { public void testBulkProcessorConcurrentRequestsNoNodeAvailableException() throws Exception { //we create a transport client with no nodes to make sure it throws NoNodeAvailableException Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); Client transportClient = TransportClient.builder().settings(settings).build(); diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java index fc6453318cf..9cef4d46e8b 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; @@ -38,7 +39,7 @@ public class SearchRequestBuilderTests extends ESTestCase { //this client will not be hit by any request, but it needs to be a non null proper client //that is why we create it but we 
don't add any transport address to it Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); client = TransportClient.builder().settings(settings).build(); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 22d93f024e1..74881413799 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.IndexSettings; @@ -142,13 +143,13 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { Path baseTempDir = createTempDir(); // start single data path node Settings.Builder nodeSettings = Settings.builder() - .put("path.data", baseTempDir.resolve("single-path").toAbsolutePath()) + .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("single-path").toAbsolutePath()) .put("node.master", false); // workaround for dangling index loading issue when node is master InternalTestCluster.Async singleDataPathNode = internalCluster().startNodeAsync(nodeSettings.build()); // start multi data path node nodeSettings = Settings.builder() - .put("path.data", baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath()) + .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + 
baseTempDir.resolve("multi-path2").toAbsolutePath()) .put("node.master", false); // workaround for dangling index loading issue when node is master InternalTestCluster.Async multiDataPathNode = internalCluster().startNodeAsync(nodeSettings.build()); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 6ad05b3ff84..fec96ae236f 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.RestoreInfo; @@ -64,7 +65,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { // Configure using path.repo return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.repo", getBwcIndicesPath()) + .put(Environment.PATH_REPO_SETTING.getKey(), getBwcIndicesPath()) .build(); } else { // Configure using url white list diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index b814cff520c..3bdfc1fb7ee 100644 --- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -48,6 +48,7 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.support.Headers; 
import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportMessage; @@ -90,7 +91,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { public void initClient() { Settings settings = Settings.builder() .put(HEADER_SETTINGS) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); threadPool = new ThreadPool("test-" + getTestName()); client = buildClient(settings, ACTIONS); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index f127ae28378..e61dab2fc4b 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; @@ -83,7 +84,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase { .put("node.name", "transport_client_" + this.getTestName() + "_1") .put("client.transport.nodes_sampler_interval", "1s") .put(HEADER_SETTINGS) - .put("path.home", createTempDir().toString()).build()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build()) .addPlugin(InternalTransportService.TestPlugin.class) .build(); diff --git 
a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index f01fdffd147..8ab432d6e41 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; @@ -52,7 +53,7 @@ public class TransportClientIT extends ESIntegTestCase { TransportClientNodesService nodeService = client.nodeService(); Node node = new Node(Settings.builder() .put(internalCluster().getDefaultSettings()) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("node.name", "testNodeVersionIsUpdated") .put("http.enabled", false) .put("node.data", false) @@ -89,7 +90,10 @@ public class TransportClientIT extends ESIntegTestCase { } public void testThatTransportClientSettingCannotBeChanged() { - Settings baseSettings = settingsBuilder().put(Client.CLIENT_TYPE_SETTING, "anything").put("path.home", createTempDir()).build(); + Settings baseSettings = settingsBuilder() + .put(Client.CLIENT_TYPE_SETTING, "anything") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); try (TransportClient client = TransportClient.builder().settings(baseSettings).build()) { Settings settings = client.injector.getInstance(Settings.class); assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("transport")); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java 
b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index b28fdba8c77..e5367d1da42 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -57,7 +58,7 @@ public class TransportClientRetryIT extends ESIntegTestCase { .put("node.mode", internalCluster().getNodeMode()) .put(ClusterName.SETTING, internalCluster().getClusterName()) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) - .put("path.home", createTempDir()); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); try (TransportClient transportClient = TransportClient.builder().settings(builder.build()).build()) { transportClient.addTransportAddresses(addresses); diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java index 8f9c9009071..ed8a5cffbf4 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java @@ -27,6 +27,7 @@ import org.apache.log4j.spi.LoggingEvent; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -53,8 +54,8 @@ public 
class Log4jESLoggerTests extends ESTestCase { Path configDir = getDataPath("config"); // Need to set custom path.conf so we can use a custom logging.yml file for the test Settings settings = Settings.builder() - .put("path.conf", configDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); LogConfigurator.configure(settings, true); diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java index 2a08dd1e55c..5d90edaf7a5 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java @@ -54,8 +54,8 @@ public class LoggingConfigurationTests extends ESTestCase { try { Path configDir = getDataPath("config"); Settings settings = Settings.builder() - .put("path.conf", configDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); LogConfigurator.configure(settings, true); @@ -84,8 +84,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(loggingConf, "{\"json\": \"foo\"}".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -101,8 +101,8 @@ public class LoggingConfigurationTests extends ESTestCase { 
Files.write(loggingConf, "key: value".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -120,8 +120,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(loggingConf2, "yaml: bar".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -138,8 +138,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(invalidSuffix, "yml: bar".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", invalidSuffix.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), invalidSuffix.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -157,8 +157,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(loggingConf, "appender.file.type: file\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND); Environment environment = InternalSettingsPreparer.prepareEnvironment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) 
.put("logger.test_resolve_order", "TRACE, console") .put("appender.console.type", "console") .put("appender.console.layout.type", "consolePattern") @@ -186,8 +186,8 @@ public class LoggingConfigurationTests extends ESTestCase { StandardCharsets.UTF_8); Environment environment = InternalSettingsPreparer.prepareEnvironment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(), new CliToolTestCase.MockTerminal()); LogConfigurator.configure(environment.settings(), false); ESLogger esLogger = Log4jESLoggerFactory.getLogger("test_config_not_read"); diff --git a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 79f9efbb814..0a62d2829d3 100644 --- a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -40,8 +40,8 @@ public class EnvironmentTests extends ESTestCase { public Environment newEnvironment(Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath()) - .putArray("path.data", tmpPaths()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new Environment(build); } @@ -49,7 +49,7 @@ public class EnvironmentTests extends ESTestCase { Environment environment = newEnvironment(); assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); - environment = newEnvironment(settingsBuilder().putArray("path.repo", "/test/repos", "/another/repos", "/test/repos/../other").build()); + environment = 
newEnvironment(settingsBuilder().putArray(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index acee455bb6c..1ead12ff432 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -50,7 +50,7 @@ public class NodeEnvironmentTests extends ESTestCase { NodeEnvironment env = newNodeEnvironment(Settings.builder() .put("node.max_local_storage_nodes", 1).build()); Settings settings = env.getSettings(); - String[] dataPaths = env.getSettings().getAsArray("path.data"); + List dataPaths = Environment.PATH_DATA_SETTING.get(env.getSettings()); try { new NodeEnvironment(settings, new Environment(settings)); @@ -62,10 +62,10 @@ public class NodeEnvironmentTests extends ESTestCase { // now can recreate and lock it env = new NodeEnvironment(settings, new Environment(settings)); - assertEquals(env.nodeDataPaths().length, dataPaths.length); + assertEquals(env.nodeDataPaths().length, dataPaths.size()); - for (int i = 0; i < dataPaths.length; i++) { - assertTrue(env.nodeDataPaths()[i].startsWith(PathUtils.get(dataPaths[i]))); + for (int i = 0; i < dataPaths.size(); i++) { + assertTrue(env.nodeDataPaths()[i].startsWith(PathUtils.get(dataPaths.get(i)))); } env.close(); assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); @@ -74,11 +74,11 @@ public class NodeEnvironmentTests extends ESTestCase { public void testNodeLockMultipleEnvironment() throws IOException { final NodeEnvironment first = newNodeEnvironment(); - String[] 
dataPaths = first.getSettings().getAsArray("path.data"); + List dataPaths = Environment.PATH_DATA_SETTING.get(first.getSettings()); NodeEnvironment second = new NodeEnvironment(first.getSettings(), new Environment(first.getSettings())); - assertEquals(first.nodeDataPaths().length, dataPaths.length); - assertEquals(second.nodeDataPaths().length, dataPaths.length); - for (int i = 0; i < dataPaths.length; i++) { + assertEquals(first.nodeDataPaths().length, dataPaths.size()); + assertEquals(second.nodeDataPaths().length, dataPaths.size()); + for (int i = 0; i < dataPaths.size(); i++) { assertEquals(first.nodeDataPaths()[i].getParent(), second.nodeDataPaths()[i].getParent()); } IOUtils.close(first, second); @@ -355,25 +355,25 @@ public class NodeEnvironmentTests extends ESTestCase { public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath().toString()) - .putArray("path.data", tmpPaths()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new NodeEnvironment(build, new Environment(build)); } public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath().toString()) - .putArray("path.data", dataPaths).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); return new NodeEnvironment(build, new Environment(build)); } public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataPath, Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath().toString()) - 
.put("path.shared_data", sharedDataPath) - .putArray("path.data", dataPaths).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath) + .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); return new NodeEnvironment(build, new Environment(build)); } } diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index e93d8fbcf41..e2cb4bc7925 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.recovery.RecoveryState; @@ -438,11 +439,11 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { public void testRecoveryDifferentNodeOrderStartup() throws Exception { // we need different data paths so we make sure we start the second node fresh - final String node_1 = internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build()); + final String node_1 = internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build()); client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet(); - internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build()); + internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), 
createTempDir()).build()); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index eae3e65c406..26656296498 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -117,7 +117,7 @@ public class IndexModuleTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); index = new Index("foo"); - settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build(); + settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); indexSettings = IndexSettingsModule.newIndexSettings(index, settings); environment = new Environment(settings); nodeServicesProvider = newNodeServiceProvider(settings, environment, null); @@ -148,7 +148,12 @@ public class IndexModuleTests extends ESTestCase { public void testRegisterIndexStore() throws IOException { final Index index = new Index("foo"); - final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store").build(); + final Settings settings = Settings + .builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store") + .build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); module.addIndexStore("foo_store", FooStore::new); @@ -210,7 +215,7 @@ public class IndexModuleTests extends ESTestCase { 
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.similarity.my_similarity.type", "test_similarity") .put("index.similarity.my_similarity.key", "there is a key") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() { @@ -238,7 +243,7 @@ public class IndexModuleTests extends ESTestCase { Settings indexSettings = Settings.settingsBuilder() .put("index.similarity.my_similarity.type", "test_similarity") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); try { @@ -251,7 +256,7 @@ public class IndexModuleTests extends ESTestCase { public void testSetupWithoutType() throws IOException { Settings indexSettings = Settings.settingsBuilder() .put("index.similarity.my_similarity.foo", "bar") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); @@ -264,7 +269,7 @@ public class IndexModuleTests extends ESTestCase { public void testCannotRegisterProvidedImplementations() { Settings indexSettings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) 
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); try { @@ -292,7 +297,7 @@ public class IndexModuleTests extends ESTestCase { public void testRegisterCustomQueryCache() throws IOException { Settings indexSettings = Settings.settingsBuilder() .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), "custom") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); module.registerQueryCache("custom", (a, b) -> new CustomQueryCache()); @@ -310,7 +315,7 @@ public class IndexModuleTests extends ESTestCase { public void testDefaultQueryCacheImplIsSelected() throws IOException { Settings indexSettings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 7012ebbc5a8..5d54f7731c2 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.RoutingNodes; 
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShadowIndexShard; import org.elasticsearch.index.translog.TranslogStats; @@ -86,7 +87,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { private Settings nodeSettings(String dataPath) { return Settings.builder() .put("node.add_id_to_custom_path", false) - .put("path.shared_data", dataPath) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath) .put("index.store.fs.fs_lock", randomFrom("native", "simple")) .build(); } @@ -443,7 +444,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { Path dataPath = createTempDir(); Settings nodeSettings = Settings.builder() .put("node.add_id_to_custom_path", false) - .put("path.shared_data", dataPath) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath) .build(); String node1 = internalCluster().startNode(nodeSettings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java index 17bd9d587b3..ba3f8b2e100 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -31,7 +32,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class ASCIIFoldingTokenFilterFactoryTests extends 
ESTokenStreamTestCase { public void testDefault() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_ascii_folding.type", "asciifolding") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding"); @@ -44,7 +45,7 @@ public class ASCIIFoldingTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testPreserveOriginal() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_ascii_folding.type", "asciifolding") .put("index.analysis.filter.my_ascii_folding.preserve_original", true) .build()); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index f844d9ac7a6..5da54158484 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -81,7 +81,7 @@ public class AnalysisModuleTests extends ModuleTestCase { private Settings loadFromClasspath(String path) { return settingsBuilder().loadFromStream(path, getClass().getResourceAsStream(path)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); } @@ -106,7 +106,7 @@ public class AnalysisModuleTests extends ModuleTestCase { String yaml = "/org/elasticsearch/index/analysis/test1.yml"; Settings settings2 = settingsBuilder() .loadFromStream(yaml, 
getClass().getResourceAsStream(yaml)) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0) .build(); AnalysisRegistry newRegistry = getNewRegistry(settings2); @@ -130,7 +130,7 @@ public class AnalysisModuleTests extends ModuleTestCase { private void assertTokenFilter(String name, Class clazz) throws IOException { Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter(name); Tokenizer tokenizer = new WhitespaceTokenizer(); @@ -215,7 +215,7 @@ public class AnalysisModuleTests extends ModuleTestCase { public void testWordListPath() throws Exception { Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); Environment env = new Environment(settings); String[] words = new String[]{"donau", "dampf", "schiff", "spargel", "creme", "suppe"}; @@ -243,7 +243,7 @@ public class AnalysisModuleTests extends ModuleTestCase { public void testUnderscoreInAnalyzerName() throws IOException { Settings settings = Settings.builder() .put("index.analysis.analyzer._invalid_name.tokenizer", "keyword") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, "1") .build(); try { @@ -258,7 +258,7 @@ public class AnalysisModuleTests extends ModuleTestCase { Settings settings = Settings.builder() .put("index.analysis.analyzer.valid_name.tokenizer", "keyword") 
.put("index.analysis.analyzer.valid_name.alias", "_invalid_name") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, "1") .build(); try { @@ -275,7 +275,7 @@ public class AnalysisModuleTests extends ModuleTestCase { .put("index.analysis.analyzer.custom1.position_offset_gap", "128") .put("index.analysis.analyzer.custom2.tokenizer", "standard") .put("index.analysis.analyzer.custom2.position_increment_gap", "256") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.V_1_7_1)) .build(); @@ -295,7 +295,7 @@ public class AnalysisModuleTests extends ModuleTestCase { .put("index.analysis.analyzer.custom.tokenizer", "standard") .put("index.analysis.analyzer.custom.position_offset_gap", "128") .put("index.analysis.analyzer.custom.position_increment_gap", "256") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.V_1_7_1)) .build(); @@ -312,7 +312,7 @@ public class AnalysisModuleTests extends ModuleTestCase { Settings settings = settingsBuilder() .put("index.analysis.analyzer.custom.tokenizer", "standard") .put("index.analysis.analyzer.custom.position_offset_gap", "128") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); try { @@ -326,7 +326,7 @@ public class AnalysisModuleTests extends ModuleTestCase { public void testRegisterHunspellDictionary() throws Exception { Settings settings = settingsBuilder() - .put("path.home", createTempDir().toString()) + 
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); AnalysisModule module = new AnalysisModule(new Environment(settings)); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java index f467aa289f8..3dfb0975ab4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java @@ -53,7 +53,11 @@ public class AnalysisServiceTests extends ESTestCase { public void testDefaultAnalyzers() throws IOException { Version version = VersionUtils.randomVersion(getRandom()); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put("path.home", createTempDir().toString()).build(); + Settings settings = Settings + .builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, version) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); @@ -123,7 +127,7 @@ public class AnalysisServiceTests extends ESTestCase { public void testConfigureCamelCaseTokenFilter() throws IOException { // tests a filter that - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.wordDelimiter.type", "word_delimiter") @@ -169,7 +173,7 @@ public 
class AnalysisServiceTests extends ESTestCase { } public void testCameCaseOverride() throws IOException { - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.wordDelimiter.type", "word_delimiter") @@ -196,7 +200,7 @@ public class AnalysisServiceTests extends ESTestCase { } public void testBuiltInAnalyzersAreCached() throws IOException { - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java index 1404716b0c8..7460ddd3e55 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java @@ -37,7 +37,7 @@ public class AnalysisTestsHelper { public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) throws IOException { Settings settings = Settings.settingsBuilder() .loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource)) - .put("path.home", baseDir.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString()) .build(); return createAnalysisServiceFromSettings(settings); diff --git 
a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java index 63acbc81c8d..a163d9e42b4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -41,7 +42,7 @@ public class AnalyzerBackwardsCompatTests extends ESTokenStreamTestCase { builder.put(SETTING_VERSION_CREATED, version); } builder.put("index.analysis.analyzer.foo.type", type); - builder.put("path.home", createTempDir().toString()); + builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); NamedAnalyzer analyzer = analysisService.analyzer("foo"); assertNotNull(analyzer); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java index dd08d470136..c39c6e702f4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java @@ -40,7 +40,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .putArray("index.analysis.char_filter.my_mapping.mappings", "ph=>f", "qu=>q") .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard") .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = 
IndexSettingsModule.newIndexSettings(index, settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); @@ -58,7 +58,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard") .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java index a097d55f4a3..e00f5f67d87 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java @@ -98,7 +98,7 @@ public class CompoundAnalysisTests extends ESTestCase { return settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); } @@ -107,7 +107,7 @@ public class CompoundAnalysisTests extends ESTestCase { return settingsBuilder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java 
b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java index 02c4e1a2642..51d8b9214ad 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -30,8 +31,8 @@ import static org.hamcrest.Matchers.is; public class HunspellTokenFilterFactoryTests extends ESTestCase { public void testDedup() throws IOException { Settings settings = settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put("index.analysis.filter.en_US.type", "hunspell") .put("index.analysis.filter.en_US.locale", "en_US") .build(); @@ -43,8 +44,8 @@ public class HunspellTokenFilterFactoryTests extends ESTestCase { assertThat(hunspellTokenFilter.dedup(), is(true)); settings = settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put("index.analysis.filter.en_US.type", "hunspell") .put("index.analysis.filter.en_US.dedup", false) .put("index.analysis.filter.en_US.locale", "en_US") diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java index 99c936cd346..a7179daff2a 100644 --- 
a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import org.junit.Assert; @@ -41,7 +42,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase { public void testLoadOverConfiguredSettings() { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.broken_keep_filter.type", "keep") .put("index.analysis.filter.broken_keep_filter.keep_words_path", "does/not/exists.txt") .put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]") @@ -57,7 +58,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase { public void testKeepWordsPathSettings() { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.non_broken_keep_filter.type", "keep") .put("index.analysis.filter.non_broken_keep_filter.keep_words_path", "does/not/exists.txt") .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java index 1e8a0ba16ed..9111c929f95 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import 
org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -32,7 +33,7 @@ import static org.hamcrest.Matchers.instanceOf; public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase { public void testKeepTypes() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.keep_numbers.type", "keep_types") .putArray("index.analysis.filter.keep_numbers.types", new String[] {"", ""}) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java index e133ffc79ae..b266be9f2bd 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -31,7 +32,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.limit_default.type", "limit") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = 
AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); { @@ -58,7 +59,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.limit_1.type", "limit") .put("index.analysis.filter.limit_1.max_token_count", 3) .put("index.analysis.filter.limit_1.consume_all_tokens", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); @@ -73,7 +74,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.limit_1.type", "limit") .put("index.analysis.filter.limit_1.max_token_count", 3) .put("index.analysis.filter.limit_1.consume_all_tokens", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); @@ -89,7 +90,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.limit_1.type", "limit") .put("index.analysis.filter.limit_1.max_token_count", 17) .put("index.analysis.filter.limit_1.consume_all_tokens", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java index 
4b7119df01b..8c6775a92ab 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java @@ -35,7 +35,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { public void testPatternCaptureTokenFilter() throws Exception { String json = "/org/elasticsearch/index/analysis/pattern_capture.json"; Settings settings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java index 737a991f0e0..37844dce69d 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.analysis.en.PorterStemFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.VersionUtils; @@ -50,7 +51,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.analyzer.my_english.tokenizer","whitespace") .put("index.analysis.analyzer.my_english.filter","my_english") .put(SETTING_VERSION_CREATED,v) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -83,7 +84,7 @@ 
public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.analyzer.my_porter2.tokenizer","whitespace") .put("index.analysis.analyzer.my_porter2.filter","my_porter2") .put(SETTING_VERSION_CREATED,v) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java index 90e55e98d7e..ebaf4cb5cc4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java @@ -35,7 +35,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase { String json = "/org/elasticsearch/index/analysis/stop.json"; Settings settings = settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java index 1dbd9ac2bd9..2804f522afa 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import 
org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -44,7 +45,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { if (random().nextBoolean()) { builder.put("index.analysis.filter.my_stop.version", "5.0"); } - builder.put("path.home", createTempDir().toString()); + builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); Settings settings = builder.build(); try { AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -67,7 +68,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { } else { // don't specify } - builder.put("path.home", createTempDir().toString()); + builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class)); @@ -86,7 +87,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { .put("index.analysis.filter.my_stop.type", "stop") .put("index.analysis.filter.my_stop.enable_position_increments", false) .put("index.analysis.filter.my_stop.version", "4.3") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); @@ -101,7 +102,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.my_stop.type", "stop") .put("index.analysis.filter.my_stop.remove_trailing", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService 
analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java index 54810028ae3..a041694dde6 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -31,7 +32,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); @@ -44,7 +45,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testCatenateWords() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") 
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") @@ -59,7 +60,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testCatenateNumbers() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") .put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true") @@ -74,7 +75,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testCatenateAll() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") @@ -90,7 +91,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testSplitOnCaseChange() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false") .build()); @@ -104,7 +105,7 @@ public class WordDelimiterTokenFilterFactoryTests extends 
ESTokenStreamTestCase public void testPreserveOriginal() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.preserve_original", "true") .build()); @@ -118,7 +119,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testStemEnglishPossessive() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false") .build()); @@ -133,7 +134,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase /** Correct offset order when doing both parts and concatenation: PowerShot is a synonym of Power */ public void testPartsAndCatenate() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") @@ -150,7 +151,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase * old offset order when doing both parts and concatenation: PowerShot is a synonym of Shot */ public void testDeprecatedPartsAndCatenate() throws IOException { AnalysisService 
analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") diff --git a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java index f7c346c6570..a9d3c8820fc 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.TokenFilterFactory; @@ -38,7 +39,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.common_grams_default.type", "common_grams") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); try { @@ -54,7 +55,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams") 
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -71,7 +72,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams") .put("index.analysis.filter.common_grams_default.query_mode", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -90,7 +91,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_1.type", "common_grams") .put("index.analysis.filter.common_grams_1.ignore_case", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -104,7 +105,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_2.type", "common_grams") .put("index.analysis.filter.common_grams_2.ignore_case", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) 
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -118,7 +119,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams") .putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3"); @@ -134,7 +135,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { String json = "/org/elasticsearch/index/analysis/commongrams/commongrams.json"; Settings settings = Settings.settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", createHome()) + .put(Environment.PATH_HOME_SETTING.getKey(), createHome()) .build(); { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -158,7 +159,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.common_grams_1.query_mode", true) .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .put("index.analysis.filter.common_grams_1.ignore_case", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = 
analysisService.tokenFilter("common_grams_1"); @@ -173,7 +174,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.common_grams_2.query_mode", true) .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .put("index.analysis.filter.common_grams_2.ignore_case", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2"); @@ -187,7 +188,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams") .put("index.analysis.filter.common_grams_3.query_mode", true) .putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3"); @@ -201,7 +202,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_4.type", "common_grams") .put("index.analysis.filter.common_grams_4.query_mode", true) .putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = 
AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_4"); @@ -217,7 +218,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { String json = "/org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json"; Settings settings = Settings.settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", createHome()) + .put(Environment.PATH_HOME_SETTING.getKey(), createHome()) .build(); { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java index 3a6adca1c67..c4c664f222c 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java @@ -64,7 +64,7 @@ public class SynonymsAnalysisTests extends ESTestCase { String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json"; Settings settings = settingsBuilder(). 
loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", home) + .put(Environment.PATH_HOME_SETTING.getKey(), home) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 7cfe52d1c09..c293237b5a0 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -106,7 +106,7 @@ public class CodecTests extends ESTestCase { private static CodecService createCodecService() throws IOException { Settings nodeSettings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); IndexSettings settings = IndexSettingsModule.newIndexSettings(new Index("_na"), nodeSettings); SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 52aa7ea48f0..8c6bfff3fac 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -186,7 +186,7 @@ public abstract class AbstractQueryTestCase> Version version = randomBoolean() ? 
Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); Settings settings = Settings.settingsBuilder() .put("name", AbstractQueryTestCase.class.toString()) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .build(); Settings indexSettings = Settings.settingsBuilder() @@ -218,7 +218,7 @@ public abstract class AbstractQueryTestCase> @Override protected void configure() { Settings settings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // no file watching, so we don't need a ResourceWatcherService .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java index c3a2d65748c..911f2598f03 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java @@ -167,8 +167,8 @@ public class NewPathForShardTests extends ESTestCase { path.resolve("b").toString()}; Settings settings = Settings.builder() - .put("path.home", path) - .putArray("path.data", paths).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), path) + .putArray(Environment.PATH_DATA_SETTING.getKey(), paths).build(); NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings)); // Make sure all our mocking above actually worked: diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index 5a82a8942aa..80d5f4c8fe9 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -22,6 +22,7 @@ 
import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -118,7 +119,7 @@ public class ShardPathTests extends ESTestCase { final Path path = createTempDir(); final boolean includeNodeId = randomBoolean(); indexSetttings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build(); - nodeSettings = settingsBuilder().put("path.shared_data", path.toAbsolutePath().toAbsolutePath()) + nodeSettings = settingsBuilder().put(Environment.PATH_SHARED_DATA_SETTING.getKey(), path.toAbsolutePath().toAbsolutePath()) .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH, includeNodeId).build(); if (includeNodeId) { customPath = path.resolve("custom").resolve("0"); diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java index 722a4ebde8a..a8c1a6a0191 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java @@ -22,6 +22,7 @@ import org.apache.lucene.analysis.hunspell.Dictionary; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -39,7 +40,7 @@ import static org.hamcrest.Matchers.notNullValue; public class HunspellServiceIT extends ESIntegTestCase { public void testLocaleDirectoryWithNodeLevelConfig() throws Exception { Settings 
settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put(HUNSPELL_LAZY_LOAD, randomBoolean()) .put(HUNSPELL_IGNORE_CASE, true) .build(); @@ -52,7 +53,7 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testLocaleDirectoryWithLocaleSpecificConfig() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put(HUNSPELL_LAZY_LOAD, randomBoolean()) .put(HUNSPELL_IGNORE_CASE, true) .put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false) @@ -74,7 +75,7 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testDicWithNoAff() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/no_aff_conf_dir")) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/no_aff_conf_dir")) .put(HUNSPELL_LAZY_LOAD, randomBoolean()) .build(); @@ -92,7 +93,7 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testDicWithTwoAffs() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/two_aff_conf_dir")) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/two_aff_conf_dir")) .put(HUNSPELL_LAZY_LOAD, randomBoolean()) .build(); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 18d56eee88f..9826a787890 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -41,6 +41,7 @@ import 
org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -82,7 +83,7 @@ import static org.hamcrest.Matchers.equalTo; public class IndicesStoreIntegrationIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { // simplify this and only use a single data path - return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("path.data", "") + return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Environment.PATH_DATA_SETTING.getKey(), "") // by default this value is 1 sec in tests (30 sec in practice) but we adding disruption here // which is between 1 and 2 sec can cause each of the shard deletion requests to timeout. // to prevent this we are setting the timeout here to something highish ie. 
the default in practice diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 4dda068ddd6..442b1afa6ee 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -48,7 +48,7 @@ public class InternalSettingsPreparerTests extends ESTestCase { @Before public void createBaseEnvSettings() { baseEnvSettings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); } @@ -68,7 +68,7 @@ public class InternalSettingsPreparerTests extends ESTestCase { assertNotNull(settings.get("name")); // a name was set assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size()); - String home = baseEnvSettings.get("path.home"); + String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings); String configDir = env.configFile().toString(); assertTrue(configDir, configDir.startsWith(home)); } diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java index cb8ffb8e91f..9378eaa7542 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.env.Environment; import 
org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -113,7 +114,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { } Settings.Builder nodeSettings = Settings.builder() - .put("path.data", dataDir); + .put(Environment.PATH_DATA_SETTING.getKey(), dataDir); internalCluster().startNode(nodeSettings.build()); ensureGreen(INDEX_NAME); } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 5d8605ab19e..d3c1f1b8bad 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -88,7 +88,7 @@ public class PluginsServiceTests extends ESTestCase { public void testAdditionalSettings() { Settings settings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("my.setting", "test") .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.SIMPLEFS.getSettingsKey()).build(); PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class); @@ -100,7 +100,7 @@ public class PluginsServiceTests extends ESTestCase { public void testAdditionalSettingsClash() { Settings settings = Settings.builder() - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class, AdditionalSettingsPlugin2.class); try { service.updatedSettings(); @@ -115,7 +115,7 @@ public class PluginsServiceTests extends ESTestCase { public void testOnModuleExceptionsArePropagated() { Settings settings = Settings.builder() - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); PluginsService service = newPluginsService(settings, 
FailOnModule.class); try { service.processModule(new BrokenModule()); diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index 987aef90bc3..8ef7bcc41f5 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -39,7 +39,7 @@ public class FileScriptTests extends ESTestCase { Path mockscript = scriptsDir.resolve("script1.mockscript"); Files.write(mockscript, "1".getBytes("UTF-8")); settings = Settings.builder() - .put("path.home", homeDir) + .put(Environment.PATH_HOME_SETTING.getKey(), homeDir) // no file watching, so we don't need a ResourceWatcherService .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .put(settings) diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 47adeabe02f..0e8d477d0e3 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -50,7 +50,7 @@ public class NativeScriptTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings settings = Settings.settingsBuilder() .put("name", "testNativeScript") - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); ScriptModule scriptModule = new ScriptModule(settings); scriptModule.registerScript("my", MyNativeScriptFactory.class); @@ -78,7 +78,7 @@ public class NativeScriptTests extends ESTestCase { String scriptContext = randomFrom(ScriptContext.Standard.values()).getKey(); builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + scriptContext, randomFrom(ScriptMode.values())); } - Settings settings = builder.put("path.home", createTempDir()).build(); + Settings settings = 
builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(settings); ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, null); Map nativeScriptFactoryMap = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 019eb7c74a0..36865f254bd 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -38,7 +38,7 @@ public class ScriptContextTests extends ESTestCase { ScriptService makeScriptService() throws Exception { Settings settings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // no file watching, so we don't need a ResourceWatcherService .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .put("script." 
+ PLUGIN_NAME + "_custom_globally_disabled_op", false) diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index caad93c68dd..2028605d844 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -67,8 +67,8 @@ public class ScriptServiceTests extends ESTestCase { public void setup() throws IOException { Path genericConfigFolder = createTempDir(); baseSettings = settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", genericConfigFolder) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder) .build(); resourceWatcherService = new ResourceWatcherService(baseSettings, null); scriptEngineService = new TestEngineService(); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 5a1b99fe05f..9597af9301b 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.AbstractQueryTestCase; import org.elasticsearch.index.query.EmptyQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -81,7 +82,7 @@ public class SearchSourceBuilderTests extends ESTestCase { public static void init() throws IOException { Settings settings = Settings.settingsBuilder() .put("name", 
SearchSourceBuilderTests.class.toString()) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); namedWriteableRegistry = new NamedWriteableRegistry(); injector = new ModulesBuilder().add( diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index 60f1bad6089..ea225d9680b 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; @@ -186,7 +187,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { public void testThreadPoolLeakingThreadsWithTribeNode() { Settings settings = Settings.builder() .put("node.name", "thread_pool_leaking_threads_tribe_node") - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("tribe.t1.cluster.name", "non_existing_cluster") //trigger initialization failure of one of the tribes (doesn't require starting the node) .put("tribe.t1.plugin.mandatory", "non_existing").build(); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java index ee49012291d..f4dccc77161 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java +++ 
b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -73,7 +74,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("cluster.name", internalCluster().getClusterName()) .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); try (TransportClient transportClient = TransportClient.builder().settings(settings).build()) { transportClient.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), randomPort)); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java index 7e6dbd67f56..fb57c545c11 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; 
@@ -123,7 +124,7 @@ public class ScriptedMetricTests extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { Settings settings = Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.conf", getDataPath("/org/elasticsearch/messy/tests/conf")) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/org/elasticsearch/messy/tests/conf")) .build(); return settings; } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java index 4b3d3f3ff98..39b2f290cd3 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; @@ -68,7 +69,7 @@ public class RenderSearchTemplateTests extends ESIntegTestCase { throw new RuntimeException(e); } return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put("path.conf", configDir).build(); + .put(Environment.PATH_CONF_SETTING.getKey(), configDir).build(); } public void testInlineTemplate() { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 9d136807092..e005ca5b100 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ 
b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -88,8 +88,8 @@ public class TemplateQueryParserTests extends ESTestCase { @Before public void setup() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", this.getDataPath("config")) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), this.getDataPath("config")) .put("name", getClass().getName()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java index 70298266df9..0914fd6cd7c 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TemplateQueryBuilder; import org.elasticsearch.index.query.TemplateQueryParser; @@ -85,7 +86,7 @@ public class TemplateQueryTests extends ESIntegTestCase { @Override public Settings nodeSettings(int nodeOrdinal) { return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put("path.conf", this.getDataPath("config")).build(); + .put(Environment.PATH_CONF_SETTING.getKey(), this.getDataPath("config")).build(); } public void testTemplateInBody() throws IOException { diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java 
b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java index d4b2530dbb6..efd60427e23 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -32,7 +33,7 @@ import static org.hamcrest.Matchers.instanceOf; public class SimpleIcuAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { Settings settings = settingsBuilder() - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); AnalysisService analysisService = createAnalysisService(settings); TokenizerFactory tokenizerFactory = analysisService.tokenizer("icu_tokenizer"); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java index 33c1f337dbd..632f3f539d6 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -45,7 +46,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public 
void testBasicUsage() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") @@ -61,7 +62,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testNormalization() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") @@ -78,7 +79,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testSecondaryStrength() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "secondary") @@ -96,7 +97,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnorePunctuation() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") @@ -114,7 +115,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnoreWhitespace() throws IOException { Settings settings = Settings.settingsBuilder() - 
.put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") @@ -135,7 +136,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testNumerics() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.numeric", "true") @@ -152,7 +153,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnoreAccentsButNotCase() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") @@ -173,7 +174,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testUpperCaseFirst() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "tertiary") @@ -203,7 +204,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { String tailoredRules = tailoredCollator.getRules(); Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) 
.put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.rules", tailoredRules) .put("index.analysis.filter.myCollator.strength", "primary") diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java index acdbd9d4dfc..7ebb783d1db 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Normalizer2; import org.apache.lucene.analysis.CharFilter; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.StringReader; @@ -34,7 +35,7 @@ import static org.elasticsearch.index.analysis.AnalysisTestUtils.createAnalysisS public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { public void testDefaultSetting() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .build(); AnalysisService analysisService = createAnalysisService(settings); @@ -57,7 +58,7 @@ public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { public void testNameAndModeSetting() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .put("index.analysis.char_filter.myNormalizerChar.name", "nfkc") .put("index.analysis.char_filter.myNormalizerChar.mode", 
"decompose") diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 63122842104..016053810d2 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -199,7 +199,7 @@ public class KuromojiAnalysisTests extends ESTestCase { String json = "/org/elasticsearch/index/analysis/kuromoji_analysis.json"; Settings settings = Settings.settingsBuilder() - .put("path.home", home) + .put(Environment.PATH_HOME_SETTING.getKey(), home) .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index b0a93f10e4d..6dd341346e5 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -48,7 +48,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { String yaml = "/org/elasticsearch/index/analysis/phonetic-1.yml"; Settings settings = settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); AnalysisService analysisService = testSimpleConfiguration(settings); TokenFilterFactory filterFactory = analysisService.tokenFilter("phonetic"); diff --git 
a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index 55c0912b702..d33d36d4c60 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -47,7 +47,7 @@ public class SimpleSmartChineseAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { Index index = new Index("test"); Settings settings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index 02fcbd0c369..05c7252bdf7 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -50,7 +50,7 @@ public class PolishAnalysisTests extends ESTestCase { public void testDefaultsPolishAnalysis() throws IOException { Index index = new Index("test"); Settings settings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java 
b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index e091b0a0d92..306a835c36e 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ -59,7 +59,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { Index index = new Index("test"); Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myStemmer.type", "polish_stem") .build(); AnalysisService analysisService = createAnalysisService(index, settings); @@ -81,7 +81,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { Index index = new Index("test"); Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); AnalysisService analysisService = createAnalysisService(index, settings); diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java index 9747543a77b..ad7140f5020 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.env.Environment; import 
org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -54,7 +55,7 @@ public abstract class AbstractAzureTestCase extends ESIntegTestCase { protected Settings readSettingsFromFile() { Settings.Builder settings = Settings.builder(); - settings.put("path.home", createTempDir()); + settings.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); // if explicit, just load it and don't load from env try { diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java index ec9155c51b3..e5931dc8b8e 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ThirdParty; @@ -40,7 +41,7 @@ public abstract class AbstractAwsTestCase extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settings = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .extendArray("plugin.types", Ec2DiscoveryPlugin.class.getName()) .put("cloud.aws.test.random", randomInt()) .put("cloud.aws.test.write_failures", 0.1) diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java 
b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java index 9b7d8afe381..81a82825f8b 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java @@ -22,6 +22,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -39,7 +40,7 @@ public class AttachmentUnitTestCase extends ESTestCase { @Before public void createSettings() throws Exception { testSettings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id) .build(); } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java index b3ad01bae49..fe12cb042d4 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; import org.elasticsearch.plugins.Plugin; @@ -62,7 
+63,7 @@ public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase { Path dataPath = createTempDir(); Settings settings = Settings.builder() - .put("path.data", dataPath) + .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) .build(); final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java index 4529111c16e..a2af6df4e75 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; @@ -63,7 +64,7 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { Path dataPath = createTempDir(); Settings settings = Settings.builder() - .put("path.data", dataPath) + .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) .build(); final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java 
b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java index e823e8e6681..bc3706263f7 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugin.repository.s3.S3RepositoryPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ThirdParty; @@ -40,7 +41,7 @@ public abstract class AbstractAwsTestCase extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settings = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .extendArray("plugin.types", S3RepositoryPlugin.class.getName(), TestAwsS3Service.TestPlugin.class.getName()) .put("cloud.aws.test.random", randomInt()) .put("cloud.aws.test.write_failures", 0.1) diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 695d2a42321..c213f18f333 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -36,14 +36,14 @@ import java.util.Set; @SuppressForbidden(reason = "modifies system properties and attempts to create symbolic links intentionally") public class EvilSecurityTests extends ESTestCase { - + /** test generated permissions */ public void testGeneratedPermissions() throws Exception { Path path = createTempDir(); // make a fake ES 
home and ensure we only grant permissions to that. Path esHome = path.resolve("esHome"); Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put("path.home", esHome.toString()); + settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString()); Settings settings = settingsBuilder.build(); Path fakeTmpDir = createTempDir(); @@ -56,7 +56,7 @@ public class EvilSecurityTests extends ESTestCase { } finally { System.setProperty("java.io.tmpdir", realTmpDir); } - + // the fake es home assertNoPermissions(esHome, permissions); // its parent @@ -74,14 +74,14 @@ public class EvilSecurityTests extends ESTestCase { Path esHome = path.resolve("esHome"); Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put("path.home", esHome.resolve("home").toString()); - settingsBuilder.put("path.conf", esHome.resolve("conf").toString()); - settingsBuilder.put("path.scripts", esHome.resolve("scripts").toString()); - settingsBuilder.put("path.plugins", esHome.resolve("plugins").toString()); - settingsBuilder.putArray("path.data", esHome.resolve("data1").toString(), esHome.resolve("data2").toString()); - settingsBuilder.put("path.shared_data", esHome.resolve("custom").toString()); - settingsBuilder.put("path.logs", esHome.resolve("logs").toString()); - settingsBuilder.put("pidfile", esHome.resolve("test.pid").toString()); + settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString()); + settingsBuilder.put(Environment.PATH_CONF_SETTING.getKey(), esHome.resolve("conf").toString()); + settingsBuilder.put(Environment.PATH_SCRIPTS_SETTING.getKey(), esHome.resolve("scripts").toString()); + settingsBuilder.put(Environment.PATH_PLUGINS_SETTING.getKey(), esHome.resolve("plugins").toString()); + settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(), esHome.resolve("data2").toString()); + settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), 
esHome.resolve("custom").toString()); + settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString()); + settingsBuilder.put(Environment.PIDFILE_SETTING.getKey(), esHome.resolve("test.pid").toString()); Settings settings = settingsBuilder.build(); Path fakeTmpDir = createTempDir(); @@ -104,7 +104,7 @@ public class EvilSecurityTests extends ESTestCase { assertNoPermissions(esHome.getParent().resolve("other"), permissions); // double check we overwrote java.io.tmpdir correctly for the test assertNoPermissions(PathUtils.get(realTmpDir), permissions); - + // check that all directories got permissions: // bin file: ro @@ -135,10 +135,10 @@ public class EvilSecurityTests extends ESTestCase { // PID file: delete only (for the shutdown hook) assertExactPermissions(new FilePermission(environment.pidFile().toString(), "delete"), permissions); } - + public void testEnsureSymlink() throws IOException { Path p = createTempDir(); - + Path exists = p.resolve("exists"); Files.createDirectory(exists); @@ -154,7 +154,7 @@ public class EvilSecurityTests extends ESTestCase { Security.ensureDirectoryExists(linkExists); Files.createTempFile(linkExists, null, null); } - + public void testEnsureBrokenSymlink() throws IOException { Path p = createTempDir(); @@ -199,7 +199,7 @@ public class EvilSecurityTests extends ESTestCase { assertExactPermissions(new FilePermission(target.resolve("foo").toString(), "read"), permissions); } - /** + /** * checks exact file permissions, meaning those and only those for that path. 
*/ static void assertExactPermissions(FilePermission expected, PermissionCollection actual) { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java index 8633511756d..45f3df22cd7 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java @@ -124,7 +124,7 @@ public class CheckFileCommandTests extends ESTestCase { try (FileSystem fs = Jimfs.newFileSystem(configuration)) { Path path = fs.getPath(randomAsciiOfLength(10)); Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); new CreateFileCommand(captureOutputTerminal, path).execute(settings, new Environment(settings)); assertThat(Files.exists(path), is(true)); @@ -141,7 +141,7 @@ public class CheckFileCommandTests extends ESTestCase { Files.write(path, "anything".getBytes(StandardCharsets.UTF_8)); Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); new DeleteFileCommand(captureOutputTerminal, path).execute(settings, new Environment(settings)); assertThat(Files.exists(path), is(false)); @@ -173,7 +173,7 @@ public class CheckFileCommandTests extends ESTestCase { this.fs = fs; this.paths = new Path[] { writePath(fs, "p1", "anything"), writePath(fs, "p2", "anything"), writePath(fs, "p3", "anything") }; Settings settings = Settings.settingsBuilder() - .put("path.home", baseDir.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString()) .build(); return super.execute(Settings.EMPTY, new Environment(settings)); } diff --git 
a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java index 3789c273cf8..d2c8ccfd3c9 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java @@ -72,7 +72,7 @@ public class EvilInternalSettingsPreparerTests extends ESTestCase { @Before public void createBaseEnvSettings() { baseEnvSettings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java index 0eebc9731ff..5e70cf71923 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java @@ -70,13 +70,13 @@ public class PluginManagerPermissionTests extends ESTestCase { @Before public void setup() { Path tempDir = createTempDir(); - Settings.Builder settingsBuilder = settingsBuilder().put("path.home", tempDir); + Settings.Builder settingsBuilder = settingsBuilder().put(Environment.PATH_HOME_SETTING.getKey(), tempDir); if (randomBoolean()) { - settingsBuilder.put("path.plugins", createTempDir()); + settingsBuilder.put(Environment.PATH_PLUGINS_SETTING.getKey(), createTempDir()); } if (randomBoolean()) { - settingsBuilder.put("path.conf", createTempDir()); + settingsBuilder.put(Environment.PATH_CONF_SETTING.getKey(), createTempDir()); } environment = new Environment(settingsBuilder.build()); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java 
b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index 514a2ee6710..24055d9f6dc 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -108,7 +108,7 @@ public class PluginManagerTests extends ESIntegTestCase { @Before public void setup() throws Exception { environment = buildInitialSettings(); - System.setProperty("es.default.path.home", environment.settings().get("path.home")); + System.setProperty("es.default.path.home", Environment.PATH_HOME_SETTING.get(environment.settings())); Path binDir = environment.binFile(); if (!Files.exists(binDir)) { Files.createDirectories(binDir); @@ -696,7 +696,7 @@ public class PluginManagerTests extends ESIntegTestCase { private Environment buildInitialSettings() throws IOException { Settings settings = settingsBuilder() .put("http.enabled", true) - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); return InternalSettingsPreparer.prepareEnvironment(settings, null); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java index 266b44ebfbd..49edcc7b1d4 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java @@ -56,8 +56,8 @@ public class PluginManagerUnitTests extends ESTestCase { Path genericConfigFolder = createTempDir(); Settings settings = settingsBuilder() - .put("path.conf", genericConfigFolder) - .put("path.home", homeFolder) + .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder) + .put(Environment.PATH_HOME_SETTING.getKey(), homeFolder) .build(); Environment environment = new Environment(settings); diff --git 
a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 1ad972e10ef..fb01f9b9b37 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; @@ -56,7 +57,7 @@ public class TribeUnitTests extends ESTestCase { Settings baseSettings = Settings.builder() .put("http.enabled", false) .put("node.mode", NODE_MODE) - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); tribe1 = new TribeClientNode( Settings.builder() @@ -102,7 +103,11 @@ public class TribeUnitTests extends ESTestCase { public void testThatTribeClientsIgnoreGlobalConfig() throws Exception { Path pathConf = getDataPath("elasticsearch.yml").getParent(); - Settings settings = Settings.builder().put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true).put("path.conf", pathConf).build(); + Settings settings = Settings + .builder() + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) + .put(Environment.PATH_CONF_SETTING.getKey(), pathConf) + .build(); assertTribeNodeSuccesfullyCreated(settings); } @@ -111,7 +116,7 @@ public class TribeUnitTests extends ESTestCase { //they can find their corresponding tribes using the proper transport Settings settings = Settings.builder().put("http.enabled", false).put("node.name", "tribe_node") .put("tribe.t1.node.mode", NODE_MODE).put("tribe.t2.node.mode", NODE_MODE) - 
.put("path.home", createTempDir()).put(extraSettings).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).put(extraSettings).build(); try (Node node = new Node(settings).start()) { try (Client client = node.client()) { diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 227936beb42..3fea66459c2 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.junit.After; import org.junit.AfterClass; @@ -79,7 +80,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { .put("name", "qa_smoke_client_" + counter.getAndIncrement()) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) - .put("path.home", tempDir) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put("node.mode", "network").build(); // we require network here! 
TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index 1c110bc405a..a9b45a5b336 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -45,7 +45,7 @@ public class MapperTestUtils { public static MapperService newMapperService(Path tempDir, Settings settings, IndicesModule indicesModule) throws IOException { Settings.Builder settingsBuilder = Settings.builder() - .put("path.home", tempDir) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(settings); if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) { settingsBuilder.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index ff09ba0e8f6..2fc298048c9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -2091,11 +2091,11 @@ public abstract class ESIntegTestCase extends ESTestCase { assertTrue(Files.exists(dest)); Settings.Builder builder = Settings.builder() .put(settings) - .put("path.data", dataDir.toAbsolutePath()); + .put(Environment.PATH_DATA_SETTING.getKey(), dataDir.toAbsolutePath()); Path configDir = indexDir.resolve("config"); if (Files.exists(configDir)) { - builder.put("path.conf", configDir.toAbsolutePath()); + builder.put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()); } return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 9b06bae21b0..f73839c5cec 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.MockNode; @@ -158,10 +159,10 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { private Node newNode() { Settings settings = Settings.builder() .put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong())) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // TODO: use a consistent data path for custom paths // This needs to tie into the ESIntegTestCase#indexSettings() method - .put("path.shared_data", createTempDir().getParent()) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir().getParent()) .put("node.name", nodeName()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 3777653297e..598b6216ce2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -531,8 +531,8 @@ public abstract class ESTestCase extends LuceneTestCase { public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", 
createTempDir().toAbsolutePath()) - .putArray("path.data", tmpPaths()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new NodeEnvironment(build, new Environment(build)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 34b6bfbfb14..0b3facca05d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; @@ -74,7 +75,7 @@ public final class ExternalTestCluster extends TestCluster { .put("name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) - .put("path.home", tempDir) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put("node.mode", "network").build(); // we require network here! 
TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 3b772b4ef33..6fafb52164d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -62,6 +62,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexModule; @@ -286,12 +287,12 @@ public final class InternalTestCluster extends TestCluster { for (int i = 0; i < numOfDataPaths; i++) { dataPath.append(baseDir.resolve("d" + i).toAbsolutePath()).append(','); } - builder.put("path.data", dataPath.toString()); + builder.put(Environment.PATH_DATA_SETTING.getKey(), dataPath.toString()); } } - builder.put("path.shared_data", baseDir.resolve("custom")); - builder.put("path.home", baseDir); - builder.put("path.repo", baseDir.resolve("repos")); + builder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), baseDir.resolve("custom")); + builder.put(Environment.PATH_HOME_SETTING.getKey(), baseDir); + builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); builder.put("transport.tcp.port", TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER)); builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER)); builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true); @@ -594,7 +595,7 @@ public final class InternalTestCluster extends TestCluster { String name = buildNodeName(nodeId); 
assert !nodes.containsKey(name); Settings finalSettings = settingsBuilder() - .put("path.home", baseDir) // allow overriding path.home + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) // allow overriding path.home .put(settings) .put("name", name) .put(DiscoveryService.SETTING_DISCOVERY_SEED, seed) @@ -890,7 +891,7 @@ public final class InternalTestCluster extends TestCluster { Settings nodeSettings = node.settings(); Builder builder = settingsBuilder() .put("client.transport.nodes_sampler_interval", "1s") - .put("path.home", baseDir) + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) .put("name", TRANSPORT_CLIENT_PREFIX + node.settings().get("name")) .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", sniff) .put("node.mode", nodeSettings.get("node.mode", nodeMode)) From 34b2673d42963ac77dbbac8af77d6e404d693c96 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jan 2016 15:14:13 +0100 Subject: [PATCH 292/347] Convert client.transport settings to new infra --- .../transport/TransportClientNodesService.java | 12 +++++++++--- .../common/settings/ClusterSettings.java | 4 ++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 56befbb9b84..99c70255ca8 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import 
org.elasticsearch.common.unit.TimeValue; @@ -101,6 +102,11 @@ public class TransportClientNodesService extends AbstractComponent { private volatile boolean closed; + + public static final Setting CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL = Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), false, Setting.Scope.CLUSTER); + public static final Setting CLIENT_TRANSPORT_PING_TIMEOUT = Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), false, Setting.Scope.CLUSTER); + public static final Setting CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME = Setting.boolSetting("client.transport.ignore_cluster_name", false, false, Setting.Scope.CLUSTER); + @Inject public TransportClientNodesService(Settings settings, ClusterName clusterName, TransportService transportService, ThreadPool threadPool, Headers headers, Version version) { @@ -111,9 +117,9 @@ public class TransportClientNodesService extends AbstractComponent { this.minCompatibilityVersion = version.minimumCompatibilityVersion(); this.headers = headers; - this.nodesSamplerInterval = this.settings.getAsTime("client.transport.nodes_sampler_interval", timeValueSeconds(5)); - this.pingTimeout = this.settings.getAsTime("client.transport.ping_timeout", timeValueSeconds(5)).millis(); - this.ignoreClusterName = this.settings.getAsBoolean("client.transport.ignore_cluster_name", false); + this.nodesSamplerInterval = CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL.get(this.settings); + this.pingTimeout = CLIENT_TRANSPORT_PING_TIMEOUT.get(this.settings).millis(); + this.ignoreClusterName = CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME.get(this.settings); if (logger.isDebugEnabled()) { logger.debug("node_sampler_interval[" + nodesSamplerInterval + "]"); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e220fcf5de0..b6347a5c79d 100644 --- 
a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.support.DestructiveOperations; +import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.MetaData; @@ -100,6 +101,9 @@ public final class ClusterSettings extends AbstractScopedSettings { public static Set> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, + TransportClientNodesService.CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL, // TODO these transport client settings are kind of odd here and should only be valid if we are a transport client + TransportClientNodesService.CLIENT_TRANSPORT_PING_TIMEOUT, + TransportClientNodesService.CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME, AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, From 296b48b9d1f79982d22cd9f0a69ff09551968643 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Fri, 22 Jan 2016 14:02:40 +0100 Subject: [PATCH 293/347] Move discovery.* settings to new setting infrastructure Closes #16182 --- .../org/elasticsearch/common/Randomness.java | 12 ++--- .../common/settings/ClusterSettings.java | 28 +++++++++++- .../discovery/DiscoveryModule.java | 15 ++++--- .../discovery/DiscoveryService.java | 9 ++-- .../discovery/zen/ZenDiscovery.java | 45 +++++++++---------- .../discovery/zen/fd/FaultDetection.java | 22 ++++----- .../zen/ping/unicast/UnicastZenPing.java | 15 
+++---- .../org/elasticsearch/tribe/TribeService.java | 5 ++- .../master/IndexingMasterFailoverIT.java | 4 +- .../cluster/ClusterServiceIT.java | 2 +- .../cluster/MinimumMasterNodesIT.java | 12 ++--- .../elasticsearch/cluster/NoMasterNodeIT.java | 4 +- .../allocation/AwarenessAllocationIT.java | 2 +- .../discovery/DiscoveryModuleTests.java | 6 +-- .../DiscoveryWithServiceDisruptionsIT.java | 6 +-- .../discovery/ZenFaultDetectionTests.java | 8 ++-- .../discovery/zen/ZenDiscoveryIT.java | 8 ++-- .../index/TransportIndexFailuresIT.java | 4 +- .../indices/state/RareClusterStateIT.java | 2 +- .../recovery/FullRollingRestartIT.java | 2 +- .../DedicatedClusterSnapshotRestoreIT.java | 2 +- .../java/org/elasticsearch/tribe/TribeIT.java | 4 +- .../cloud/azure/AzureDiscoveryModule.java | 3 +- .../azure/management/AzureComputeService.java | 11 ++++- .../azure/AzureUnicastHostsProvider.java | 14 ++---- .../discovery/azure/AzureDiscoveryPlugin.java | 7 +++ .../AbstractAzureComputeServiceTestCase.java | 2 +- .../azure/AzureMinimumMasterNodesTests.java | 4 +- .../discovery/azure/AzureSimpleTests.java | 18 ++++---- .../azure/AzureTwoStartedNodesTests.java | 4 +- .../elasticsearch/cloud/aws/Ec2Module.java | 3 +- .../discovery/gce/GceDiscoveryPlugin.java | 2 +- .../azure/AzureSnapshotRestoreTests.java | 3 +- .../elasticsearch/tribe/TribeUnitTests.java | 4 +- .../org/elasticsearch/test/ExternalNode.java | 4 +- .../test/InternalTestCluster.java | 8 ++-- .../ClusterDiscoveryConfiguration.java | 3 +- 37 files changed, 172 insertions(+), 135 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/Randomness.java b/core/src/main/java/org/elasticsearch/common/Randomness.java index 7f71afc1c70..154ebf3736b 100644 --- a/core/src/main/java/org/elasticsearch/common/Randomness.java +++ b/core/src/main/java/org/elasticsearch/common/Randomness.java @@ -19,6 +19,7 @@ package org.elasticsearch.common; +import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import java.lang.reflect.Method; @@ -40,7 +41,7 @@ import java.util.concurrent.ThreadLocalRandom; * setting a reproducible seed. When running the Elasticsearch server * process, non-reproducible sources of randomness are provided (unless * a setting is provided for a module that exposes a seed setting (e.g., - * DiscoveryService#SETTING_DISCOVERY_SEED)). + * DiscoveryService#DISCOVERY_SEED_SETTING)). */ public final class Randomness { private static final Method currentMethod; @@ -68,13 +69,12 @@ public final class Randomness { * seed in the settings with the key setting. * * @param settings the settings containing the seed - * @param setting the key to access the seed + * @param setting the setting to access the seed * @return a reproducible source of randomness */ - public static Random get(Settings settings, String setting) { - Long maybeSeed = settings.getAsLong(setting, null); - if (maybeSeed != null) { - return new Random(maybeSeed); + public static Random get(Settings settings, Setting setting) { + if (setting.exists(settings)) { + return new Random(setting.get(settings)); } else { return get(); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 25058ea25fa..93afc3bb95a 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -36,11 +36,15 @@ import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAl import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.discovery.DiscoverySettings; 
import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.discovery.zen.fd.FaultDetection; +import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStoreConfig; @@ -194,6 +198,26 @@ public final class ClusterSettings extends AbstractScopedSettings { Environment.PATH_REPO_SETTING, Environment.PATH_SCRIPTS_SETTING, Environment.PATH_SHARED_DATA_SETTING, - Environment.PIDFILE_SETTING - ))); + Environment.PIDFILE_SETTING, + DiscoveryService.DISCOVERY_SEED_SETTING, + DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING, + DiscoveryModule.DISCOVERY_TYPE_SETTING, + DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING, + FaultDetection.PING_RETRIES_SETTING, + FaultDetection.PING_TIMEOUT_SETTING, + FaultDetection.REGISTER_CONNECTION_LISTENER_SETTING, + FaultDetection.PING_INTERVAL_SETTING, + FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING, + ZenDiscovery.PING_TIMEOUT_SETTING, + ZenDiscovery.JOIN_TIMEOUT_SETTING, + ZenDiscovery.JOIN_RETRY_ATTEMPTS_SETTING, + ZenDiscovery.JOIN_RETRY_DELAY_SETTING, + ZenDiscovery.MAX_PINGS_FROM_ANOTHER_MASTER_SETTING, + ZenDiscovery.SEND_LEAVE_REQUEST_SETTING, + ZenDiscovery.MASTER_ELECTION_FILTER_CLIENT_SETTING, + ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING, + ZenDiscovery.MASTER_ELECTION_FILTER_DATA_SETTING, + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING + ))); } diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 1ab608761fb..b51339aac90 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java 
+++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -22,6 +22,7 @@ package org.elasticsearch.discovery; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.discovery.local.LocalDiscovery; @@ -36,14 +37,17 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; /** * A module for loading classes for node discovery. */ public class DiscoveryModule extends AbstractModule { - public static final String DISCOVERY_TYPE_KEY = "discovery.type"; - public static final String ZEN_MASTER_SERVICE_TYPE_KEY = "discovery.zen.masterservice.type"; + public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", + settings -> DiscoveryNode.localNode(settings) ? "local" : "zen", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting ZEN_MASTER_SERVICE_TYPE_SETTING = new Setting<>("discovery.zen.masterservice.type", + "zen", Function.identity(), false, Setting.Scope.CLUSTER); private final Settings settings; private final List> unicastHostProviders = new ArrayList<>(); @@ -93,15 +97,14 @@ public class DiscoveryModule extends AbstractModule { @Override protected void configure() { - String defaultType = DiscoveryNode.localNode(settings) ? 
"local" : "zen"; - String discoveryType = settings.get(DISCOVERY_TYPE_KEY, defaultType); + String discoveryType = DISCOVERY_TYPE_SETTING.get(settings); Class discoveryClass = discoveryTypes.get(discoveryType); if (discoveryClass == null) { throw new IllegalArgumentException("Unknown Discovery type [" + discoveryType + "]"); } if (discoveryType.equals("local") == false) { - String masterServiceTypeKey = settings.get(ZEN_MASTER_SERVICE_TYPE_KEY, "zen"); + String masterServiceTypeKey = ZEN_MASTER_SERVICE_TYPE_SETTING.get(settings); final Class masterService = masterServiceType.get(masterServiceTypeKey); if (masterService == null) { throw new IllegalArgumentException("Unknown master service type [" + masterServiceTypeKey + "]"); @@ -121,4 +124,4 @@ public class DiscoveryModule extends AbstractModule { bind(Discovery.class).to(discoveryClass).asEagerSingleton(); bind(DiscoveryService.class).asEagerSingleton(); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java index a82099658ea..22f68a738c7 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -39,8 +40,8 @@ import java.util.concurrent.TimeUnit; */ public class DiscoveryService extends AbstractLifecycleComponent { - public static final String SETTING_INITIAL_STATE_TIMEOUT = "discovery.initial_state_timeout"; - public static final String SETTING_DISCOVERY_SEED = "discovery.id.seed"; + public static final Setting 
INITIAL_STATE_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), false, Setting.Scope.CLUSTER); + public static final Setting DISCOVERY_SEED_SETTING = Setting.longSetting("discovery.id.seed", 0l, Long.MIN_VALUE, false, Setting.Scope.CLUSTER); private static class InitialStateListener implements InitialStateDiscoveryListener { @@ -71,7 +72,7 @@ public class DiscoveryService extends AbstractLifecycleComponent implements Discovery, PingContextProvider { public final static Setting REJOIN_ON_MASTER_GONE_SETTING = Setting.boolSetting("discovery.zen.rejoin_on_master_gone", true, true, Setting.Scope.CLUSTER); - public final static String SETTING_PING_TIMEOUT = "discovery.zen.ping_timeout"; - public final static String SETTING_JOIN_TIMEOUT = "discovery.zen.join_timeout"; - public final static String SETTING_JOIN_RETRY_ATTEMPTS = "discovery.zen.join_retry_attempts"; - public final static String SETTING_JOIN_RETRY_DELAY = "discovery.zen.join_retry_delay"; - public final static String SETTING_MAX_PINGS_FROM_ANOTHER_MASTER = "discovery.zen.max_pings_from_another_master"; - public final static String SETTING_SEND_LEAVE_REQUEST = "discovery.zen.send_leave_request"; - public final static String SETTING_MASTER_ELECTION_FILTER_CLIENT = "discovery.zen.master_election.filter_client"; - public final static String SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT = "discovery.zen.master_election.wait_for_joins_timeout"; - public final static String SETTING_MASTER_ELECTION_FILTER_DATA = "discovery.zen.master_election.filter_data"; + public final static Setting PING_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), false, Setting.Scope.CLUSTER); + public final static Setting JOIN_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.join_timeout", + settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), TimeValue.timeValueMillis(0), false, 
Setting.Scope.CLUSTER); + public final static Setting JOIN_RETRY_ATTEMPTS_SETTING = Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, false, Setting.Scope.CLUSTER); + public final static Setting JOIN_RETRY_DELAY_SETTING = Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), false, Setting.Scope.CLUSTER); + public final static Setting MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, false, Setting.Scope.CLUSTER); + public final static Setting SEND_LEAVE_REQUEST_SETTING = Setting.boolSetting("discovery.zen.send_leave_request", true, false, Setting.Scope.CLUSTER); + public final static Setting MASTER_ELECTION_FILTER_CLIENT_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_client", true, false, Setting.Scope.CLUSTER); + public final static Setting MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout", + settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); + public final static Setting MASTER_ELECTION_FILTER_DATA_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_data", false, false, Setting.Scope.CLUSTER); public static final String DISCOVERY_REJOIN_ACTION_NAME = "internal:discovery/zen/rejoin"; @@ -164,26 +166,19 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen this.discoverySettings = discoverySettings; this.pingService = pingService; this.electMaster = electMasterService; - this.pingTimeout = settings.getAsTime(SETTING_PING_TIMEOUT, timeValueSeconds(3)); + this.pingTimeout = PING_TIMEOUT_SETTING.get(settings); - this.joinTimeout = settings.getAsTime(SETTING_JOIN_TIMEOUT, TimeValue.timeValueMillis(this.pingTimeout.millis() * 20)); - this.joinRetryAttempts = settings.getAsInt(SETTING_JOIN_RETRY_ATTEMPTS, 3); - 
this.joinRetryDelay = settings.getAsTime(SETTING_JOIN_RETRY_DELAY, TimeValue.timeValueMillis(100)); - this.maxPingsFromAnotherMaster = settings.getAsInt(SETTING_MAX_PINGS_FROM_ANOTHER_MASTER, 3); - this.sendLeaveRequest = settings.getAsBoolean(SETTING_SEND_LEAVE_REQUEST, true); + this.joinTimeout = JOIN_TIMEOUT_SETTING.get(settings); + this.joinRetryAttempts = JOIN_RETRY_ATTEMPTS_SETTING.get(settings); + this.joinRetryDelay = JOIN_RETRY_DELAY_SETTING.get(settings); + this.maxPingsFromAnotherMaster = MAX_PINGS_FROM_ANOTHER_MASTER_SETTING.get(settings); + this.sendLeaveRequest = SEND_LEAVE_REQUEST_SETTING.get(settings); - this.masterElectionFilterClientNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_CLIENT, true); - this.masterElectionFilterDataNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_DATA, false); - this.masterElectionWaitForJoinsTimeout = settings.getAsTime(SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT, TimeValue.timeValueMillis(joinTimeout.millis() / 2)); + this.masterElectionFilterClientNodes = MASTER_ELECTION_FILTER_CLIENT_SETTING.get(settings); + this.masterElectionFilterDataNodes = MASTER_ELECTION_FILTER_DATA_SETTING.get(settings); + this.masterElectionWaitForJoinsTimeout = MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.get(settings); this.rejoinOnMasterGone = REJOIN_ON_MASTER_GONE_SETTING.get(settings); - if (this.joinRetryAttempts < 1) { - throw new IllegalArgumentException("'" + SETTING_JOIN_RETRY_ATTEMPTS + "' must be a positive number. got [" + SETTING_JOIN_RETRY_ATTEMPTS + "]"); - } - if (this.maxPingsFromAnotherMaster < 1) { - throw new IllegalArgumentException("'" + SETTING_MAX_PINGS_FROM_ANOTHER_MASTER + "' must be a positive number. 
got [" + this.maxPingsFromAnotherMaster + "]"); - } - logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.filter_client [{}], master_election.filter_data [{}]", this.pingTimeout, joinTimeout, masterElectionFilterClientNodes, masterElectionFilterDataNodes); clusterSettings.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java index 436ef6bc2b5..62b0250315c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java @@ -21,6 +21,8 @@ package org.elasticsearch.discovery.zen.fd; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -35,11 +37,11 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; */ public abstract class FaultDetection extends AbstractComponent { - public static final String SETTING_CONNECT_ON_NETWORK_DISCONNECT = "discovery.zen.fd.connect_on_network_disconnect"; - public static final String SETTING_PING_INTERVAL = "discovery.zen.fd.ping_interval"; - public static final String SETTING_PING_TIMEOUT = "discovery.zen.fd.ping_timeout"; - public static final String SETTING_PING_RETRIES = "discovery.zen.fd.ping_retries"; - public static final String SETTING_REGISTER_CONNECTION_LISTENER = "discovery.zen.fd.register_connection_listener"; + public static final Setting CONNECT_ON_NETWORK_DISCONNECT_SETTING = 
Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, false, Scope.CLUSTER); + public static final Setting PING_INTERVAL_SETTING = Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), false, Scope.CLUSTER); + public static final Setting PING_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), false, Scope.CLUSTER); + public static final Setting PING_RETRIES_SETTING = Setting.intSetting("discovery.zen.fd.ping_retries", 3, false, Scope.CLUSTER); + public static final Setting REGISTER_CONNECTION_LISTENER_SETTING = Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, false, Scope.CLUSTER); protected final ThreadPool threadPool; protected final ClusterName clusterName; @@ -60,11 +62,11 @@ public abstract class FaultDetection extends AbstractComponent { this.transportService = transportService; this.clusterName = clusterName; - this.connectOnNetworkDisconnect = settings.getAsBoolean(SETTING_CONNECT_ON_NETWORK_DISCONNECT, false); - this.pingInterval = settings.getAsTime(SETTING_PING_INTERVAL, timeValueSeconds(1)); - this.pingRetryTimeout = settings.getAsTime(SETTING_PING_TIMEOUT, timeValueSeconds(30)); - this.pingRetryCount = settings.getAsInt(SETTING_PING_RETRIES, 3); - this.registerConnectionListener = settings.getAsBoolean(SETTING_REGISTER_CONNECTION_LISTENER, true); + this.connectOnNetworkDisconnect = CONNECT_ON_NETWORK_DISCONNECT_SETTING.get(settings); + this.pingInterval = PING_INTERVAL_SETTING.get(settings); + this.pingRetryTimeout = PING_TIMEOUT_SETTING.get(settings); + this.pingRetryCount = PING_RETRIES_SETTING.get(settings); + this.registerConnectionListener = REGISTER_CONNECTION_LISTENER_SETTING.get(settings); this.connectionListener = new FDConnectionListener(); if (registerConnectionListener) { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java 
b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 99feb4b7f72..a9960272d0e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -58,6 +59,7 @@ import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -72,6 +74,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import static org.elasticsearch.common.unit.TimeValue.readTimeValue; import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; @@ -83,7 +86,8 @@ import static org.elasticsearch.discovery.zen.ping.ZenPing.PingResponse.readPing public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPing { public static final String ACTION_NAME = "internal:discovery/zen/unicast"; - public static final String DISCOVERY_ZEN_PING_UNICAST_HOSTS = "discovery.zen.ping.unicast.hosts"; + public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting 
DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, false, Setting.Scope.CLUSTER); // these limits are per-address public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; @@ -135,13 +139,8 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen } } - this.concurrentConnects = this.settings.getAsInt("discovery.zen.ping.unicast.concurrent_connects", 10); - String[] hostArr = this.settings.getAsArray(DISCOVERY_ZEN_PING_UNICAST_HOSTS); - // trim the hosts - for (int i = 0; i < hostArr.length; i++) { - hostArr[i] = hostArr[i].trim(); - } - List hosts = CollectionUtils.arrayAsArrayList(hostArr); + this.concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); + List hosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); final int limitPortCounts; if (hosts.isEmpty()) { // if unicast hosts are not specified, fill with simple defaults on the local machine diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 1a89bc2f918..48f04e3690e 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; @@ -101,8 +102,8 @@ public class TribeService extends AbstractLifecycleComponent { // its a tribe configured node..., force settings Settings.Builder sb = Settings.builder().put(settings); sb.put("node.client", true); // this node should just act as a node 
client - sb.put("discovery.type", "local"); // a tribe node should not use zen discovery - sb.put("discovery.initial_state_timeout", 0); // nothing is going to be discovered, since no master will be elected + sb.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local"); // a tribe node should not use zen discovery + sb.put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); // nothing is going to be discovered, since no master will be elected if (sb.get("cluster.name") == null) { sb.put("cluster.name", "tribe_" + Strings.randomBase64UUID()); // make sure it won't join other tribe nodes in the same JVM } diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index f20e54050c6..6d0ae3d2e76 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -60,8 +60,8 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase { logger.info("--> start 4 nodes, 3 master, 1 data"); final Settings sharedSettings = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly - .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) diff --git 
a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 2d781c866de..72d58f7f70e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -642,7 +642,7 @@ public class ClusterServiceIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("discovery.type", "zen") .put("discovery.zen.minimum_master_nodes", 1) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "400ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms") .put("discovery.initial_state_timeout", "500ms") .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 2d726d97424..612f910d3fa 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -77,7 +77,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("discovery.type", "zen") .put("discovery.zen.minimum_master_nodes", 2) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") .build(); @@ -189,7 +189,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("discovery.type", "zen") .put("discovery.zen.minimum_master_nodes", 3) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "1s") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "1s") .put("discovery.initial_state_timeout", "500ms") .build(); @@ -264,7 +264,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { public void testDynamicUpdateMinimumMasterNodes() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "zen") - 
.put(ZenDiscovery.SETTING_PING_TIMEOUT, "400ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms") .put("discovery.initial_state_timeout", "500ms") .build(); @@ -322,7 +322,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { int nodeCount = scaledRandomIntBetween(1, 5); Settings.Builder settings = settingsBuilder() .put("discovery.type", "zen") - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms"); // set an initial value which is at least quorum to avoid split brains during initial startup @@ -361,8 +361,8 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { public void testCanNotPublishWithoutMinMastNodes() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "zen") - .put(FaultDetection.SETTING_PING_TIMEOUT, "1h") // disable it - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1h") // disable it + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "100ms") // speed things up .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index 8e5479d6f84..3c71f5f5e2b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -65,7 +65,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { .put("discovery.type", "zen") .put("action.auto_create_index", autoCreateIndex) .put("discovery.zen.minimum_master_nodes", 2) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") 
.put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all") .build(); @@ -217,7 +217,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { .put("discovery.type", "zen") .put("action.auto_create_index", false) .put("discovery.zen.minimum_master_nodes", 2) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "write") .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java index 1e9c25ed78c..cf948366f6a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java @@ -108,7 +108,7 @@ public class AwarenessAllocationIT extends ESIntegTestCase { .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.getKey() + "zone.values", "a,b") .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), "zone") .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 3) - .put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "10s") + .put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "10s") .build(); logger.info("--> starting 4 nodes on different zones"); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 2a1b146da92..2b3f918d545 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -46,7 +46,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testRegisterMasterElectionService() { Settings settings = Settings.builder().put("node.local", 
false). - put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_KEY, "custom").build(); + put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); assertBinding(module, ElectMasterService.class, DummyMasterElectionService.class); @@ -55,7 +55,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testLoadUnregisteredMasterElectionService() { Settings settings = Settings.builder().put("node.local", false). - put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_KEY, "foobar").build(); + put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "foobar").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); assertBindingFailure(module, "Unknown master service type [foobar]"); @@ -71,7 +71,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testRegisterDiscovery() { boolean local = randomBoolean(); Settings settings = Settings.builder().put("node.local", local). 
- put(DiscoveryModule.DISCOVERY_TYPE_KEY, "custom").build(); + put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addDiscoveryType("custom", DummyDisco.class); assertBinding(module, Discovery.class, DummyDisco.class); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 268d1064f64..f77bea8a27d 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -164,8 +164,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } final static Settings DEFAULT_SETTINGS = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly - .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("http.enabled", false) // just to make test quicker @@ -962,7 +962,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // don't wait for initial state, wat want to add the disruption while the cluster is forming.. 
internalCluster().startNodesAsync(3, Settings.builder() - .put(DiscoveryService.SETTING_INITIAL_STATE_TIMEOUT, "1ms") + .put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), "1ms") .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "3s") .build()).get(); diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index e7a10b0f62b..e3279d2839c 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -131,8 +131,8 @@ public class ZenFaultDetectionTests extends ESTestCase { Settings.Builder settings = Settings.builder(); boolean shouldRetry = randomBoolean(); // make sure we don't ping again after the initial ping - settings.put(FaultDetection.SETTING_CONNECT_ON_NETWORK_DISCONNECT, shouldRetry) - .put(FaultDetection.SETTING_PING_INTERVAL, "5m"); + settings.put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry) + .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m"); ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(buildNodesForA(true)).build(); NodesFaultDetection nodesFDA = new NodesFaultDetection(settings.build(), threadPool, serviceA, clusterState.getClusterName()); nodesFDA.setLocalNode(nodeA); @@ -179,8 +179,8 @@ public class ZenFaultDetectionTests extends ESTestCase { Settings.Builder settings = Settings.builder(); boolean shouldRetry = randomBoolean(); // make sure we don't ping - settings.put(FaultDetection.SETTING_CONNECT_ON_NETWORK_DISCONNECT, shouldRetry) - .put(FaultDetection.SETTING_PING_INTERVAL, "5m"); + settings.put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry) + .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m"); ClusterName clusterName = new ClusterName(randomAsciiOfLengthBetween(3, 20)); final ClusterState state = 
ClusterState.builder(clusterName).nodes(buildNodesForA(false)).build(); MasterFaultDetection masterFD = new MasterFaultDetection(settings.build(), threadPool, serviceA, clusterName, diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 9f9c0420c2f..eb17ab29d33 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -95,8 +95,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { public void testNoShardRelocationsOccurWhenElectedMasterNodeFails() throws Exception { Settings defaultSettings = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") - .put(FaultDetection.SETTING_PING_RETRIES, "1") + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") .put("discovery.type", "zen") .build(); @@ -142,8 +142,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { @TestLogging(value = "action.admin.cluster.health:TRACE") public void testNodeFailuresAreProcessedOnce() throws ExecutionException, InterruptedException, IOException { Settings defaultSettings = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") - .put(FaultDetection.SETTING_PING_RETRIES, "1") + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") .put("discovery.type", "zen") .build(); diff --git a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java index 1f083466896..9dfeb4438ad 100644 --- a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java @@ -53,8 +53,8 @@ public class TransportIndexFailuresIT extends ESIntegTestCase { private static final 
Settings nodeSettings = Settings.settingsBuilder() .put("discovery.type", "zen") // <-- To override the local setting if set externally - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly - .put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // <-- for hitting simulated network failures quickly .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("discovery.zen.minimum_master_nodes", 1) .build(); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 4bf752886c9..65a4d5ab76d 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -171,7 +171,7 @@ public class RareClusterStateIT extends ESIntegTestCase { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/14932") public void testDeleteCreateInOneBulk() throws Exception { internalCluster().startNodesAsync(2, Settings.builder() - .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "zen") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .build()).get(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); prepareCreate("test").setSettings(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, true).addMapping("type").get(); diff --git a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java index 3b61edf2923..663d951a087 100644 --- a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ 
b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -57,7 +57,7 @@ public class FullRollingRestartIT extends ESIntegTestCase { } public void testFullRollingRestart() throws Exception { - Settings settings = Settings.builder().put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "30s").build(); + Settings settings = Settings.builder().put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "30s").build(); internalCluster().startNode(settings); createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index ad554b7628b..22bef614bf6 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -100,7 +100,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> start 2 nodes"); Settings nodeSettings = settingsBuilder() .put("discovery.type", "zen") - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") .build(); internalCluster().startNode(nodeSettings); diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index da01ca935f4..e2ea4ee05b5 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -135,8 +135,8 @@ public class TribeIT extends ESIntegTestCase { tribe2Defaults.put("tribe.t2." + entry.getKey(), entry.getValue()); } // give each tribe it's unicast hosts to connect to - tribe1Defaults.putArray("tribe.t1." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS, getUnicastHosts(internalCluster().client())); - tribe1Defaults.putArray("tribe.t2." 
+ UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS, getUnicastHosts(cluster2.client())); + tribe1Defaults.putArray("tribe.t1." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey(), getUnicastHosts(internalCluster().client())); + tribe1Defaults.putArray("tribe.t2." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey(), getUnicastHosts(cluster2.client())); Settings merged = Settings.builder() .put("tribe.t1.cluster.name", internalCluster().getClusterName()) diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java index 5215b90e7e1..d48eed9e507 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.azure.AzureDiscovery; /** @@ -73,7 +74,7 @@ public class AzureDiscoveryModule extends AbstractModule { */ public static boolean isDiscoveryReady(Settings settings, ESLogger logger) { // User set discovery.type: azure - if (!AzureDiscovery.AZURE.equalsIgnoreCase(settings.get("discovery.type"))) { + if (!AzureDiscovery.AZURE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings))) { logger.trace("discovery.type not set to {}", AzureDiscovery.AZURE); return false; } diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java index c79a7450929..de2343d9d87 100644 --- 
a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java @@ -20,6 +20,11 @@ package org.elasticsearch.cloud.azure.management; import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; + +import java.util.Locale; /** * @@ -39,9 +44,11 @@ public interface AzureComputeService { } static public final class Discovery { - public static final String REFRESH = "discovery.azure.refresh_interval"; + public static final Setting REFRESH_SETTING = Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), false, Setting.Scope.CLUSTER); + + public static final Setting HOST_TYPE_SETTING = new Setting<>("discovery.azure.host.type", + AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), AzureUnicastHostsProvider.HostType::fromString, false, Setting.Scope.CLUSTER); - public static final String HOST_TYPE = "discovery.azure.host.type"; public static final String ENDPOINT_NAME = "discovery.azure.endpoint.name"; public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name"; public static final String DEPLOYMENT_SLOT = "discovery.azure.deployment.slot"; diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java index 690ab623bd9..aac167fc673 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java @@ -45,7 +45,6 @@ import java.net.InetAddress; import 
java.net.InetSocketAddress; import java.util.ArrayList; import java.util.List; -import java.util.Locale; /** * @@ -68,7 +67,7 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic return hostType; } } - return null; + throw new IllegalArgumentException("invalid value for host type [" + type + "]"); } } @@ -118,16 +117,9 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic this.networkService = networkService; this.version = version; - this.refreshInterval = settings.getAsTime(Discovery.REFRESH, TimeValue.timeValueSeconds(0)); + this.refreshInterval = Discovery.REFRESH_SETTING.get(settings); - String strHostType = settings.get(Discovery.HOST_TYPE, HostType.PRIVATE_IP.name()).toUpperCase(Locale.ROOT); - HostType tmpHostType = HostType.fromString(strHostType); - if (tmpHostType == null) { - logger.warn("wrong value for [{}]: [{}]. falling back to [{}]...", Discovery.HOST_TYPE, - strHostType, HostType.PRIVATE_IP.name().toLowerCase(Locale.ROOT)); - tmpHostType = HostType.PRIVATE_IP; - } - this.hostType = tmpHostType; + this.hostType = Discovery.HOST_TYPE_SETTING.get(settings); this.publicEndpointName = settings.get(Discovery.ENDPOINT_NAME, "elasticsearch"); // Deployment name could be set with discovery.azure.deployment.name diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java index e61a82a6ceb..418bd1291ed 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java @@ -20,10 +20,12 @@ package org.elasticsearch.plugin.discovery.azure; import org.elasticsearch.cloud.azure.AzureDiscoveryModule; +import org.elasticsearch.cloud.azure.management.AzureComputeService; import 
org.elasticsearch.common.inject.Module; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.azure.AzureDiscovery; import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; @@ -66,4 +68,9 @@ public class AzureDiscoveryPlugin extends Plugin { discoveryModule.addUnicastHostProvider(AzureUnicastHostsProvider.class); } } + + public void onModule(SettingsModule settingsModule) { + settingsModule.registerSetting(AzureComputeService.Discovery.REFRESH_SETTING); + settingsModule.registerSetting(AzureComputeService.Discovery.HOST_TYPE_SETTING); + } } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java index bf1ba94d309..9babfb4c2ad 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java @@ -48,7 +48,7 @@ public abstract class AbstractAzureComputeServiceTestCase extends ESIntegTestCas // We add a fake subscription_id to start mock compute service builder.put(Management.SUBSCRIPTION_ID, "fake") - .put(Discovery.REFRESH, "5s") + .put(Discovery.REFRESH_SETTING.getKey(), "5s") .put(Management.KEYSTORE_PATH, "dummy") .put(Management.KEYSTORE_PASSWORD, "dummy") .put(Management.SERVICE_NAME, "dummy"); diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java index 19d6d038e8d..0f2d96573ef 100644 --- 
a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java @@ -53,8 +53,8 @@ public class AzureMinimumMasterNodesTests extends AbstractAzureComputeServiceTes .put(super.nodeSettings(nodeOrdinal)) .put("discovery.zen.minimum_master_nodes", 2) // Make the test run faster - .put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "50ms") - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "10ms") + .put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "50ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "10ms") .put("discovery.initial_state_timeout", "100ms"); return builder.build(); } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java index cc4021fb78c..6a6ef0944ce 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cloud.azure.management.AzureComputeService.Management; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, @@ -40,7 +41,7 @@ public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { public void testOneNodeDhouldRunUsingPrivateIp() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "private_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "private_ip"); logger.info("--> start one node"); internalCluster().startNode(settings); @@ -53,7 +54,7 @@ public class AzureSimpleTests 
extends AbstractAzureComputeServiceTestCase { public void testOneNodeShouldRunUsingPublicIp() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "public_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "public_ip"); logger.info("--> start one node"); internalCluster().startNode(settings); @@ -66,13 +67,14 @@ public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { public void testOneNodeShouldRunUsingWrongSettings() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "do_not_exist"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "do_not_exist"); logger.info("--> start one node"); - internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().masterNodeId(), notNullValue()); - - // We expect having 1 node as part of the cluster, let's test that - checkNumberOfNodes(1); + try { + internalCluster().startNode(settings); + fail("Expected IllegalArgumentException on startup"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("invalid value for host type [do_not_exist]")); + } } } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java index 2d134d0cc83..880c05ed121 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java @@ -42,7 +42,7 @@ public class AzureTwoStartedNodesTests extends AbstractAzureComputeServiceTestCa public void testTwoNodesShouldRunUsingPrivateIp() { Settings.Builder settings = Settings.settingsBuilder() 
.put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "private_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "private_ip"); logger.info("--> start first node"); internalCluster().startNode(settings); @@ -60,7 +60,7 @@ public class AzureTwoStartedNodesTests extends AbstractAzureComputeServiceTestCa public void testTwoNodesShouldRunUsingPublicIp() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "public_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "public_ip"); logger.info("--> start first node"); internalCluster().startNode(settings); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java index 09a0116fc6d..4aac319cc8e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java @@ -22,6 +22,7 @@ package org.elasticsearch.cloud.aws; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.ec2.Ec2Discovery; public class Ec2Module extends AbstractModule { @@ -37,7 +38,7 @@ public class Ec2Module extends AbstractModule { */ public static boolean isEc2DiscoveryActive(Settings settings, ESLogger logger) { // User set discovery.type: ec2 - if (!Ec2Discovery.EC2.equalsIgnoreCase(settings.get("discovery.type"))) { + if (!Ec2Discovery.EC2.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings))) { logger.trace("discovery.type not set to {}", Ec2Discovery.EC2); return false; } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java 
b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index 496a1df8a9b..97e637abcc2 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -115,7 +115,7 @@ public class GceDiscoveryPlugin extends Plugin { */ public static boolean isDiscoveryAlive(Settings settings, ESLogger logger) { // User set discovery.type: gce - if (GceDiscovery.GCE.equalsIgnoreCase(settings.get("discovery.type")) == false) { + if (GceDiscovery.GCE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings)) == false) { logger.debug("discovery.type not set to {}", GceDiscovery.GCE); return false; } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index cab2f98e940..1818a5e6252 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.azure.AzureRepository.Repository; @@ -77,7 +78,7 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder().put(super.nodeSettings(nodeOrdinal)) // In snapshot tests, 
we explicitly disable cloud discovery - .put("discovery.type", "local") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") .build(); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index fb01f9b9b37..24560aa8b1c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -64,14 +64,14 @@ public class TribeUnitTests extends ESTestCase { .put(baseSettings) .put("cluster.name", "tribe1") .put("name", "tribe1_node") - .put(DiscoveryService.SETTING_DISCOVERY_SEED, random().nextLong()) + .put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); tribe2 = new TribeClientNode( Settings.builder() .put(baseSettings) .put("cluster.name", "tribe2") .put("name", "tribe2_node") - .put(DiscoveryService.SETTING_DISCOVERY_SEED, random().nextLong()) + .put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 05f194fc26a..5d169fc6acd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -53,7 +53,7 @@ final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) - .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "zen") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .put("node.mode", "network").build(); // we need network mode for this private final Path path; @@ -112,7 +112,7 @@ final class ExternalNode implements Closeable { case "node.mode": case "node.local": case 
NetworkModule.TRANSPORT_TYPE_KEY: - case DiscoveryModule.DISCOVERY_TYPE_KEY: + case "discovery.type": case NetworkModule.TRANSPORT_SERVICE_TYPE_KEY: case InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING: continue; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 6fafb52164d..a4304821c7c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -598,7 +598,7 @@ public final class InternalTestCluster extends TestCluster { .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) // allow overriding path.home .put(settings) .put("name", name) - .put(DiscoveryService.SETTING_DISCOVERY_SEED, seed) + .put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), seed) .build(); MockNode node = new MockNode(finalSettings, version, plugins); return new NodeAndClient(name, node); @@ -679,7 +679,7 @@ public final class InternalTestCluster extends TestCluster { Builder builder = settingsBuilder().put(settings).put("node.client", true); if (size() == 0) { // if we are the first node - don't wait for a state - builder.put("discovery.initial_state_timeout", 0); + builder.put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); } String name = startNode(builder); return nodes.get(name).nodeClient(); @@ -845,8 +845,8 @@ public final class InternalTestCluster extends TestCluster { IOUtils.rm(nodeEnv.nodeDataPaths()); } } - final long newIdSeed = node.settings().getAsLong(DiscoveryService.SETTING_DISCOVERY_SEED, 0l) + 1; // use a new seed to make sure we have new node id - Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(DiscoveryService.SETTING_DISCOVERY_SEED, newIdSeed).build(); + final long newIdSeed = DiscoveryService.DISCOVERY_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure 
we have new node id + Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), newIdSeed).build(); Collection> plugins = node.getPlugins(); Version version = node.getVersion(); node = new MockNode(finalSettings, version, plugins); diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index e549c185616..71c1cc24fa7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; @@ -35,7 +36,7 @@ import java.util.Set; public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { - static Settings DEFAULT_NODE_SETTINGS = Settings.settingsBuilder().put("discovery.type", "zen").build(); + static Settings DEFAULT_NODE_SETTINGS = Settings.settingsBuilder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen").build(); private static final String IP_ADDR = "127.0.0.1"; final int numOfNodes; From 9b3559f4732e1902f06ca9995a1f9027c61933bd Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jan 2016 12:03:18 +0100 Subject: [PATCH 294/347] Remove `test.cluster.node.seed` and special case `tests.portsfile` * `test.cluster.node.seed` was only used in one place where it wasn't adding value and was now replaced with a constant * `tests.portsfile` is now an official setting and has been renamed to 
`node.portsfile` this commit also convert `search.default_keep_alive` and `search.keep_alive_interval` to the new settings infrastrucutre --- .../org/elasticsearch/gradle/test/NodeInfo.groovy | 2 +- .../common/settings/ClusterSettings.java | 6 +++++- .../src/main/java/org/elasticsearch/node/Node.java | 4 +++- .../node/internal/InternalSettingsPreparer.java | 2 -- .../org/elasticsearch/search/SearchService.java | 10 +++++----- .../search/StressSearchServiceReaperIT.java | 2 +- .../cache/recycler/MockPageCacheRecycler.java | 7 ++++--- .../elasticsearch/test/InternalTestCluster.java | 14 ++++---------- 8 files changed, 23 insertions(+), 24 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index b369d35c03a..b41b1822000 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -129,7 +129,7 @@ class NodeInfo { 'JAVA_HOME' : project.javaHome, 'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc ] - args.add("-Des.tests.portsfile=true") + args.add("-Des.node.portsfile=true") args.addAll(config.systemProperties.collect { key, value -> "-D${key}=${value}" }) for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('es.')) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 93afc3bb95a..48fd46ae76c 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -55,6 +55,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; import 
org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.node.Node; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.threadpool.ThreadPool; @@ -218,6 +219,9 @@ public final class ClusterSettings extends AbstractScopedSettings { ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING, ZenDiscovery.MASTER_ELECTION_FILTER_DATA_SETTING, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, - UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING, + SearchService.DEFAULT_KEEPALIVE_SETTING, + SearchService.KEEPALIVE_INTERVAL_SETTING, + Node.WRITE_PORTS_FIELD_SETTING ))); } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index c5cf53defaa..245a23c80ab 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; @@ -120,6 +121,7 @@ public class Node implements Releasable { private static final String CLIENT_TYPE = "node"; public static final String HTTP_ENABLED = "http.enabled"; + public static final Setting WRITE_PORTS_FIELD_SETTING = Setting.boolSetting("node.portsfile", false, false, Setting.Scope.CLUSTER); private final Lifecycle lifecycle = new Lifecycle(); private final Injector injector; private final Settings settings; @@ -275,7 +277,7 @@ public class 
Node implements Releasable { injector.getInstance(ResourceWatcherService.class).start(); injector.getInstance(TribeService.class).start(); - if (System.getProperty("es.tests.portsfile", "false").equals("true")) { + if (WRITE_PORTS_FIELD_SETTING.get(settings)) { if (settings.getAsBoolean("http.enabled", true)) { HttpServerTransport http = injector.getInstance(HttpServerTransport.class); writePortsFile("http", http.boundAddress()); diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 47f262d8d06..df4e09d28e8 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -21,7 +21,6 @@ package org.elasticsearch.node.internal; import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.cli.Terminal; @@ -109,7 +108,6 @@ public class InternalSettingsPreparer { // we put back the path.logs so we can use it in the logging configuration file output.put(Environment.PATH_LOGS_SETTING.getKey(), cleanPath(environment.logsFile().toAbsolutePath().toString())); - return new Environment(output.build()); } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 6a84bb44ae7..9a569cef5aa 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -122,8 +122,9 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; public class SearchService extends AbstractLifecycleComponent implements IndexEventListener { public static final Setting 
INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading", Loading.LAZY.toString(), (s) -> Loading.parse(s, Loading.LAZY), false, Setting.Scope.INDEX); - public static final String DEFAULT_KEEPALIVE_KEY = "search.default_keep_alive"; - public static final String KEEPALIVE_INTERVAL_KEY = "search.keep_alive_interval"; + // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes + public static final Setting DEFAULT_KEEPALIVE_SETTING = Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), false, Setting.Scope.CLUSTER); + public static final Setting KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), false, Setting.Scope.CLUSTER); public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.CLUSTER); @@ -183,9 +184,8 @@ public class SearchService extends AbstractLifecycleComponent imp this.fetchPhase = fetchPhase; this.indicesQueryCache = indicesQueryCache; - TimeValue keepAliveInterval = settings.getAsTime(KEEPALIVE_INTERVAL_KEY, timeValueMinutes(1)); - // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes - this.defaultKeepAlive = settings.getAsTime(DEFAULT_KEEPALIVE_KEY, timeValueMinutes(5)).millis(); + TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings); + this.defaultKeepAlive = DEFAULT_KEEPALIVE_SETTING.get(settings).millis(); Map elementParsers = new HashMap<>(); elementParsers.putAll(dfsPhase.parseElements()); diff --git a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java index addfe14c488..9ea5ec93f1f 100644 --- a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java @@ -40,7 +40,7 @@ public class StressSearchServiceReaperIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { // very frequent checks return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(1)).build(); + .put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(1)).build(); } // see issue #5165 - this test fails each time without the fix in pull #5170 diff --git a/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java b/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java index 99cd417133d..80b2c3279bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java +++ b/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.recycler.Recycler.V; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.threadpool.ThreadPool; import java.lang.reflect.Array; @@ -63,8 +62,10 @@ public class MockPageCacheRecycler extends PageCacheRecycler { @Inject public MockPageCacheRecycler(Settings settings, ThreadPool threadPool) { super(settings, threadPool); - final long seed = settings.getAsLong(InternalTestCluster.SETTING_CLUSTER_NODE_SEED, 0L); - random = new Random(seed); + // we always initialize with 0 here since we really only wanna have some random bytes / ints / longs + // and given the fact that it's called concurrently it won't reproduces anyway the same order other than in a unittest + // for the latter 0 is just fine + random = new Random(0); } private V wrap(final V v) { diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index a4304821c7c..9fb3eec8489 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -146,11 +146,6 @@ public final class InternalTestCluster extends TestCluster { private final ESLogger logger = Loggers.getLogger(getClass()); - /** - * A node level setting that holds a per node random seed that is consistent across node restarts - */ - public static final String SETTING_CLUSTER_NODE_SEED = "test.cluster.node.seed"; - /** * The number of ports in the range used for this JVM */ @@ -381,8 +376,7 @@ public final class InternalTestCluster extends TestCluster { private Settings getRandomNodeSettings(long seed) { Random random = new Random(seed); - Builder builder = Settings.settingsBuilder() - .put(SETTING_CLUSTER_NODE_SEED, seed); + Builder builder = Settings.settingsBuilder(); if (isLocalTransportConfigured() == false) { builder.put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), rarely(random)); } @@ -390,12 +384,12 @@ public final class InternalTestCluster extends TestCluster { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values())); } if (random.nextInt(10) == 0) { // 10% of the nodes have a very frequent check interval - builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(10 + random.nextInt(2000))); + builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(10 + random.nextInt(2000))); } else if (random.nextInt(10) != 0) { // 90% of the time - 10% of the time we don't set anything - builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueSeconds(10 + random.nextInt(5 * 60))); + builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueSeconds(10 + 
random.nextInt(5 * 60))); } if (random.nextBoolean()) { // sometimes set a - builder.put(SearchService.DEFAULT_KEEPALIVE_KEY, TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); + builder.put(SearchService.DEFAULT_KEEPALIVE_SETTING.getKey(), TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); } if (random.nextInt(10) == 0) { From d371ef35f415a5b44c06a726e926e43318edf7e7 Mon Sep 17 00:00:00 2001 From: markharwood Date: Fri, 22 Jan 2016 13:19:05 +0000 Subject: [PATCH 295/347] Settings - change over to o.e.common.settings.Setting for http settings: http.cors.allow-credentials http.cors.enabled http.detailed_errors.enabled http.enabled http.pipelining http.cors.max-age --- .../common/network/NetworkModule.java | 6 ++++-- .../common/settings/ClusterSettings.java | 9 +++++++++ .../http/netty/NettyHttpChannel.java | 6 +++--- .../http/netty/NettyHttpServerTransport.java | 18 ++++++++++-------- .../main/java/org/elasticsearch/node/Node.java | 1 - .../RecoveryWithUnsupportedIndicesIT.java | 3 ++- .../common/network/NetworkModuleTests.java | 2 +- .../http/netty/HttpPublishPortIT.java | 3 ++- .../http/netty/NettyHttpChannelTests.java | 4 ++-- .../http/netty/NettyPipeliningDisabledIT.java | 3 ++- .../http/netty/NettyPipeliningEnabledIT.java | 3 ++- .../DetailedErrorsDisabledIT.java | 5 +++-- .../DetailedErrorsEnabledIT.java | 3 ++- .../elasticsearch/rest/CorsRegexDefaultIT.java | 3 ++- .../org/elasticsearch/rest/CorsRegexIT.java | 7 ++++--- .../java/org/elasticsearch/tribe/TribeIT.java | 3 ++- .../tests/ContextAndHeaderTransportTests.java | 4 ++-- .../tests/ContextAndHeaderTransportTests.java | 4 ++-- .../elasticsearch/test/ESIntegTestCase.java | 2 +- 19 files changed, 55 insertions(+), 34 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index a1e8261e63c..8be907e3072 100644 --- 
a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -28,7 +28,9 @@ import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.http.HttpServer; import org.elasticsearch.http.HttpServerTransport; @@ -150,7 +152,7 @@ public class NetworkModule extends AbstractModule { public static final String NETTY_TRANSPORT = "netty"; public static final String HTTP_TYPE_KEY = "http.type"; - public static final String HTTP_ENABLED = "http.enabled"; + public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, false, Scope.CLUSTER); private static final List> builtinRestHandlers = Arrays.asList( RestMainAction.class, @@ -368,7 +370,7 @@ public class NetworkModule extends AbstractModule { bind(TransportProxyClient.class).asEagerSingleton(); bind(TransportClientNodesService.class).asEagerSingleton(); } else { - if (settings.getAsBoolean(HTTP_ENABLED, true)) { + if (HTTP_ENABLED.get(settings)) { bind(HttpServer.class).asEagerSingleton(); httpTransportTypes.bindType(binder(), settings, HTTP_TYPE_KEY, NETTY_TRANSPORT); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 48fd46ae76c..145381a7cd7 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -38,6 +38,7 @@ import 
org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; @@ -46,6 +47,8 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.http.netty.NettyHttpChannel; +import org.elasticsearch.http.netty.NettyHttpServerTransport; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.analysis.HunspellService; @@ -162,6 +165,12 @@ public final class ClusterSettings extends AbstractScopedSettings { GatewayService.RECOVER_AFTER_MASTER_NODES_SETTING, GatewayService.RECOVER_AFTER_NODES_SETTING, GatewayService.RECOVER_AFTER_TIME_SETTING, + NetworkModule.HTTP_ENABLED, + NettyHttpServerTransport.SETTING_CORS_ALLOW_CREDENTIALS, + NettyHttpServerTransport.SETTING_CORS_ENABLED, + NettyHttpServerTransport.SETTING_CORS_MAX_AGE, + NettyHttpServerTransport.SETTING_HTTP_DETAILED_ERRORS_ENABLED, + NettyHttpServerTransport.SETTING_PIPELINING, HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java index 7fcc7b65fba..316799dd062 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java +++ 
b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java @@ -113,7 +113,7 @@ public class NettyHttpChannel extends HttpChannel { resp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status); } if (RestUtils.isBrowser(nettyRequest.headers().get(USER_AGENT))) { - if (transport.settings().getAsBoolean(SETTING_CORS_ENABLED, false)) { + if (SETTING_CORS_ENABLED.get(transport.settings())) { String originHeader = request.header(ORIGIN); if (!Strings.isNullOrEmpty(originHeader)) { if (corsPattern == null) { @@ -127,12 +127,12 @@ public class NettyHttpChannel extends HttpChannel { } if (nettyRequest.getMethod() == HttpMethod.OPTIONS) { // Allow Ajax requests based on the CORS "preflight" request - resp.headers().add(ACCESS_CONTROL_MAX_AGE, transport.settings().getAsInt(SETTING_CORS_MAX_AGE, 1728000)); + resp.headers().add(ACCESS_CONTROL_MAX_AGE, SETTING_CORS_MAX_AGE.get(transport.settings())); resp.headers().add(ACCESS_CONTROL_ALLOW_METHODS, transport.settings().get(SETTING_CORS_ALLOW_METHODS, "OPTIONS, HEAD, GET, POST, PUT, DELETE")); resp.headers().add(ACCESS_CONTROL_ALLOW_HEADERS, transport.settings().get(SETTING_CORS_ALLOW_HEADERS, "X-Requested-With, Content-Type, Content-Length")); } - if (transport.settings().getAsBoolean(SETTING_CORS_ALLOW_CREDENTIALS, false)) { + if (SETTING_CORS_ALLOW_CREDENTIALS.get(transport.settings())) { resp.headers().add(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"); } } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index 899bbdc86e2..5623db45ef8 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -28,6 +28,8 @@ import org.elasticsearch.common.netty.OpenChannelsHandler; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; 
import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -94,19 +96,19 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_CORS_ENABLED = Setting.boolSetting("http.cors.enabled", false, false, Scope.CLUSTER); public static final String SETTING_CORS_ALLOW_ORIGIN = "http.cors.allow-origin"; - public static final String SETTING_CORS_MAX_AGE = "http.cors.max-age"; + public static final Setting SETTING_CORS_MAX_AGE = Setting.intSetting("http.cors.max-age", 1728000, false, Scope.CLUSTER); public static final String SETTING_CORS_ALLOW_METHODS = "http.cors.allow-methods"; public static final String SETTING_CORS_ALLOW_HEADERS = "http.cors.allow-headers"; - public static final String SETTING_CORS_ALLOW_CREDENTIALS = "http.cors.allow-credentials"; - public static final String SETTING_PIPELINING = "http.pipelining"; + public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = Setting.boolSetting("http.cors.allow-credentials", false, false, Scope.CLUSTER); + + public static final Setting SETTING_PIPELINING = Setting.boolSetting("http.pipelining", true, false, Scope.CLUSTER); public static final String SETTING_PIPELINING_MAX_EVENTS = "http.pipelining.max_events"; public static final String SETTING_HTTP_COMPRESSION = "http.compression"; public static final String SETTING_HTTP_COMPRESSION_LEVEL = "http.compression_level"; - public static final String SETTING_HTTP_DETAILED_ERRORS_ENABLED = "http.detailed_errors.enabled"; + public static final Setting SETTING_HTTP_DETAILED_ERRORS_ENABLED = Setting.boolSetting("http.detailed_errors.enabled", true, false, Scope.CLUSTER); - public static final boolean DEFAULT_SETTING_PIPELINING = true; public static final int 
DEFAULT_SETTING_PIPELINING_MAX_EVENTS = 10000; public static final String DEFAULT_PORT_RANGE = "9200-9300"; @@ -196,7 +198,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent 0) { @@ -216,7 +218,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent WRITE_PORTS_FIELD_SETTING = Setting.boolSetting("node.portsfile", false, false, Setting.Scope.CLUSTER); private final Lifecycle lifecycle = new Lifecycle(); private final Injector injector; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index a573a8374ef..23163b86112 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.bwcompat; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; @@ -28,7 +29,7 @@ public class RecoveryWithUnsupportedIndicesIT extends StaticIndexBackwardCompati String indexName = "unsupported-0.20.6"; logger.info("Checking static index " + indexName); - Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), Node.HTTP_ENABLED, true); + Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), NetworkModule.HTTP_ENABLED.getKey(), true); try { internalCluster().startNode(nodeSettings); fail(); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 82cabf7ec58..b8a21e1b678 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -143,7 +143,7 @@ 
public class NetworkModuleTests extends ModuleTestCase { } // not added if http is disabled - settings = Settings.builder().put(NetworkModule.HTTP_ENABLED, false).build(); + settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build(); module = new NetworkModule(new NetworkService(settings), settings, false, null); assertNotBound(module, HttpServerTransport.class); } diff --git a/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java b/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java index 4d73b52576a..f227a9a03b4 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.http.netty; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -39,7 +40,7 @@ public class HttpPublishPortIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put(Node.HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .put("http.publish_port", 9080) .build(); } diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java index cb111a71988..f02916c68ac 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java @@ -81,7 +81,7 @@ public class NettyHttpChannelTests extends ESTestCase { public void testCorsEnabledWithoutAllowOrigins() { // 
Set up a HTTP transport with only the CORS enabled setting Settings settings = Settings.builder() - .put(NettyHttpServerTransport.SETTING_CORS_ENABLED, true) + .put(NettyHttpServerTransport.SETTING_CORS_ENABLED.getKey(), true) .build(); httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays); HttpRequest httpRequest = new TestHttpRequest(); @@ -104,7 +104,7 @@ public class NettyHttpChannelTests extends ESTestCase { public void testCorsEnabledWithAllowOrigins() { // create a http transport with CORS enabled and allow origin configured Settings settings = Settings.builder() - .put(NettyHttpServerTransport.SETTING_CORS_ENABLED, true) + .put(NettyHttpServerTransport.SETTING_CORS_ENABLED.getKey(), true) .put(NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN, "remote-host") .build(); httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java index f4ce3756e61..8f7765dcc87 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.http.netty; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.http.HttpServerTransport; @@ -45,7 +46,7 @@ import static org.hamcrest.Matchers.hasSize; public class NettyPipeliningDisabledIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Node.HTTP_ENABLED, true).put("http.pipelining", false).build(); + return 
settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(NetworkModule.HTTP_ENABLED.getKey(), true).put("http.pipelining", false).build(); } public void testThatNettyHttpServerDoesNotSupportPipelining() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java index 9e5971c1d4f..93f54cb7628 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.http.netty; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.http.HttpServerTransport; @@ -42,7 +43,7 @@ import static org.hamcrest.Matchers.is; public class NettyPipeliningEnabledIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Node.HTTP_ENABLED, true).put("http.pipelining", true).build(); + return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(NetworkModule.HTTP_ENABLED.getKey(), true).put("http.pipelining", true).build(); } public void testThatNettyHttpServerSupportsPipelining() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java index a0751dffac5..2a121be509c 100644 --- a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java +++ b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.options.detailederrors; import org.apache.http.impl.client.HttpClients; +import 
org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.netty.NettyHttpServerTransport; @@ -43,8 +44,8 @@ public class DetailedErrorsDisabledIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put(Node.HTTP_ENABLED, true) - .put(NettyHttpServerTransport.SETTING_HTTP_DETAILED_ERRORS_ENABLED, false) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) + .put(NettyHttpServerTransport.SETTING_HTTP_DETAILED_ERRORS_ENABLED.getKey(), false) .build(); } diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java index 935b4e21ad2..4333d81b2ea 100644 --- a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java +++ b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.options.detailederrors; import org.apache.http.impl.client.HttpClients; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.node.Node; @@ -42,7 +43,7 @@ public class DetailedErrorsEnabledIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put(Node.HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } diff --git a/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java b/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java index 2b7533cae19..f2ce16ac857 100644 --- a/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java +++ 
b/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.rest; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; @@ -35,7 +36,7 @@ public class CorsRegexDefaultIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() - .put(Node.HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .put(super.nodeSettings(nodeOrdinal)).build(); } diff --git a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java b/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java index 3828ae0ad74..1c624f98e2f 100644 --- a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java +++ b/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.rest; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; @@ -47,9 +48,9 @@ public class CorsRegexIT extends ESIntegTestCase { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put(SETTING_CORS_ALLOW_ORIGIN, "/https?:\\/\\/localhost(:[0-9]+)?/") - .put(SETTING_CORS_ALLOW_CREDENTIALS, true) - .put(SETTING_CORS_ENABLED, true) - .put(Node.HTTP_ENABLED, true) + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index e2ea4ee05b5..260c6252efd 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ 
b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.MasterNotDiscoveredException; @@ -79,7 +80,7 @@ public class TribeIT extends ESIntegTestCase { NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(Node.HTTP_ENABLED, false).build(); + return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build(); } @Override diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 2a6be52f3f4..fc19a956ff1 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -69,7 +70,6 @@ import java.util.Locale; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; 
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.node.Node.HTTP_ENABLED; import static org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -92,7 +92,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put("script.indexed", "on") - .put(HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 485e687b4a5..d1275f63141 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -63,7 +64,6 @@ import java.util.Map; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.node.Node.HTTP_ENABLED; import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; import static 
org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -87,7 +87,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put("script.indexed", "on") - .put(HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 9294ba2d232..ca11fa242d5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1759,7 +1759,7 @@ public abstract class ESIntegTestCase extends ESTestCase { NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(Node.HTTP_ENABLED, false). + return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false). 
put(ESIntegTestCase.this.nodeSettings(nodeOrdinal)).build(); } From 38ea9c7d1328749e9ac9ba2e52ecc346b29466f0 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jan 2016 16:28:04 +0100 Subject: [PATCH 296/347] Date processor: simplify switch to identify the specified date format --- .../ingest/processor/DateFormat.java | 61 ++++++++----------- .../ingest/processor/DateProcessor.java | 14 +---- .../ingest/processor/DateFormatTests.java | 32 +++++----- .../ingest/processor/DateProcessorTests.java | 16 ++++- 4 files changed, 57 insertions(+), 66 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java index 5748d3788ab..282b29176bf 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java @@ -25,36 +25,30 @@ import org.joda.time.format.DateTimeFormat; import org.joda.time.format.ISODateTimeFormat; import java.util.Locale; -import java.util.Optional; import java.util.function.Function; enum DateFormat { Iso8601 { @Override - Function getFunction(DateTimeZone timezone) { + Function getFunction(String format, DateTimeZone timezone, Locale locale) { return ISODateTimeFormat.dateTimeParser().withZone(timezone)::parseDateTime; } }, Unix { @Override - Function getFunction(DateTimeZone timezone) { + Function getFunction(String format, DateTimeZone timezone, Locale locale) { return (date) -> new DateTime((long)(Float.parseFloat(date) * 1000), timezone); } }, UnixMs { @Override - Function getFunction(DateTimeZone timezone) { + Function getFunction(String format, DateTimeZone timezone, Locale locale) { return (date) -> new DateTime(Long.parseLong(date), timezone); } - - @Override - public String toString() { - return "UNIX_MS"; - } }, Tai64n { @Override - Function getFunction(DateTimeZone timezone) { + Function getFunction(String format, DateTimeZone timezone, Locale 
locale) { return (date) -> new DateTime(parseMillis(date), timezone); } @@ -67,33 +61,30 @@ enum DateFormat { long rest = Long.parseLong(date.substring(16, 24), 16); return ((base * 1000) - 10000) + (rest/1000000); } - }; - - abstract Function getFunction(DateTimeZone timezone); - - static Optional fromString(String format) { - switch (format) { - case "ISO8601": - return Optional.of(Iso8601); - case "UNIX": - return Optional.of(Unix); - case "UNIX_MS": - return Optional.of(UnixMs); - case "TAI64N": - return Optional.of(Tai64n); - default: - return Optional.empty(); - } - } - - static Function getJodaFunction(String matchFormat, DateTimeZone timezone, Locale locale) { - return DateTimeFormat.forPattern(matchFormat) + }, + Joda { + @Override + Function getFunction(String format, DateTimeZone timezone, Locale locale) { + return DateTimeFormat.forPattern(format) .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear()) .withZone(timezone).withLocale(locale)::parseDateTime; - } + } + }; - @Override - public String toString() { - return name().toUpperCase(Locale.ROOT); + abstract Function getFunction(String format, DateTimeZone timezone, Locale locale); + + static DateFormat fromString(String format) { + switch (format) { + case "ISO8601": + return Iso8601; + case "UNIX": + return Unix; + case "UNIX_MS": + return UnixMs; + case "TAI64N": + return Tai64n; + default: + return Joda; + } } } diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 230871d5081..9fc0378d774 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -22,8 +22,8 @@ package org.elasticsearch.ingest.processor; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; -import 
org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; @@ -33,7 +33,6 @@ import java.util.IllformedLocaleException; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Optional; import java.util.function.Function; public final class DateProcessor extends AbstractProcessor { @@ -57,21 +56,14 @@ public final class DateProcessor extends AbstractProcessor { this.matchFormats = matchFormats; this.dateParsers = new ArrayList<>(); for (String matchFormat : matchFormats) { - Optional dateFormat = DateFormat.fromString(matchFormat); - Function stringToDateFunction; - if (dateFormat.isPresent()) { - stringToDateFunction = dateFormat.get().getFunction(timezone); - } else { - stringToDateFunction = DateFormat.getJodaFunction(matchFormat, timezone, locale); - } - dateParsers.add(stringToDateFunction); + DateFormat dateFormat = DateFormat.fromString(matchFormat); + dateParsers.add(dateFormat.getFunction(matchFormat, timezone, locale)); } } @Override public void execute(IngestDocument ingestDocument) { String value = ingestDocument.getFieldValue(matchField, String.class); - // TODO(talevy): handle custom timestamp fields DateTime dateTime = null; Exception lastException = null; diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java index c53b420b244..401dd44d44a 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest.processor; -import org.elasticsearch.ingest.processor.DateFormat; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import 
org.joda.time.DateTimeZone; @@ -28,7 +27,6 @@ import java.time.Instant; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Locale; -import java.util.Optional; import java.util.function.Function; import static org.hamcrest.core.IsEqual.equalTo; @@ -36,7 +34,7 @@ import static org.hamcrest.core.IsEqual.equalTo; public class DateFormatTests extends ESTestCase { public void testParseJoda() { - Function jodaFunction = DateFormat.getJodaFunction("MMM dd HH:mm:ss Z", DateTimeZone.forOffsetHours(-8), Locale.ENGLISH); + Function jodaFunction = DateFormat.Joda.getFunction("MMM dd HH:mm:ss Z", DateTimeZone.forOffsetHours(-8), Locale.ENGLISH); assertThat(Instant.ofEpochMilli(jodaFunction.apply("Nov 24 01:29:01 -0800").getMillis()) .atZone(ZoneId.of("GMT-8")) .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), @@ -44,19 +42,19 @@ public class DateFormatTests extends ESTestCase { } public void testParseUnixMs() { - assertThat(DateFormat.UnixMs.getFunction(DateTimeZone.UTC).apply("1000500").getMillis(), equalTo(1000500L)); + assertThat(DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null).apply("1000500").getMillis(), equalTo(1000500L)); } public void testParseUnix() { - assertThat(DateFormat.Unix.getFunction(DateTimeZone.UTC).apply("1000.5").getMillis(), equalTo(1000500L)); + assertThat(DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null).apply("1000.5").getMillis(), equalTo(1000500L)); } public void testParseISO8601() { - assertThat(DateFormat.Iso8601.getFunction(DateTimeZone.UTC).apply("2001-01-01T00:00:00-0800").getMillis(), equalTo(978336000000L)); + assertThat(DateFormat.Iso8601.getFunction(null, DateTimeZone.UTC, null).apply("2001-01-01T00:00:00-0800").getMillis(), equalTo(978336000000L)); } public void testParseISO8601Failure() { - Function function = DateFormat.Iso8601.getFunction(DateTimeZone.UTC); + Function function = DateFormat.Iso8601.getFunction(null, DateTimeZone.UTC, null); try { 
function.apply("2001-01-0:00-0800"); fail("parse should have failed"); @@ -68,18 +66,18 @@ public class DateFormatTests extends ESTestCase { public void testTAI64NParse() { String input = "4000000050d506482dbdf024"; String expected = "2012-12-22T03:00:46.767+02:00"; - assertThat(DateFormat.Tai64n.getFunction(DateTimeZone.forOffsetHours(2)).apply((randomBoolean() ? "@" : "") + input).toString(), equalTo(expected)); + assertThat(DateFormat.Tai64n.getFunction(null, DateTimeZone.forOffsetHours(2), null).apply((randomBoolean() ? "@" : "") + input).toString(), equalTo(expected)); } public void testFromString() { - assertThat(DateFormat.fromString("UNIX_MS"), equalTo(Optional.of(DateFormat.UnixMs))); - assertThat(DateFormat.fromString("unix_ms"), equalTo(Optional.empty())); - assertThat(DateFormat.fromString("UNIX"), equalTo(Optional.of(DateFormat.Unix))); - assertThat(DateFormat.fromString("unix"), equalTo(Optional.empty())); - assertThat(DateFormat.fromString("ISO8601"), equalTo(Optional.of(DateFormat.Iso8601))); - assertThat(DateFormat.fromString("iso8601"), equalTo(Optional.empty())); - assertThat(DateFormat.fromString("TAI64N"), equalTo(Optional.of(DateFormat.Tai64n))); - assertThat(DateFormat.fromString("tai64n"), equalTo(Optional.empty())); - assertThat(DateFormat.fromString("prefix-" + randomAsciiOfLengthBetween(1, 10)), equalTo(Optional.empty())); + assertThat(DateFormat.fromString("UNIX_MS"), equalTo(DateFormat.UnixMs)); + assertThat(DateFormat.fromString("unix_ms"), equalTo(DateFormat.Joda)); + assertThat(DateFormat.fromString("UNIX"), equalTo(DateFormat.Unix)); + assertThat(DateFormat.fromString("unix"), equalTo(DateFormat.Joda)); + assertThat(DateFormat.fromString("ISO8601"), equalTo(DateFormat.Iso8601)); + assertThat(DateFormat.fromString("iso8601"), equalTo(DateFormat.Joda)); + assertThat(DateFormat.fromString("TAI64N"), equalTo(DateFormat.Tai64n)); + assertThat(DateFormat.fromString("tai64n"), equalTo(DateFormat.Joda)); + 
assertThat(DateFormat.fromString("prefix-" + randomAsciiOfLengthBetween(1, 10)), equalTo(DateFormat.Joda)); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java index ff108e61bfc..5daab95a5d0 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java @@ -84,6 +84,16 @@ public class DateProcessorTests extends ESTestCase { } } + public void testInvalidJodaPattern() { + try { + new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), + "date_as_string", Collections.singletonList("invalid pattern"), "date_as_date"); + fail("date processor initialization should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Illegal pattern component: i")); + } + } + public void testJodaPatternLocale() { DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ITALIAN, "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); @@ -106,7 +116,7 @@ public class DateProcessorTests extends ESTestCase { public void testTAI64N() { DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forOffsetHours(2), randomLocale(random()), - "date_as_string", Collections.singletonList(DateFormat.Tai64n.toString()), "date_as_date"); + "date_as_string", Collections.singletonList("TAI64N"), "date_as_date"); Map document = new HashMap<>(); String dateAsString = (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024"; document.put("date_as_string", dateAsString); @@ -117,7 +127,7 @@ public class DateProcessorTests extends ESTestCase { public void testUnixMs() { DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), - "date_as_string", Collections.singletonList(DateFormat.UnixMs.toString()), "date_as_date"); + "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); @@ -127,7 +137,7 @@ public class DateProcessorTests extends ESTestCase { public void testUnix() { DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), - "date_as_string", Collections.singletonList(DateFormat.Unix.toString()), "date_as_date"); + "date_as_string", Collections.singletonList("UNIX"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000.5"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); From ae5da3432c0170690c79af0db382df97509bd2b6 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 22 Jan 2016 14:12:51 +0100 Subject: [PATCH 297/347] Migrate repository settings to the new settings API --- .../common/settings/ClusterSettings.java | 13 +++-- .../repositories/fs/FsRepository.java | 21 ++++++-- .../repositories/uri/URLRepository.java | 52 ++++++++++++------- .../bwcompat/RestoreBackwardsCompatIT.java | 4 +- 4 files changed, 64 insertions(+), 26 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 145381a7cd7..98ca4b234d0 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ 
b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -47,7 +47,6 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.gateway.PrimaryShardAllocator; -import org.elasticsearch.http.netty.NettyHttpChannel; import org.elasticsearch.http.netty.NettyHttpServerTransport; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStoreConfig; @@ -59,6 +58,8 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.node.Node; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.repositories.uri.URLRepository; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.threadpool.ThreadPool; @@ -129,6 +130,9 @@ public final class ClusterSettings extends AbstractScopedSettings { FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, + FsRepository.REPOSITORIES_CHUNK_SIZE_SETTING, + FsRepository.REPOSITORIES_COMPRESS_SETTING, + FsRepository.REPOSITORIES_LOCATION_SETTING, IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, @@ -231,6 +235,9 @@ public final class ClusterSettings extends AbstractScopedSettings { UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING, SearchService.DEFAULT_KEEPALIVE_SETTING, SearchService.KEEPALIVE_INTERVAL_SETTING, - Node.WRITE_PORTS_FIELD_SETTING - ))); + Node.WRITE_PORTS_FIELD_SETTING, + URLRepository.ALLOWED_URLS_SETTING, + 
URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING, + URLRepository.REPOSITORIES_URL_SETTING, + URLRepository.SUPPORTED_PROTOCOLS_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 33f9d4e7c30..d591f396400 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -33,6 +34,7 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; import java.nio.file.Path; +import java.util.function.Function; /** * Shared file system implementation of the BlobStoreRepository @@ -49,6 +51,13 @@ public class FsRepository extends BlobStoreRepository { public final static String TYPE = "fs"; + public static final Setting LOCATION_SETTING = new Setting<>("location", "", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_LOCATION_SETTING = new Setting<>("repositories.fs.location", "", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, Setting.Scope.CLUSTER); + public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER); + public static 
final Setting REPOSITORIES_COMPRESS_SETTING = Setting.boolSetting("repositories.fs.compress", false, false, Setting.Scope.CLUSTER); + private final FsBlobStore blobStore; private ByteSizeValue chunkSize; @@ -68,7 +77,7 @@ public class FsRepository extends BlobStoreRepository { public FsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); Path locationFile; - String location = repositorySettings.settings().get("location", settings.get("repositories.fs.location")); + String location = LOCATION_SETTING.exists(repositorySettings.settings()) ? LOCATION_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LOCATION_SETTING.get(settings); if (location == null) { logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes"); throw new RepositoryException(name.name(), "missing location"); @@ -85,8 +94,14 @@ public class FsRepository extends BlobStoreRepository { } blobStore = new FsBlobStore(settings, locationFile); - this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("repositories.fs.chunk_size", null)); - this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("repositories.fs.compress", false)); + if (CHUNK_SIZE_SETTING.exists(repositorySettings.settings())) { + this.chunkSize = CHUNK_SIZE_SETTING.get(repositorySettings.settings()); + } else if (REPOSITORIES_CHUNK_SIZE_SETTING.exists(settings)) { + this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings); + } else { + this.chunkSize = null; + } + this.compress = COMPRESS_SETTING.exists(repositorySettings.settings()) ? 
COMPRESS_SETTING.get(repositorySettings.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings); this.basePath = BlobPath.cleanPath(); } diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java index 4d361683e5c..2d15db245aa 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java @@ -20,11 +20,11 @@ package org.elasticsearch.repositories.uri; import org.elasticsearch.cluster.metadata.SnapshotId; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.url.URLBlobStore; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.URIPattern; import org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -34,9 +34,13 @@ import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; +import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; +import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.function.Function; /** * Read-only URL-based implementation of the BlobStoreRepository @@ -51,13 +55,21 @@ public class URLRepository extends BlobStoreRepository { public final static String TYPE = "url"; - public final static String[] DEFAULT_SUPPORTED_PROTOCOLS = {"http", "https", "ftp", "file", "jar"}; + public static final Setting> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting("repositories.url.supported_protocols", + Arrays.asList("http", "https", "ftp", "file", "jar"), Function.identity(), false, Setting.Scope.CLUSTER); - public final static 
String SUPPORTED_PROTOCOLS_SETTING = "repositories.url.supported_protocols"; + public static final Setting> ALLOWED_URLS_SETTING = Setting.listSetting("repositories.url.allowed_urls", + Collections.emptyList(), URIPattern::new, false, Setting.Scope.CLUSTER); - public final static String ALLOWED_URLS_SETTING = "repositories.url.allowed_urls"; + public static final Setting URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_URL_SETTING = new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), + URLRepository::parseURL, false, Setting.Scope.CLUSTER); - private final String[] supportedProtocols; + public static final Setting LIST_DIRECTORIES_SETTING = Setting.boolSetting("list_directories", true, false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_LIST_DIRECTORIES_SETTING = Setting.boolSetting("repositories.uri.list_directories", true, + false, Setting.Scope.CLUSTER); + + private final List supportedProtocols; private final URIPattern[] urlWhiteList; @@ -79,21 +91,16 @@ public class URLRepository extends BlobStoreRepository { @Inject public URLRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); - URL url; - String path = repositorySettings.settings().get("url", settings.get("repositories.url.url", settings.get("repositories.uri.url"))); - if (path == null) { + + if (URL_SETTING.exists(repositorySettings.settings()) == false && REPOSITORIES_URL_SETTING.exists(settings) == false) { throw new RepositoryException(name.name(), "missing url"); - } else { - url = new URL(path); - } - supportedProtocols = settings.getAsArray(SUPPORTED_PROTOCOLS_SETTING, DEFAULT_SUPPORTED_PROTOCOLS); - String[] urlWhiteList = settings.getAsArray(ALLOWED_URLS_SETTING, 
Strings.EMPTY_ARRAY); - this.urlWhiteList = new URIPattern[urlWhiteList.length]; - for (int i = 0; i < urlWhiteList.length; i++) { - this.urlWhiteList[i] = new URIPattern(urlWhiteList[i]); } + supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(settings); + urlWhiteList = ALLOWED_URLS_SETTING.get(settings).toArray(new URIPattern[]{}); this.environment = environment; - listDirectories = repositorySettings.settings().getAsBoolean("list_directories", settings.getAsBoolean("repositories.uri.list_directories", true)); + listDirectories = LIST_DIRECTORIES_SETTING.exists(repositorySettings.settings()) ? LIST_DIRECTORIES_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LIST_DIRECTORIES_SETTING.get(settings); + + URL url = URL_SETTING.exists(repositorySettings.settings()) ? URL_SETTING.get(repositorySettings.settings()) : REPOSITORIES_URL_SETTING.get(settings); URL normalizedURL = checkURL(url); blobStore = new URLBlobStore(settings, normalizedURL); basePath = BlobPath.cleanPath(); @@ -147,8 +154,8 @@ public class URLRepository extends BlobStoreRepository { // We didn't match white list - try to resolve against path.repo URL normalizedUrl = environment.resolveRepoURL(url); if (normalizedUrl == null) { - logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] or by repositories.url.allowed_urls setting: [{}] ", url, environment.repoFiles()); - throw new RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or repositories.url.allowed_urls"); + logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting or by {} setting: [{}] ", url, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles()); + throw new RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or " + ALLOWED_URLS_SETTING.getKey()); } return normalizedUrl; } @@ -161,4 
+168,11 @@ public class URLRepository extends BlobStoreRepository { return true; } + private static URL parseURL(String s) { + try { + return new URL(s); + } catch (MalformedURLException e) { + throw new IllegalArgumentException("Unable to parse URL repository setting", e); + } + } } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index fec96ae236f..eabb954c2c6 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -1,3 +1,4 @@ +/* /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -28,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.repositories.uri.URLRepository; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.RestoreInfo; @@ -73,7 +75,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { URI repoJarPatternUri = new URI("jar:" + getBwcIndicesPath().toUri().toString() + "*.zip!/repo/"); return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .putArray("repositories.url.allowed_urls", repoJarPatternUri.toString()) + .putArray(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoJarPatternUri.toString()) .build(); } catch (URISyntaxException ex) { throw new IllegalArgumentException(ex); From 30cbbc55b6f588ce19d3ab126c69a7dda42715b9 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jan 2016 16:50:23 +0100 Subject: [PATCH 298/347] remove old todo in DateProcessor --- 
.../java/org/elasticsearch/ingest/processor/DateProcessor.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java index 230871d5081..16b7e52b75d 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java @@ -71,7 +71,6 @@ public final class DateProcessor extends AbstractProcessor { @Override public void execute(IngestDocument ingestDocument) { String value = ingestDocument.getFieldValue(matchField, String.class); - // TODO(talevy): handle custom timestamp fields DateTime dateTime = null; Exception lastException = null; From 720c5313648844c07902d067f01e982429839d41 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jan 2016 16:51:51 +0100 Subject: [PATCH 299/347] [TEST] remove check for jvm true, site plugins have been removed --- .../test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml index 9a82d95ed7f..b522cb77780 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml @@ -3,4 +3,3 @@ cluster.stats: {} - match: { nodes.plugins.0.name: ingest-geoip } - - match: { nodes.plugins.0.jvm: true } From ec31feca93fdfad9c8b4ef7a757b408ecf18d274 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 22 Jan 2016 16:34:05 +0100 Subject: [PATCH 300/347] Settings: migrate network service to the new infra This commit migrates all the settings under network service to the new settings infra. It also adds some chaining utils to make fall back settings slightly less verbose. 
Breaking (but I think acceptable) - network.tcp.no_delay and network.tcp.keep_alive used to accept the value `default` which make us not set them at all on netty. Our default was true so we weren't using this feature. I removed it and now we only accept a true boolean. --- .../common/network/NetworkService.java | 94 ++++++++++--------- .../common/settings/ClusterSettings.java | 13 +++ .../common/settings/Setting.java | 76 +++++++++++---- .../http/netty/NettyHttpServerTransport.java | 41 ++++---- .../transport/netty/NettyTransport.java | 56 +++++------ .../common/settings/SettingTests.java | 26 ++++- .../breaker/CircuitBreakerServiceIT.java | 6 +- .../bucket/DateHistogramOffsetIT.java | 2 +- .../transport/AssertingLocalTransport.java | 11 ++- .../test/transport/MockTransportService.java | 5 +- 10 files changed, 195 insertions(+), 135 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index 835a35d2383..a1286aaec55 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -19,7 +19,9 @@ package org.elasticsearch.common.network; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -41,31 +43,30 @@ public class NetworkService extends AbstractComponent { /** By default, we bind to loopback interfaces */ public static final String DEFAULT_NETWORK_HOST = "_local_"; - private static final String GLOBAL_NETWORK_HOST_SETTING = "network.host"; - private static final String GLOBAL_NETWORK_BINDHOST_SETTING = "network.bind_host"; - private static final String GLOBAL_NETWORK_PUBLISHHOST_SETTING = 
"network.publish_host"; + public static final Setting> GLOBAL_NETWORK_HOST_SETTING = Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), + s -> s, false, Setting.Scope.CLUSTER); + public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, + s -> s, false, Setting.Scope.CLUSTER); + public static final Setting> GLOBAL_NETWORK_PUBLISHHOST_SETTING = Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, + s -> s, false, Setting.Scope.CLUSTER); public static final class TcpSettings { - public static final String TCP_NO_DELAY = "network.tcp.no_delay"; - public static final String TCP_KEEP_ALIVE = "network.tcp.keep_alive"; - public static final String TCP_REUSE_ADDRESS = "network.tcp.reuse_address"; - public static final String TCP_SEND_BUFFER_SIZE = "network.tcp.send_buffer_size"; - public static final String TCP_RECEIVE_BUFFER_SIZE = "network.tcp.receive_buffer_size"; - public static final String TCP_BLOCKING = "network.tcp.blocking"; - public static final String TCP_BLOCKING_SERVER = "network.tcp.blocking_server"; - public static final String TCP_BLOCKING_CLIENT = "network.tcp.blocking_client"; - public static final String TCP_CONNECT_TIMEOUT = "network.tcp.connect_timeout"; - - public static final ByteSizeValue TCP_DEFAULT_SEND_BUFFER_SIZE = null; - public static final ByteSizeValue TCP_DEFAULT_RECEIVE_BUFFER_SIZE = null; - public static final TimeValue TCP_DEFAULT_CONNECT_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + public static final Setting TCP_NO_DELAY = Setting.boolSetting("network.tcp.no_delay", true, false, Setting.Scope.CLUSTER); + public static final Setting TCP_KEEP_ALIVE = Setting.boolSetting("network.tcp.keep_alive", true, false, Setting.Scope.CLUSTER); + public static final Setting TCP_REUSE_ADDRESS = Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), false, Setting.Scope.CLUSTER); + public static final 
Setting TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + public static final Setting TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + public static final Setting TCP_BLOCKING = Setting.boolSetting("network.tcp.blocking", false, false, Setting.Scope.CLUSTER); + public static final Setting TCP_BLOCKING_SERVER = Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, false, Setting.Scope.CLUSTER); + public static final Setting TCP_BLOCKING_CLIENT = Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, false, Setting.Scope.CLUSTER); + public static final Setting TCP_CONNECT_TIMEOUT = Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER); } /** * A custom name resolver can support custom lookup keys (my_net_key:ipv4) and also change * the default inet address used in case no settings is provided. */ - public static interface CustomNameResolver { + public interface CustomNameResolver { /** * Resolves the default value if possible. If not, return null. */ @@ -94,6 +95,7 @@ public class NetworkService extends AbstractComponent { /** * Resolves {@code bindHosts} to a list of internet addresses. The list will * not contain duplicate addresses. + * * @param bindHosts list of hosts to bind to. this may contain special pseudo-hostnames * such as _local_ (see the documentation). if it is null, it will be populated * based on global default settings. 
@@ -102,21 +104,22 @@ public class NetworkService extends AbstractComponent { public InetAddress[] resolveBindHostAddresses(String bindHosts[]) throws IOException { // first check settings if (bindHosts == null) { - bindHosts = settings.getAsArray(GLOBAL_NETWORK_BINDHOST_SETTING, settings.getAsArray(GLOBAL_NETWORK_HOST_SETTING, null)); - } - // next check any registered custom resolvers - if (bindHosts == null) { - for (CustomNameResolver customNameResolver : customNameResolvers) { - InetAddress addresses[] = customNameResolver.resolveDefault(); - if (addresses != null) { - return addresses; + if (GLOBAL_NETWORK_BINDHOST_SETTING.exists(settings) || GLOBAL_NETWORK_HOST_SETTING.exists(settings)) { + // if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline + bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); + } else { + // next check any registered custom resolvers + for (CustomNameResolver customNameResolver : customNameResolvers) { + InetAddress addresses[] = customNameResolver.resolveDefault(); + if (addresses != null) { + return addresses; + } } + // we know it's not here. get the defaults + bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); } } - // finally, fill with our default - if (bindHosts == null) { - bindHosts = new String[] { DEFAULT_NETWORK_HOST }; - } + InetAddress addresses[] = resolveInetAddresses(bindHosts); // try to deal with some (mis)configuration @@ -138,6 +141,7 @@ public class NetworkService extends AbstractComponent { * only one address is just a current limitation. *

* If {@code publishHosts} resolves to more than one address, then one is selected with magic + * * @param publishHosts list of hosts to publish as. this may contain special pseudo-hostnames * such as _local_ (see the documentation). if it is null, it will be populated * based on global default settings. @@ -145,23 +149,23 @@ public class NetworkService extends AbstractComponent { */ // TODO: needs to be InetAddress[] public InetAddress resolvePublishHostAddresses(String publishHosts[]) throws IOException { - // first check settings if (publishHosts == null) { - publishHosts = settings.getAsArray(GLOBAL_NETWORK_PUBLISHHOST_SETTING, settings.getAsArray(GLOBAL_NETWORK_HOST_SETTING, null)); - } - // next check any registered custom resolvers - if (publishHosts == null) { - for (CustomNameResolver customNameResolver : customNameResolvers) { - InetAddress addresses[] = customNameResolver.resolveDefault(); - if (addresses != null) { - return addresses[0]; + if (GLOBAL_NETWORK_PUBLISHHOST_SETTING.exists(settings) || GLOBAL_NETWORK_HOST_SETTING.exists(settings)) { + // if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline + publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); + } else { + // next check any registered custom resolvers + for (CustomNameResolver customNameResolver : customNameResolvers) { + InetAddress addresses[] = customNameResolver.resolveDefault(); + if (addresses != null) { + return addresses[0]; + } } + // we know it's not here. get the defaults + publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); } } - // finally, fill with our default - if (publishHosts == null) { - publishHosts = new String[] { DEFAULT_NETWORK_HOST }; - } + InetAddress addresses[] = resolveInetAddresses(publishHosts); // TODO: allow publishing multiple addresses // for now... 
the hack begins @@ -184,17 +188,17 @@ public class NetworkService extends AbstractComponent { throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is wildcard, but multiple addresses specified: this makes no sense"); } } - + // 3. if we end out with multiple publish addresses, select by preference. // don't warn the user, or they will get confused by bind_host vs publish_host etc. if (addresses.length > 1) { List sorted = new ArrayList<>(Arrays.asList(addresses)); NetworkUtils.sortAddresses(sorted); - addresses = new InetAddress[] { sorted.get(0) }; + addresses = new InetAddress[]{sorted.get(0)}; } return addresses[0]; } - + /** resolves (and deduplicates) host specification */ private InetAddress[] resolveInetAddresses(String hosts[]) throws IOException { if (hosts.length == 0) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 98ca4b234d0..4680146dd88 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; @@ -192,6 +193,18 @@ public final class ClusterSettings extends AbstractScopedSettings { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, Transport.TRANSPORT_PROFILES_SETTING, Transport.TRANSPORT_TCP_COMPRESS, + NetworkService.GLOBAL_NETWORK_HOST_SETTING, + NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING, + 
NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING, + NetworkService.TcpSettings.TCP_NO_DELAY, + NetworkService.TcpSettings.TCP_KEEP_ALIVE, + NetworkService.TcpSettings.TCP_REUSE_ADDRESS, + NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, + NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, + NetworkService.TcpSettings.TCP_BLOCKING, + NetworkService.TcpSettings.TCP_BLOCKING_SERVER, + NetworkService.TcpSettings.TCP_BLOCKING_CLIENT, + NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT, IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index b67ba26c093..22cab996d63 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -41,6 +41,7 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.regex.Pattern; +import java.util.stream.Collectors; /** * A setting. Encapsulates typical stuff like default value, parsing, and scope. @@ -70,8 +71,21 @@ public class Setting extends ToXContentToBytes { this.scope = scope; } + /** + * Creates a new Setting instance + * @param key the settings key for this setting. + * @param fallBackSetting a setting to fall back to if the current setting is not set. + * @param parser a parser that parses the string rep into a complex datatype. 
+ * @param dynamic true iff this setting can be dynamically updateable + * @param scope the scope of this setting + */ + public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, Scope scope) { + this(key, fallBackSetting::getRaw, parser, dynamic, scope); + } + /** * Returns the settings key or a prefix if this setting is a group setting + * * @see #isGroupSetting() */ public final String getKey() { @@ -106,13 +120,21 @@ public class Setting extends ToXContentToBytes { } /** - * Returns the default values string representation for this setting. + * Returns the default value string representation for this setting. * @param settings a settings object for settings that has a default value depending on another setting if available */ - public final String getDefault(Settings settings) { + public final String getDefaultRaw(Settings settings) { return defaultValue.apply(settings); } + /** + * Returns the default value for this setting. + * @param settings a settings object for settings that has a default value depending on another setting if available + */ + public final T getDefault(Settings settings) { + return parser.apply(getDefaultRaw(settings)); + } + /** * Returns true iff this setting is present in the given settings object. 
Otherwise false */ @@ -337,6 +359,10 @@ public class Setting extends ToXContentToBytes { return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope); } + public static Setting boolSetting(String key, Setting fallbackSetting, boolean dynamic, Scope scope) { + return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, dynamic, scope); + } + public static Setting byteSizeSetting(String key, String percentage, boolean dynamic, Scope scope) { return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope); } @@ -352,25 +378,15 @@ public class Setting extends ToXContentToBytes { public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, scope); } + + public static Setting> listSetting(String key, Setting> fallbackSetting, Function singleValueParser, boolean dynamic, Scope scope) { + return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, dynamic, scope); + } + public static Setting> listSetting(String key, Function> defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { - Function> parser = (s) -> { - try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(s)){ - XContentParser.Token token = xContentParser.nextToken(); - if (token != XContentParser.Token.START_ARRAY) { - throw new IllegalArgumentException("expected START_ARRAY but got " + token); - } - ArrayList list = new ArrayList<>(); - while ((token = xContentParser.nextToken()) !=XContentParser.Token.END_ARRAY) { - if (token != XContentParser.Token.VALUE_STRING) { - throw new IllegalArgumentException("expected VALUE_STRING but got " + token); - } - list.add(singleValueParser.apply(xContentParser.text())); - } - return list; - } catch (IOException e) { - 
throw new IllegalArgumentException("failed to parse array", e); - } - }; + Function> parser = (s) -> + parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList()); + return new Setting>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); @Override @@ -391,6 +407,26 @@ public class Setting extends ToXContentToBytes { }; } + private static List parseableStringToList(String parsableString) { + try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(parsableString)) { + XContentParser.Token token = xContentParser.nextToken(); + if (token != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("expected START_ARRAY but got " + token); + } + ArrayList list = new ArrayList<>(); + while ((token = xContentParser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new IllegalArgumentException("expected VALUE_STRING but got " + token); + } + list.add(xContentParser.text()); + } + return list; + } catch (IOException e) { + throw new IllegalArgumentException("failed to parse array", e); + } + } + + private static String arrayToParsableString(String[] array) { try { XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index 5623db45ef8..0cd0cef336c 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -19,7 +19,6 @@ package org.elasticsearch.http.netty; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SuppressForbidden; import 
org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -27,7 +26,6 @@ import org.elasticsearch.common.netty.NettyUtils; import org.elasticsearch.common.netty.OpenChannelsHandler; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.settings.Settings; @@ -77,9 +75,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_SERVER; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_RECEIVE_BUFFER_SIZE; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_SEND_BUFFER_SIZE; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_KEEP_ALIVE; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_NO_DELAY; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE; @@ -98,11 +93,11 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_CORS_ENABLED = Setting.boolSetting("http.cors.enabled", false, false, Scope.CLUSTER); public static final String SETTING_CORS_ALLOW_ORIGIN = "http.cors.allow-origin"; - public static final Setting SETTING_CORS_MAX_AGE = Setting.intSetting("http.cors.max-age", 1728000, false, Scope.CLUSTER); + public static final Setting SETTING_CORS_MAX_AGE = Setting.intSetting("http.cors.max-age", 1728000, false, Scope.CLUSTER); public static final String SETTING_CORS_ALLOW_METHODS = "http.cors.allow-methods"; public static final String SETTING_CORS_ALLOW_HEADERS 
= "http.cors.allow-headers"; public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = Setting.boolSetting("http.cors.allow-credentials", false, false, Scope.CLUSTER); - + public static final Setting SETTING_PIPELINING = Setting.boolSetting("http.pipelining", true, false, Scope.CLUSTER); public static final String SETTING_PIPELINING_MAX_EVENTS = "http.pipelining.max_events"; public static final String SETTING_HTTP_COMPRESSION = "http.compression"; @@ -144,8 +139,8 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent 0) { + serverBootstrap.setOption("child.tcpNoDelay", tcpNoDelay); + serverBootstrap.setOption("child.keepAlive", tcpKeepAlive); + if (tcpSendBufferSize.bytes() > 0) { + serverBootstrap.setOption("child.sendBufferSize", tcpSendBufferSize.bytes()); } - if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) { + if (tcpReceiveBufferSize.bytes() > 0) { serverBootstrap.setOption("child.receiveBufferSize", tcpReceiveBufferSize.bytes()); } serverBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory); @@ -310,7 +302,8 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent implem } this.workerCount = settings.getAsInt(WORKER_COUNT, EsExecutors.boundedNumberOfProcessors(settings) * 2); - this.blockingClient = settings.getAsBoolean("transport.netty.transport.tcp.blocking_client", settings.getAsBoolean(TCP_BLOCKING_CLIENT, settings.getAsBoolean(TCP_BLOCKING, false))); - this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", settings.getAsTime(TCP_CONNECT_TIMEOUT, TCP_DEFAULT_CONNECT_TIMEOUT))); + this.blockingClient = settings.getAsBoolean("transport.netty.transport.tcp.blocking_client", TCP_BLOCKING_CLIENT.get(settings)); + this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", TCP_CONNECT_TIMEOUT.get(settings))); 
this.maxCumulationBufferCapacity = this.settings.getAsBytesSize("transport.netty.max_cumulation_buffer_capacity", null); this.maxCompositeBufferComponents = this.settings.getAsInt("transport.netty.max_composite_buffer_components", -1); this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings); @@ -362,29 +358,25 @@ public class NettyTransport extends AbstractLifecycleComponent implem clientBootstrap.setPipelineFactory(configureClientChannelPipelineFactory()); clientBootstrap.setOption("connectTimeoutMillis", connectTimeout.millis()); - String tcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true")); - if (!"default".equals(tcpNoDelay)) { - clientBootstrap.setOption("tcpNoDelay", Booleans.parseBoolean(tcpNoDelay, null)); - } + boolean tcpNoDelay = settings.getAsBoolean("transport.netty.tcp_no_delay", TCP_NO_DELAY.get(settings)); + clientBootstrap.setOption("tcpNoDelay", tcpNoDelay); - String tcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true")); - if (!"default".equals(tcpKeepAlive)) { - clientBootstrap.setOption("keepAlive", Booleans.parseBoolean(tcpKeepAlive, null)); - } + boolean tcpKeepAlive = settings.getAsBoolean("transport.netty.tcp_keep_alive", TCP_KEEP_ALIVE.get(settings)); + clientBootstrap.setOption("keepAlive", tcpKeepAlive); - ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE)); - if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) { + ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", TCP_SEND_BUFFER_SIZE.get(settings)); + if (tcpSendBufferSize.bytes() > 0) { clientBootstrap.setOption("sendBufferSize", tcpSendBufferSize.bytes()); } - ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, 
TCP_DEFAULT_RECEIVE_BUFFER_SIZE)); - if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) { + ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", TCP_RECEIVE_BUFFER_SIZE.get(settings)); + if (tcpReceiveBufferSize.bytes() > 0) { clientBootstrap.setOption("receiveBufferSize", tcpReceiveBufferSize.bytes()); } clientBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory); - boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress())); + boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", TCP_REUSE_ADDRESS.get(settings)); clientBootstrap.setOption("reuseAddress", reuseAddress); return clientBootstrap; @@ -403,26 +395,22 @@ public class NettyTransport extends AbstractLifecycleComponent implem fallbackSettingsBuilder.put("publish_host", fallbackPublishHost); } - String fallbackTcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true")); - if (fallbackTcpNoDelay != null) { - fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay); - } + boolean fallbackTcpNoDelay = settings.getAsBoolean("transport.netty.tcp_no_delay", TCP_NO_DELAY.get(settings)); + fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay); - String fallbackTcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true")); - if (fallbackTcpKeepAlive != null) { + boolean fallbackTcpKeepAlive = settings.getAsBoolean("transport.netty.tcp_keep_alive", TCP_KEEP_ALIVE.get(settings)); fallbackSettingsBuilder.put("tcp_keep_alive", fallbackTcpKeepAlive); - } - boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress())); + boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", 
TCP_REUSE_ADDRESS.get(settings)); fallbackSettingsBuilder.put("reuse_address", fallbackReuseAddress); - ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE)); - if (fallbackTcpSendBufferSize != null) { + ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", TCP_SEND_BUFFER_SIZE.get(settings)); + if (fallbackTcpSendBufferSize.bytes() >= 0) { fallbackSettingsBuilder.put("tcp_send_buffer_size", fallbackTcpSendBufferSize); } - ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE)); - if (fallbackTcpBufferSize != null) { + ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", TCP_RECEIVE_BUFFER_SIZE.get(settings)); + if (fallbackTcpBufferSize.bytes() >= 0) { fallbackSettingsBuilder.put("tcp_receive_buffer_size", fallbackTcpBufferSize); } @@ -552,15 +540,15 @@ public class NettyTransport extends AbstractLifecycleComponent implem } private void createServerBootstrap(String name, Settings settings) { - boolean blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", this.settings.getAsBoolean(TCP_BLOCKING_SERVER, this.settings.getAsBoolean(TCP_BLOCKING, false))); + boolean blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", TCP_BLOCKING_SERVER.get(settings)); String port = settings.get("port"); String bindHost = settings.get("bind_host"); String publishHost = settings.get("publish_host"); String tcpNoDelay = settings.get("tcp_no_delay"); String tcpKeepAlive = settings.get("tcp_keep_alive"); boolean reuseAddress = settings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress()); - ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("tcp_send_buffer_size", 
TCP_DEFAULT_SEND_BUFFER_SIZE); - ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("tcp_receive_buffer_size", TCP_DEFAULT_RECEIVE_BUFFER_SIZE); + ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("tcp_send_buffer_size", TCP_SEND_BUFFER_SIZE.getDefault(settings)); + ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("tcp_receive_buffer_size", TCP_RECEIVE_BUFFER_SIZE.getDefault(settings)); logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]", name, workerCount, port, bindHost, publishHost, compress, connectTimeout, connectionsPerNodeRecovery, connectionsPerNodeBulk, connectionsPerNodeReg, connectionsPerNodeState, connectionsPerNodePing, receivePredictorMin, receivePredictorMax); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index cccfa373200..6f189bd5e19 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -104,13 +104,17 @@ public class SettingTests extends ESTestCase { TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); Setting setting = Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), Setting.Scope.CLUSTER); assertFalse(setting.isGroupSetting()); - String aDefault = setting.getDefault(Settings.EMPTY); + String aDefault = setting.getDefaultRaw(Settings.EMPTY); assertEquals(defautlValue.millis() + "ms", aDefault); assertEquals(defautlValue.millis(), setting.get(Settings.EMPTY).millis()); + assertEquals(defautlValue, setting.getDefault(Settings.EMPTY)); Setting secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.CLUSTER); assertEquals("some_default", 
secondaryDefault.get(Settings.EMPTY)); assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); + Setting secondaryDefaultViaSettings = new Setting<>("foo.bar", secondaryDefault, (s) -> s, randomBoolean(), Setting.Scope.CLUSTER); + assertEquals("some_default", secondaryDefaultViaSettings.get(Settings.EMPTY)); + assertEquals("42", secondaryDefaultViaSettings.get(Settings.builder().put("old.foo.bar", 42).build())); } public void testComplexType() { @@ -298,6 +302,26 @@ public class SettingTests extends ESTestCase { for (int i = 0; i < intValues.size(); i++) { assertEquals(i, intValues.get(i).intValue()); } + + Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> s, true, Setting.Scope.CLUSTER); + value = settingWithFallback.get(Settings.EMPTY); + assertEquals(1, value.size()); + assertEquals("foo,bar", value.get(0)); + + value = settingWithFallback.get(Settings.builder().putArray("foo.bar", "1", "2").build()); + assertEquals(2, value.size()); + assertEquals("1", value.get(0)); + assertEquals("2", value.get(1)); + + value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").build()); + assertEquals(2, value.size()); + assertEquals("3", value.get(0)); + assertEquals("4", value.get(1)); + + value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").putArray("foo.bar", "1", "2").build()); + assertEquals(2, value.size()); + assertEquals("3", value.get(0)); + assertEquals("4", value.get(1)); } public void testListSettingAcceptsNumberSyntax() { diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 1af04e295dd..6cdd4cf348a 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -64,11 
+64,11 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { logger.info("--> resetting breaker settings"); Settings resetSettings = settingsBuilder() .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), - HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null)) .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), - HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefault(null)) + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefaultRaw(null)) .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), - HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null)) .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index bb22361ebde..cbd9a250ebd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -61,7 +61,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_1_4_0_Beta1).build(); + .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), 
Version.V_1_4_0_Beta1).build(); } @Before diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index 98996932dc7..d66acb7ff06 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -60,8 +61,10 @@ public class AssertingLocalTransport extends LocalTransport { } } - public static final String ASSERTING_TRANSPORT_MIN_VERSION_KEY = "transport.asserting.version.min"; - public static final String ASSERTING_TRANSPORT_MAX_VERSION_KEY = "transport.asserting.version.max"; + public static final Setting ASSERTING_TRANSPORT_MIN_VERSION_KEY = new Setting<>("transport.asserting.version.min", + Version.CURRENT.minimumCompatibilityVersion().toString(), Version::fromString, false, Setting.Scope.CLUSTER); + public static final Setting ASSERTING_TRANSPORT_MAX_VERSION_KEY = new Setting<>("transport.asserting.version.max", + Version.CURRENT.toString(), Version::fromString, false, Setting.Scope.CLUSTER); private final Random random; private final Version minVersion; private final Version maxVersion; @@ -71,8 +74,8 @@ public class AssertingLocalTransport extends LocalTransport { super(settings, threadPool, version, namedWriteableRegistry); final long seed = ESIntegTestCase.INDEX_TEST_SEED_SETTING.get(settings); random = new Random(seed); - minVersion = 
settings.getAsVersion(ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_0_18_0); - maxVersion = settings.getAsVersion(ASSERTING_TRANSPORT_MAX_VERSION_KEY, Version.CURRENT); + minVersion = ASSERTING_TRANSPORT_MIN_VERSION_KEY.get(settings); + maxVersion = ASSERTING_TRANSPORT_MAX_VERSION_KEY.get(settings); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 0a8869b20cf..985c8a86838 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RequestHandlerRegistry; @@ -283,7 +282,7 @@ public class MockTransportService extends TransportService { } // TODO: Replace with proper setting - TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_DEFAULT_CONNECT_TIMEOUT; + TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT.getDefault(Settings.EMPTY); try { if (delay.millis() < connectingTimeout.millis()) { Thread.sleep(delay.millis()); @@ -306,7 +305,7 @@ public class MockTransportService extends TransportService { } // TODO: Replace with proper setting - TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_DEFAULT_CONNECT_TIMEOUT; + TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT.getDefault(Settings.EMPTY); try { if (delay.millis() < connectingTimeout.millis()) { Thread.sleep(delay.millis()); From 
dc0fb29ad9e04d72b6cea225f78af6e6e7a0c7ef Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 22 Jan 2016 17:51:17 +0100 Subject: [PATCH 301/347] Fix check for required settings in FsRepository --- .../org/elasticsearch/repositories/fs/FsRepository.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index d591f396400..36cf55ed3f9 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -52,7 +52,7 @@ public class FsRepository extends BlobStoreRepository { public final static String TYPE = "fs"; public static final Setting LOCATION_SETTING = new Setting<>("location", "", Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting REPOSITORIES_LOCATION_SETTING = new Setting<>("repositories.fs.location", "", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_LOCATION_SETTING = new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, Setting.Scope.CLUSTER); public static final Setting REPOSITORIES_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, Setting.Scope.CLUSTER); public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER); @@ -77,8 +77,8 @@ public class FsRepository extends BlobStoreRepository { public FsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); Path locationFile; - String location = 
LOCATION_SETTING.exists(repositorySettings.settings()) ? LOCATION_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LOCATION_SETTING.get(settings); - if (location == null) { + String location = REPOSITORIES_LOCATION_SETTING.get(settings); + if (location.isEmpty()) { logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes"); throw new RepositoryException(name.name(), "missing location"); } From 30bfde9a2de3d1be35449eea722e2407d900d033 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 22 Jan 2016 18:01:27 +0100 Subject: [PATCH 302/347] Another fix of check for required settings in FsRepository This time with the right setting --- .../java/org/elasticsearch/repositories/fs/FsRepository.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 36cf55ed3f9..0aa62225479 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -77,7 +77,7 @@ public class FsRepository extends BlobStoreRepository { public FsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); Path locationFile; - String location = REPOSITORIES_LOCATION_SETTING.get(settings); + String location = REPOSITORIES_LOCATION_SETTING.get(repositorySettings.settings()); if (location.isEmpty()) { logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes"); throw new RepositoryException(name.name(), "missing location"); From b349746bbe6767b527d9410f3d6faa0313b377f4 Mon Sep 17 00:00:00 2001 From: Ryan 
Ernst Date: Fri, 22 Jan 2016 22:54:49 -0800 Subject: [PATCH 303/347] Fix repro line for tests to use language tag See https://issues.apache.org/jira/browse/LUCENE-6978 Also fixed date histogram test to use its version constant as a string setting. --- .../search/aggregations/bucket/DateHistogramOffsetIT.java | 2 +- .../test/junit/listeners/ReproduceInfoPrinter.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index cbd9a250ebd..75c0fc25e6d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -61,7 +61,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1).build(); + .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1.toString()).build(); } @Before diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 1c9bddb6b45..2347fc47672 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -147,7 +147,7 @@ public class ReproduceInfoPrinter extends RunListener { if (System.getProperty("tests.jvm.argline") != null && !System.getProperty("tests.jvm.argline").isEmpty()) { appendOpt("tests.jvm.argline", "\"" + System.getProperty("tests.jvm.argline") + "\""); 
} - appendOpt("tests.locale", Locale.getDefault().toString()); + appendOpt("tests.locale", Locale.getDefault().toLanguageTag()); appendOpt("tests.timezone", TimeZone.getDefault().getID()); return this; } From 2de4a7a666690ececb80f8540fea6bb61d15d13b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 24 Jan 2016 09:52:11 -0500 Subject: [PATCH 304/347] Log shard ID in ShardStateAction This commit amends the logging statement in two places in ShardStateAction to log the full shard ID instead of just the numerical shard ID (without the index name). --- .../elasticsearch/cluster/action/shard/ShardStateAction.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 3a837d81a7e..170d6fa0899 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -89,7 +89,7 @@ public class ShardStateAction extends AbstractComponent { logger.warn("{} no master known for action [{}] for shard [{}]", shardRoutingEntry.getShardRouting().shardId(), actionName, shardRoutingEntry.getShardRouting()); waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener); } else { - logger.debug("{} sending [{}] to [{}] for shard [{}]", shardRoutingEntry.getShardRouting().getId(), actionName, masterNode.getId(), shardRoutingEntry); + logger.debug("{} sending [{}] to [{}] for shard [{}]", shardRoutingEntry.getShardRouting().shardId(), actionName, masterNode.getId(), shardRoutingEntry); transportService.sendRequest(masterNode, actionName, shardRoutingEntry, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { @Override @@ -144,7 +144,7 @@ public class ShardStateAction extends AbstractComponent { @Override public void onClusterServiceClose() { - logger.warn("{} node closed while 
execution action [{}] for shard [{}]", shardRoutingEntry.failure, shardRoutingEntry.getShardRouting().getId(), actionName, shardRoutingEntry.getShardRouting()); + logger.warn("{} node closed while execution action [{}] for shard [{}]", shardRoutingEntry.failure, shardRoutingEntry.getShardRouting().shardId(), actionName, shardRoutingEntry.getShardRouting()); listener.onFailure(new NodeClosedException(clusterService.localNode())); } From 9b5739c43dab4a84a29b5920b7871b3902120a32 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Sun, 24 Jan 2016 19:52:18 -0800 Subject: [PATCH 305/347] docs: add docs for on_failure support in ingest pipelines --- docs/plugins/ingest.asciidoc | 97 ++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 906479412e7..72336086b23 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -578,6 +578,103 @@ to depends on the field in the source with name `geoip.country_iso_code`. } -------------------------------------------------- +==== Handling Failure in Pipelines + +In its simplest case, pipelines describe a list of processors which +are executed sequentially and processing halts at the first exception. This +may not be desirable when failures are expected. For example, not all your logs +may match a certain grok expression and you may wish to index such documents into +a separate index. + +To enable this behavior, you can utilize the `on_failure` parameter. `on_failure` +defines a list of processors to be executed immediately following the failed processor. +This parameter can be supplied at the pipeline level, as well as at the processor +level. If a processor has an `on_failure` configuration option provided, whether +it is empty or not, any exceptions that are thrown by it will be caught and the +pipeline will continue executing the remaining processors defined. 
Since further processors +are defined within the scope of an `on_failure` statement, failure handling can be nested. + +Example: In the following example we define a pipeline that attempts to rename the +field `foo` to `bar`. If the document does not contain the `foo` field, we +go ahead and attach an error message within the document for later analysis within +Elasticsearch. + +[source,js] +-------------------------------------------------- +{ + "description" : "my first pipeline with handled exceptions", + "processors" : [ + { + "rename" : { + "field" : "foo", + "to" : "bar", + "on_failure" : [ + { + "set" : { + "field" : "error", + "value" : "field \"foo\" does not exist, cannot rename to \"bar\"" + } + } + ] + } + } + ] +} +-------------------------------------------------- + +Example: Here we define an `on_failure` block on a whole pipeline to change +the index to which failed documents are sent. + +[source,js] +-------------------------------------------------- +{ + "description" : "my first pipeline with handled exceptions", + "processors" : [ ... ], + "on_failure" : [ + { + "set" : { + "field" : "_index", + "value" : "failed-{{ _index }}" + } + } + ] +} +-------------------------------------------------- + + +===== Accessing Error Metadata From Processors Handling Exceptions + +Sometimes you may want to retrieve the actual error message that was thrown +by a failed processor. To do so you can access metadata fields called +`on_failure_message` and `on_failure_processor`. These fields are only accessible +from within the context of an `on_failure` block. Here is an updated version of +our first example which leverages these fields to provide the error message instead +of manually setting it. 
+ +[source,js] +-------------------------------------------------- +{ + "description" : "my first pipeline with handled exceptions", + "processors" : [ + { + "rename" : { + "field" : "foo", + "to" : "bar", + "on_failure" : [ + { + "set" : { + "field" : "error", + "value" : "{{ _ingest.on_failure_message }}" + } + } + ] + } + } + ] +} +-------------------------------------------------- + + === Ingest APIs ==== Put pipeline API From 3a0839de9a495138a854dd3f7090c039a18942db Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Mon, 25 Jan 2016 07:58:10 +0100 Subject: [PATCH 306/347] Scripting: Allow to get size of array in mustache This adds support for returning the size of an array or an collection, in addition to access fields via `array.0` you can specify `array.size` to get its size. --- .../CustomReflectionObjectHandler.java | 8 +++-- .../script/mustache/MustacheTests.java | 31 ++++++++++++++++--- 2 files changed, 32 insertions(+), 7 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java index 30983395d93..45d3d8c182d 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java @@ -61,7 +61,9 @@ final class CustomReflectionObjectHandler extends ReflectionObjectHandler { @Override public Object get(Object key) { - if (key instanceof Number) { + if ("size".equals(key)) { + return size(); + } else if (key instanceof Number) { return Array.get(array, ((Number) key).intValue()); } try { @@ -117,7 +119,9 @@ final class CustomReflectionObjectHandler extends ReflectionObjectHandler { @Override public Object get(Object key) { - if (key instanceof Number) { + if ("size".equals(key)) { + return col.size(); + } else if (key 
instanceof Number) { return Iterables.get(col, ((Number) key).intValue()); } try { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index 9c560210e2c..1bbae2b36b7 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -27,24 +27,25 @@ import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; - import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; -import java.util.HashSet; import static java.util.Collections.singleton; import static java.util.Collections.singletonMap; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.CONTENT_TYPE_PARAM; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.JSON_CONTENT_TYPE; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.PLAIN_TEXT_CONTENT_TYPE; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; public class MustacheTests extends ESTestCase { @@ -156,4 +157,24 @@ public class MustacheTests extends ESTestCase { assertThat(result, equalTo("{ \"field1\": \"a \"value\"\"}")); } + public void testSizeAccessForCollectionsAndArrays() throws Exception { + String[] randomArrayValues = generateRandomStringArray(10, 20, false); 
+ List randomList = Arrays.asList(generateRandomStringArray(10, 20, false)); + + String template = "{{data.array.size}} {{data.list.size}}"; + CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(template, Collections.emptyMap())); + Map data = new HashMap<>(); + data.put("array", randomArrayValues); + data.put("list", randomList); + Map vars = new HashMap<>(); + vars.put("data", data); + + Object output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + + BytesReference bytes = (BytesReference) output; + String expectedString = String.format(Locale.ROOT, "%s %s", randomArrayValues.length, randomList.size()); + assertThat(bytes.toUtf8(), equalTo(expectedString)); + } } From b24dde88de8ea4fd7d62e4518e36c99a84abb2e7 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Mon, 25 Jan 2016 08:25:31 +0100 Subject: [PATCH 307/347] Merge branch 'malpani-aws-discovery-seoul' # Please enter a commit message to explain why this merge is necessary, # especially if it merges an updated upstream into a topic branch. # # Lines starting with '#' will be ignored, and an empty message aborts # the commit. 
--- .../java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java | 2 ++ .../java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java | 2 ++ 2 files changed, 4 insertions(+) diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 48309457d73..51dfd55b7b0 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -181,6 +181,8 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent endpoint = "ec2.ap-southeast-2.amazonaws.com"; } else if (region.equals("ap-northeast") || region.equals("ap-northeast-1")) { endpoint = "ec2.ap-northeast-1.amazonaws.com"; + } else if (region.equals("ap-northeast-2")) { + endpoint = "ec2.ap-northeast-2.amazonaws.com"; } else if (region.equals("eu-west") || region.equals("eu-west-1")) { endpoint = "ec2.eu-west-1.amazonaws.com"; } else if (region.equals("eu-central") || region.equals("eu-central-1")) { diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 90b79fd3b4d..a897cf6f899 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -200,6 +200,8 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent Date: Mon, 25 Jan 2016 08:46:51 +0100 Subject: [PATCH 308/347] Add seoul endpoints for EC2 discovery and S3 snapshots Add documentation for #16167 --- docs/plugins/discovery-ec2.asciidoc | 1 + docs/plugins/repository-s3.asciidoc | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc 
index 567c80a3832..27d6d08a4ec 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -113,6 +113,7 @@ The available values are: * `ap-southeast-1` * `ap-southeast-2` * `ap-northeast` (`ap-northeast-1`) +* `ap-northeast-2` (`ap-northeast-2`) * `eu-west` (`eu-west-1`) * `eu-central` (`eu-central-1`) * `sa-east` (`sa-east-1`) diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index faaa87302ee..7c89b0e29e6 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -116,6 +116,7 @@ The available values are: * `ap-southeast-1` * `ap-southeast-2` * `ap-northeast` (`ap-northeast-1`) +* `ap-northeast-2` (`ap-northeast-2`) * `eu-west` (`eu-west-1`) * `eu-central` (`eu-central-1`) * `sa-east` (`sa-east-1`) From 4c1e93bd89cfbf97c25e84e0cdcf9818e9cd97f6 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 25 Jan 2016 05:39:51 -0500 Subject: [PATCH 309/347] Reproducible randoms in SearchWhileCreatingIndexIT This commit removes non-reproducible randomness from SearchWhileCreatingIndexIT. The cause of the non-reproducible randomness is the use of a random draws for the shard preference inside of a non-deterministic while loop. Because the inner while loop executed a non-deterministic number of times, the draws for the next iteration of the outer loop would be impacted by this making the random draws non-reproducible. The solution is to move the random draws outside of the while loop (just make a single draw for the prefernce and increment with a counter), and remove the outer loop iteration instead using an annotation to get the desired repetitions. 
Closes #16208 --- .../basic/SearchWhileCreatingIndexIT.java | 70 ++++++++++--------- 1 file changed, 37 insertions(+), 33 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 28874d2e2a4..35dbde26aba 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -29,7 +29,6 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.greaterThanOrEqualTo; - /** * This test basically verifies that search with a single shard active (cause we indexed to it) and other * shards possibly not active at all (cause they haven't allocated) will still work. @@ -58,39 +57,44 @@ public class SearchWhileCreatingIndexIT extends ESIntegTestCase { int shardsNo = numberOfReplicas + 1; int neededNodes = shardsNo <= 2 ? 
1 : shardsNo / 2 + 1; internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(neededNodes, shardsNo)); - for (int i = 0; i < 20; i++) { - logger.info("running iteration {}", i); - if (createIndex) { - createIndex("test"); - } - client().prepareIndex("test", "type1", randomAsciiOfLength(5)).setSource("field", "test").execute().actionGet(); - RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().actionGet(); - assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); // at least one shard should be successful when refreshing - // we want to make sure that while recovery happens, and a replica gets recovered, its properly refreshed - ClusterHealthStatus status = ClusterHealthStatus.RED; - while (status != ClusterHealthStatus.GREEN) { - // first, verify that search on the primary search works - SearchResponse searchResponse = client().prepareSearch("test").setPreference("_primary").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); - assertHitCount(searchResponse, 1); - // now, let it go to primary or replica, though in a randomized re-creatable manner - String preference = randomAsciiOfLength(5); - Client client = client(); - searchResponse = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); - if (searchResponse.getHits().getTotalHits() != 1) { - refresh(); - SearchResponse searchResponseAfterRefresh = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); - logger.info("hits count mismatch on any shard search failed, post explicit refresh hits are {}", searchResponseAfterRefresh.getHits().getTotalHits()); - ensureGreen(); - SearchResponse searchResponseAfterGreen = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); - logger.info("hits count mismatch on any 
shard search failed, post explicit wait for green hits are {}", searchResponseAfterGreen.getHits().getTotalHits()); - assertHitCount(searchResponse, 1); - } - assertHitCount(searchResponse, 1); - status = client().admin().cluster().prepareHealth("test").get().getStatus(); - internalCluster().ensureAtLeastNumDataNodes(numberOfReplicas + 1); - } - cluster().wipeIndices("test"); + String id = randomAsciiOfLength(5); + // we will go the primary or the replica, but in a + // randomized re-creatable manner + int counter = 0; + String preference = randomAsciiOfLength(5); + + logger.info("running iteration for id {}, preference {}", id, preference); + + if (createIndex) { + createIndex("test"); } + client().prepareIndex("test", "type1", id).setSource("field", "test").execute().actionGet(); + RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().actionGet(); + assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); // at least one shard should be successful when refreshing + + logger.info("using preference {}", preference); + // we want to make sure that while recovery happens, and a replica gets recovered, its properly refreshed + ClusterHealthStatus status = ClusterHealthStatus.RED; + while (status != ClusterHealthStatus.GREEN) { + // first, verify that search on the primary search works + SearchResponse searchResponse = client().prepareSearch("test").setPreference("_primary").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); + assertHitCount(searchResponse, 1); + Client client = client(); + searchResponse = client.prepareSearch("test").setPreference(preference + Integer.toString(counter++)).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); + if (searchResponse.getHits().getTotalHits() != 1) { + refresh(); + SearchResponse searchResponseAfterRefresh = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", 
"test")).execute().actionGet(); + logger.info("hits count mismatch on any shard search failed, post explicit refresh hits are {}", searchResponseAfterRefresh.getHits().getTotalHits()); + ensureGreen(); + SearchResponse searchResponseAfterGreen = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); + logger.info("hits count mismatch on any shard search failed, post explicit wait for green hits are {}", searchResponseAfterGreen.getHits().getTotalHits()); + assertHitCount(searchResponse, 1); + } + assertHitCount(searchResponse, 1); + status = client().admin().cluster().prepareHealth("test").get().getStatus(); + internalCluster().ensureAtLeastNumDataNodes(numberOfReplicas + 1); + } + cluster().wipeIndices("test"); } } From b99e9cd2b0561132dc1b496c9e61a65c64761907 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 25 Jan 2016 15:05:00 +0100 Subject: [PATCH 310/347] Use Setting class to register node.ingest setting Boolean parsing is now strict. Also added isIngestNode methods to DiscoveryNode to align this setting with the existing node.data node.master and node.client. Removed NodeModule#isIngestEnabled methods that are not needed anymore. 
--- .../ingest/IngestProxyActionFilter.java | 9 +-- .../client/transport/TransportClient.java | 14 ++-- .../cluster/node/DiscoveryNode.java | 13 +++ .../cluster/node/DiscoveryNodes.java | 3 +- .../common/settings/ClusterSettings.java | 4 +- .../java/org/elasticsearch/node/Node.java | 6 +- .../org/elasticsearch/node/NodeModule.java | 13 --- .../ingest/IngestProxyActionFilterTests.java | 3 +- .../elasticsearch/node/NodeModuleTests.java | 80 ------------------- 9 files changed, 29 insertions(+), 116 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/node/NodeModuleTests.java diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index fef7a37bd69..39a4b1fa4e8 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -35,17 +35,10 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.node.NodeModule; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportResponse; -import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; import java.util.concurrent.atomic.AtomicInteger; public final class IngestProxyActionFilter implements ActionFilter { @@ -110,7 +103,7 @@ public final class IngestProxyActionFilter implements ActionFilter { } private DiscoveryNode randomIngestNode() { - assert NodeModule.isNodeIngestEnabled(clusterService.localNode().attributes()) == false; + assert 
clusterService.localNode().isIngestNode() == false; DiscoveryNodes nodes = clusterService.state().getNodes(); DiscoveryNode[] ingestNodes = nodes.getIngestNodes().values().toArray(DiscoveryNode.class); if (ingestNodes.length == 0) { diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 0d677490fd3..9930a9d1539 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -19,10 +19,6 @@ package org.elasticsearch.client.transport; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; @@ -49,7 +45,6 @@ import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.indices.breaker.CircuitBreakerModule; import org.elasticsearch.monitor.MonitorService; -import org.elasticsearch.node.NodeModule; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; @@ -60,6 +55,10 @@ import org.elasticsearch.threadpool.ThreadPoolModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.netty.NettyTransport; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + import static org.elasticsearch.common.settings.Settings.settingsBuilder; /** @@ -117,7 +116,7 @@ public class TransportClient extends AbstractClient { .put("node.client", true) .put(CLIENT_TYPE_SETTING, CLIENT_TYPE); return new PluginsService(settingsBuilder.build(), null, null, pluginClasses); - }; + } /** * Builds a new instance of the transport client. 
@@ -151,8 +150,7 @@ public class TransportClient extends AbstractClient { // noop } }); - boolean ingestEnabled = NodeModule.isNodeIngestEnabled(settings); - modules.add(new ActionModule(ingestEnabled, true)); + modules.add(new ActionModule(false, true)); modules.add(new CircuitBreakerModule(settings)); pluginsService.processModules(modules); diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 7dce2172879..e05bab6d4a4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.transport.TransportAddressSerializers; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.node.Node; import java.io.IOException; import java.util.Collections; @@ -87,6 +88,10 @@ public class DiscoveryNode implements Streamable, ToXContent { return Booleans.isExplicitTrue(data); } + public static boolean ingestNode(Settings settings) { + return Node.NODE_INGEST_SETTING.get(settings); + } + public static final List EMPTY_LIST = Collections.emptyList(); private String nodeName = ""; @@ -316,6 +321,14 @@ public class DiscoveryNode implements Streamable, ToXContent { return masterNode(); } + /** + * Returns a boolean that tells whether this an ingest node or not + */ + public boolean isIngestNode() { + String ingest = attributes.get("ingest"); + return ingest == null ? 
true : Booleans.parseBooleanExact(ingest); + } + public Version version() { return this.version; } diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 58f7244c5c1..e24c25dacbb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.node.NodeModule; import java.io.IOException; import java.util.ArrayList; @@ -678,7 +677,7 @@ public class DiscoveryNodes extends AbstractDiffable implements masterNodesBuilder.put(nodeEntry.key, nodeEntry.value); minNonClientNodeVersion = Version.smallest(minNonClientNodeVersion, nodeEntry.value.version()); } - if (NodeModule.isNodeIngestEnabled(nodeEntry.value.getAttributes())) { + if (nodeEntry.value.isIngestNode()) { ingestNodesBuilder.put(nodeEntry.key, nodeEntry.value); } minNodeVersion = Version.smallest(minNodeVersion, nodeEntry.value.version()); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 1e764dce42c..b260cd6c791 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.node.Node; import org.elasticsearch.search.SearchService; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -158,5 +159,6 @@ public final class ClusterSettings extends AbstractScopedSettings { Transport.TRANSPORT_TCP_COMPRESS, IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, - PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING))); + PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING, + Node.NODE_INGEST_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 7ca0f5e76a7..1db76266251 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterNameModule; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.component.Lifecycle; @@ -46,6 +47,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; @@ -119,6 +121,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; */ public class Node implements Releasable { + public static final Setting NODE_INGEST_SETTING = Setting.boolSetting("node.ingest", true, false, Setting.Scope.CLUSTER); private static final String CLIENT_TYPE = "node"; public static final String HTTP_ENABLED = "http.enabled"; private 
final Lifecycle lifecycle = new Lifecycle(); @@ -190,8 +193,7 @@ public class Node implements Releasable { modules.add(new ClusterModule(this.settings)); modules.add(new IndicesModule()); modules.add(new SearchModule(settings, namedWriteableRegistry)); - boolean ingestEnabled = NodeModule.isNodeIngestEnabled(settings); - modules.add(new ActionModule(ingestEnabled, false)); + modules.add(new ActionModule(DiscoveryNode.ingestNode(settings), false)); modules.add(new GatewayModule(settings)); modules.add(new NodeClientModule()); modules.add(new PercolatorModule()); diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index 1844c269754..442dc727007 100644 --- a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -20,10 +20,7 @@ package org.elasticsearch.node; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.ingest.core.Processor; @@ -113,14 +110,4 @@ public class NodeModule extends AbstractModule { public void registerProcessor(String type, Function> processorFactoryProvider) { processorsRegistry.registerProcessor(type, processorFactoryProvider); } - - public static boolean isNodeIngestEnabled(Settings settings) { - return settings.getAsBoolean("node.ingest", true); - } - - public static boolean isNodeIngestEnabled(ImmutableOpenMap nodeAttributes) { - String ingestEnabled = nodeAttributes.get("ingest"); - //reproduces same logic used in settings.getAsBoolean used above - return Booleans.parseBoolean(ingestEnabled, true); - } } diff --git 
a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index a48398e9807..fa9728c4cd1 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.transport.DummyTransportAddress; -import org.elasticsearch.node.NodeModule; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -246,7 +245,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { @Override protected boolean matchesSafely(DiscoveryNode node) { - return NodeModule.isNodeIngestEnabled(node.getAttributes()); + return node.isIngestNode(); } } } diff --git a/core/src/test/java/org/elasticsearch/node/NodeModuleTests.java b/core/src/test/java/org/elasticsearch/node/NodeModuleTests.java deleted file mode 100644 index ad8005d2901..00000000000 --- a/core/src/test/java/org/elasticsearch/node/NodeModuleTests.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.node; - -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.Matchers.equalTo; - -public class NodeModuleTests extends ESTestCase { - - public void testIsNodeIngestEnabledSettings() { - assertThat(NodeModule.isNodeIngestEnabled(Settings.EMPTY), equalTo(true)); - assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", true).build()), equalTo(true)); - assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "true").build()), equalTo(true)); - assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", false).build()), equalTo(false)); - - assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "false").build()), equalTo(false)); - assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "off").build()), equalTo(false)); - assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "no").build()), equalTo(false)); - assertThat(NodeModule.isNodeIngestEnabled(Settings.builder().put("node.ingest", "0").build()), equalTo(false)); - } - - public void testIsIngestEnabledAttributes() { - assertThat(NodeModule.isNodeIngestEnabled(ImmutableOpenMap.builder().build()), equalTo(true)); - - ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); - builder.put("ingest", "true"); - assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(true)); - - 
builder = ImmutableOpenMap.builder(); - builder.put("ingest", "false"); - assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); - - builder = ImmutableOpenMap.builder(); - builder.put("ingest", "off"); - assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); - - builder = ImmutableOpenMap.builder(); - builder.put("ingest", "no"); - assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); - - builder = ImmutableOpenMap.builder(); - builder.put("ingest", "0"); - assertThat(NodeModule.isNodeIngestEnabled(builder.build()), equalTo(false)); - } - - public void testIsIngestEnabledMethodsReturnTheSameValue() { - String randomString; - if (randomBoolean()) { - randomString = randomFrom("true", "false", "on", "off", "yes", "no", "0", "1"); - } else { - randomString = randomAsciiOfLengthBetween(1, 5); - } - Settings settings = Settings.builder().put("node.ingest", randomString).build(); - ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); - builder.put("ingest", randomString); - ImmutableOpenMap attributes = builder.build(); - - assertThat(NodeModule.isNodeIngestEnabled(settings), equalTo(NodeModule.isNodeIngestEnabled(attributes))); - } -} From b676583ba57d7eb87c91d107c8b9dc6d536a50d6 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Mon, 25 Jan 2016 15:24:10 +0100 Subject: [PATCH 311/347] Deprecate fuzzy query With this commit we deprecate the widely misunderstood fuzzy query but will still allow the fuzziness parameter in match queries and suggesters. 
Relates to #15760 --- .../index/query/FuzzyQueryBuilder.java | 4 ++++ .../index/query/FuzzyQueryParser.java | 5 +++++ .../elasticsearch/index/query/QueryBuilders.java | 14 ++++++++++++++ .../index/query/QueryDSLDocumentationTests.java | 1 + .../search/highlight/HighlighterSearchIT.java | 1 + .../search/matchedqueries/MatchedQueriesIT.java | 1 + .../search/profile/RandomQueryGenerator.java | 3 +++ .../elasticsearch/search/query/SearchQueryIT.java | 1 + .../validate/SimpleValidateQueryIT.java | 1 + docs/java-api/query-dsl/fuzzy-query.asciidoc | 2 ++ docs/reference/query-dsl/fuzzy-query.asciidoc | 2 ++ 11 files changed, 35 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 3be82f39704..2031abc0073 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -37,7 +37,11 @@ import java.util.Objects; /** * A Query that does fuzzy matching for a specific value. + * + * @deprecated Fuzzy queries are not useful enough. This class will be removed with Elasticsearch 4.0. In most cases you may want to use + * a match query with the fuzziness parameter for strings or range queries for numeric and date fields. 
*/ +@Deprecated public class FuzzyQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { public static final String NAME = "fuzzy"; diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java index 85365f84fb9..55dddbe4af7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java @@ -26,6 +26,11 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +/** + * @deprecated Fuzzy queries are not useful enough. This class will be removed with Elasticsearch 4.0. In most cases you may want to use + * a match query with the fuzziness parameter for strings or range queries for numeric and date fields. + */ +@Deprecated public class FuzzyQueryParser implements QueryParser { public static final ParseField TERM_FIELD = new ParseField("term"); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index 893c97f332b..03ccebf0479 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -199,7 +199,14 @@ public abstract class QueryBuilders { * * @param name The name of the field * @param value The value of the term + * + * @deprecated Fuzzy queries are not useful enough and will be removed with Elasticsearch 4.0. In most cases you may want to use + * a match query with the fuzziness parameter for strings or range queries for numeric and date fields. 
+ * + * @see #matchQuery(String, Object) + * @see #rangeQuery(String) */ + @Deprecated public static FuzzyQueryBuilder fuzzyQuery(String name, String value) { return new FuzzyQueryBuilder(name, value); } @@ -209,7 +216,14 @@ public abstract class QueryBuilders { * * @param name The name of the field * @param value The value of the term + * + * @deprecated Fuzzy queries are not useful enough and will be removed with Elasticsearch 4.0. In most cases you may want to use + * a match query with the fuzziness parameter for strings or range queries for numeric and date fields. + * + * @see #matchQuery(String, Object) + * @see #rangeQuery(String) */ + @Deprecated public static FuzzyQueryBuilder fuzzyQuery(String name, Object value) { return new FuzzyQueryBuilder(name, value); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java index 3e899498219..cb9177369cf 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java @@ -138,6 +138,7 @@ public class QueryDSLDocumentationTests extends ESTestCase { functionScoreQuery(functions); } + @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0 public void testFuzzy() { fuzzyQuery("name", "kimchy"); } diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 6f9ef101289..10c7c54b83b 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -2378,6 +2378,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { } + @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0 public void 
testPostingsHighlighterFuzzyQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java index d8c16282e18..4daa45fe391 100644 --- a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java +++ b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java @@ -250,6 +250,7 @@ public class MatchedQueriesIT extends ESIntegTestCase { } } + @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0 public void testFuzzyQuerySupportsName() { createIndex("test1"); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java index 9eb41086bed..fff0b99c4d1 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java @@ -72,6 +72,7 @@ public class RandomQueryGenerator { } } + @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0 private static QueryBuilder randomTerminalQuery(List stringFields, List numericFields, int numDocs) { switch (randomIntBetween(0,6)) { case 0: @@ -195,6 +196,8 @@ public class RandomQueryGenerator { return q; } + @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0 + @Deprecated private static QueryBuilder randomFuzzyQuery(List fields) { QueryBuilder q = QueryBuilders.fuzzyQuery(randomField(fields), randomQueryString(1)); diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index ad9ab044ad3..d723c88535a 100644 --- 
a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1451,6 +1451,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(searchResponse, 3l); } + @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0 public void testSpanMultiTermQuery() throws IOException { createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 16571169915..d937d5bade3 100644 --- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -212,6 +212,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo (one* two*)\"")); } + @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0 public void testExplainWithRewriteValidateQuery() throws Exception { client().admin().indices().prepareCreate("test") .addMapping("type1", "field", "type=string,analyzer=whitespace") diff --git a/docs/java-api/query-dsl/fuzzy-query.asciidoc b/docs/java-api/query-dsl/fuzzy-query.asciidoc index e871bc9d0b4..1ea59830723 100644 --- a/docs/java-api/query-dsl/fuzzy-query.asciidoc +++ b/docs/java-api/query-dsl/fuzzy-query.asciidoc @@ -1,6 +1,8 @@ [[java-query-dsl-fuzzy-query]] ==== Fuzzy Query +deprecated[3.0.0, Will be removed without a replacement for `string` fields. Note that the `fuzziness` parameter is still supported for match queries and in suggesters. Use range queries for `date` and `numeric` fields instead.] 
+ See {ref}/query-dsl-fuzzy-query.html[Fuzzy Query] [source,java] diff --git a/docs/reference/query-dsl/fuzzy-query.asciidoc b/docs/reference/query-dsl/fuzzy-query.asciidoc index 72bb151f636..a2d770aabcc 100644 --- a/docs/reference/query-dsl/fuzzy-query.asciidoc +++ b/docs/reference/query-dsl/fuzzy-query.asciidoc @@ -1,6 +1,8 @@ [[query-dsl-fuzzy-query]] === Fuzzy Query +deprecated[3.0.0, Will be removed without a replacement for `string` fields. Note that the `fuzziness` parameter is still supported for match queries and in suggesters. Use range queries for `date` and `numeric` fields instead.] + The fuzzy query uses similarity based on Levenshtein edit distance for `string` fields, and a `+/-` margin on numeric and date fields. From 1bc0f7a8d00b4c6737b5b981af93961db73bc8a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 25 Jan 2016 15:05:16 +0100 Subject: [PATCH 312/347] Replace RescoreBaseBuilder with AbstractRescoreBuilder The current RescoreBaseBuilder only serves as a container for a pair of the optional `window_size` parameter and the actual rescorer. Instead of a wrapper object, this makes it an abstract class that conrete implementations like QueryRescoreBuilder can extend. 
--- .../action/search/SearchRequestBuilder.java | 13 ++-- .../common/io/stream/StreamInput.java | 2 +- .../common/io/stream/StreamOutput.java | 2 +- .../elasticsearch/search/SearchService.java | 5 +- .../search/builder/SearchSourceBuilder.java | 23 +++--- ...ilder.java => AbstractRescoreBuilder.java} | 76 +++++++------------ .../search/rescore/QueryRescorerBuilder.java | 18 +++-- .../search/rescore/RescoreBuilder.java | 6 +- .../search/functionscore/QueryRescorerIT.java | 48 ++++++------ .../rescore/QueryRescoreBuilderTests.java | 51 ++++++------- 10 files changed, 115 insertions(+), 129 deletions(-) rename core/src/main/java/org/elasticsearch/search/rescore/{RescoreBaseBuilder.java => AbstractRescoreBuilder.java} (66%) diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 3bcc01daccb..4c538e59e71 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -32,7 +32,6 @@ import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; -import org.elasticsearch.search.rescore.RescoreBaseBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; @@ -397,7 +396,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder rescorer) { sourceBuilder().clearRescorers(); return addRescorer(rescorer); } @@ -412,7 +411,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder rescorer) { + sourceBuilder().addRescorer(rescorer); return this; } @@ -433,8 +432,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder 
rescorer, int window) { + sourceBuilder().addRescorer(rescorer.windowSize(window)); return this; } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 722de952aa1..02e937dbd83 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -679,7 +679,7 @@ public abstract class StreamInput extends InputStream { /** * Reads a {@link RescoreBuilder} from the current stream */ - public RescoreBuilder readRescorer() throws IOException { + public RescoreBuilder readRescorer() throws IOException { return readNamedWriteable(RescoreBuilder.class); } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index e755fd5accf..0863717a5ab 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -681,7 +681,7 @@ public abstract class StreamOutput extends OutputStream { /** * Writes a {@link RescoreBuilder} to the current stream */ - public void writeRescorer(RescoreBuilder rescorer) throws IOException { + public void writeRescorer(RescoreBuilder rescorer) throws IOException { writeNamedWriteable(rescorer); } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index a5511277bc5..f556d74e065 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -61,7 +61,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.MapperService; 
-import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.search.stats.ShardSearchStats; @@ -101,7 +100,7 @@ import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.query.ScrollQuerySearchResult; -import org.elasticsearch.search.rescore.RescoreBaseBuilder; +import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -775,7 +774,7 @@ public class SearchService extends AbstractLifecycleComponent imp } if (source.rescores() != null) { try { - for (RescoreBaseBuilder rescore : source.rescores()) { + for (RescoreBuilder rescore : source.rescores()) { context.addRescore(rescore.build(context.indexShard().getQueryShardContext())); } } catch (IOException e) { diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 4db3a0583bb..5c50134d4ce 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -46,7 +46,8 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.rescore.RescoreBaseBuilder; +import org.elasticsearch.search.rescore.AbstractRescoreBuilder; +import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import 
org.elasticsearch.search.sort.SortOrder; @@ -152,7 +153,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private BytesReference innerHitsBuilder; - private List rescoreBuilders; + private List> rescoreBuilders; private ObjectFloatHashMap indexBoost = null; @@ -459,7 +460,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return suggestBuilder; } - public SearchSourceBuilder addRescorer(RescoreBaseBuilder rescoreBuilder) { + public SearchSourceBuilder addRescorer(RescoreBuilder rescoreBuilder) { if (rescoreBuilders == null) { rescoreBuilders = new ArrayList<>(); } @@ -491,7 +492,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Gets the bytes representing the rescore builders for this request. */ - public List rescores() { + public List> rescores() { return rescoreBuilders; } @@ -871,9 +872,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } builder.sorts = sorts; } else if (context.parseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) { - List rescoreBuilders = new ArrayList<>(); + List> rescoreBuilders = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - rescoreBuilders.add(RescoreBaseBuilder.PROTOTYPE.fromXContent(context)); + rescoreBuilders.add(AbstractRescoreBuilder.parseFromXContent(context)); } builder.rescoreBuilders = rescoreBuilders; } else if (context.parseFieldMatcher().match(currentFieldName, STATS_FIELD)) { @@ -1040,7 +1041,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (rescoreBuilders != null) { builder.startArray(RESCORE_FIELD.getPreferredName()); - for (RescoreBaseBuilder rescoreBuilder : rescoreBuilders) { + for (RescoreBuilder rescoreBuilder : rescoreBuilders) { rescoreBuilder.toXContent(builder, params); } builder.endArray(); @@ -1187,9 +1188,9 @@ public final class SearchSourceBuilder extends 
ToXContentToBytes implements Writ } if (in.readBoolean()) { int size = in.readVInt(); - List rescoreBuilders = new ArrayList<>(); + List> rescoreBuilders = new ArrayList<>(); for (int i = 0; i < size; i++) { - rescoreBuilders.add(RescoreBaseBuilder.PROTOTYPE.readFrom(in)); + rescoreBuilders.add(in.readRescorer()); } builder.rescoreBuilders = rescoreBuilders; } @@ -1303,8 +1304,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ out.writeBoolean(hasRescoreBuilders); if (hasRescoreBuilders) { out.writeVInt(rescoreBuilders.size()); - for (RescoreBaseBuilder rescoreBuilder : rescoreBuilders) { - rescoreBuilder.writeTo(out); + for (RescoreBuilder rescoreBuilder : rescoreBuilders) { + out.writeRescorer(rescoreBuilder); } } boolean hasScriptFields = scriptFields != null; diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/AbstractRescoreBuilder.java similarity index 66% rename from core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java rename to core/src/main/java/org/elasticsearch/search/rescore/AbstractRescoreBuilder.java index a0201ea8362..8afc3dc301c 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBaseBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/AbstractRescoreBuilder.java @@ -24,55 +24,40 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Objects; /** - * The base builder for rescorers. Wraps a conrete instance of {@link RescoreBuilder} and - * adds the ability to specify the optional `window_size` parameter + * The abstract base builder for instances of {@link RescoreBuilder}. */ -public class RescoreBaseBuilder implements ToXContent, Writeable { +public abstract class AbstractRescoreBuilder> implements RescoreBuilder { - private RescoreBuilder rescorer; - private Integer windowSize; - public static final RescoreBaseBuilder PROTOTYPE = new RescoreBaseBuilder(new QueryRescorerBuilder(new MatchAllQueryBuilder())); + protected Integer windowSize; private static ParseField WINDOW_SIZE_FIELD = new ParseField("window_size"); - public RescoreBaseBuilder(RescoreBuilder rescorer) { - if (rescorer == null) { - throw new IllegalArgumentException("rescorer cannot be null"); - } - this.rescorer = rescorer; - } - - public RescoreBuilder rescorer() { - return this.rescorer; - } - - public RescoreBaseBuilder windowSize(int windowSize) { + @SuppressWarnings("unchecked") + @Override + public RB windowSize(int windowSize) { this.windowSize = windowSize; - return this; + return (RB) this; } + @Override public Integer windowSize() { return windowSize; } - public RescoreBaseBuilder fromXContent(QueryParseContext parseContext) throws IOException { + public static RescoreBuilder parseFromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; - RescoreBuilder rescorer = null; + AbstractRescoreBuilder rescorer = null; Integer windowSize = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -98,19 +83,10 @@ public class RescoreBaseBuilder implements ToXContent, Writeable queryBuilder) { 
return new QueryRescorerBuilder(queryBuilder); } @Override - public final int hashCode() { - return Objects.hash(windowSize, rescorer); + public int hashCode() { + return Objects.hash(windowSize); } @Override - public final boolean equals(Object obj) { + public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } - RescoreBaseBuilder other = (RescoreBaseBuilder) obj; - return Objects.equals(windowSize, other.windowSize) && - Objects.equals(rescorer, other.rescorer); + @SuppressWarnings("rawtypes") + AbstractRescoreBuilder other = (AbstractRescoreBuilder) obj; + return Objects.equals(windowSize, other.windowSize); } @Override - public RescoreBaseBuilder readFrom(StreamInput in) throws IOException { - RescoreBaseBuilder builder = new RescoreBaseBuilder(in.readRescorer()); + public RB readFrom(StreamInput in) throws IOException { + RB builder = doReadFrom(in); builder.windowSize = in.readOptionalVInt(); return builder; } + protected abstract RB doReadFrom(StreamInput in) throws IOException; + @Override public void writeTo(StreamOutput out) throws IOException { - out.writeRescorer(rescorer); + doWriteTo(out); out.writeOptionalVInt(this.windowSize); } + protected abstract void doWriteTo(StreamOutput out) throws IOException; + @Override public final String toString() { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.prettyPrint(); - builder.startObject(); toXContent(builder, EMPTY_PARAMS); - builder.endObject(); return builder.string(); } catch (Exception e) { return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java index 936353f7868..4e6d0e0f568 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java @@ -35,7 +35,7 @@ import java.io.IOException; import java.util.Locale; import java.util.Objects; -public class QueryRescorerBuilder implements RescoreBuilder { +public class QueryRescorerBuilder extends AbstractRescoreBuilder { public static final String NAME = "query"; @@ -131,14 +131,13 @@ public class QueryRescorerBuilder implements RescoreBuilder extends ToXContent, NamedWriteable { +public interface RescoreBuilder> extends ToXContent, NamedWriteable { RescoreSearchContext build(QueryShardContext context) throws IOException; RB fromXContent(QueryParseContext parseContext) throws IOException; + + RB windowSize(int windowSize); + + Integer windowSize(); } \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index c9b895b1995..163ee84cf18 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -37,9 +37,9 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.rescore.AbstractRescoreBuilder; import org.elasticsearch.search.rescore.QueryRescoreMode; import org.elasticsearch.search.rescore.QueryRescorerBuilder; -import org.elasticsearch.search.rescore.RescoreBaseBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -80,7 +80,7 @@ public class QueryRescorerIT extends ESIntegTestCase { for (int j = 0 ; j < iters; j++) { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchAllQuery()) - .setRescorer(RescoreBaseBuilder.queryRescorer( + 
.setRescorer(AbstractRescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(), ScoreFunctionBuilders.weightFactorFunction(100)).boostMode(CombineFunction.REPLACE)) .setQueryWeight(0.0f).setRescoreQueryWeight(1.0f), 1).setSize(randomIntBetween(2, 10)).execute() @@ -116,7 +116,7 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)) .setRescoreQueryWeight(2), 5).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(3l)); @@ -126,7 +126,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - .setRescorer(RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3)), 5) + .setRescorer(AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3)), 5) .execute().actionGet(); assertHitCount(searchResponse, 3); @@ -136,7 +136,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - .setRescorer(RescoreBaseBuilder.queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown"))), 5).execute() + .setRescorer(AbstractRescoreBuilder.queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown"))), 5).execute() .actionGet(); assertHitCount(searchResponse, 3); @@ -181,7 +181,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) 
.setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -197,7 +197,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -214,7 +214,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -263,7 +263,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 2).execute().actionGet(); // Only top 2 hits were re-ordered: assertThat(searchResponse.getHits().hits().length, equalTo(4)); @@ 
-280,7 +280,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 3).execute().actionGet(); // Only top 3 hits were re-ordered: @@ -333,7 +333,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(1.0f).setRescoreQueryWeight(-1f), 3).execute().actionGet(); // 6 and 1 got worse, and then the hit (2) outside the rescore window were sorted ahead: @@ -424,7 +424,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - RescoreBaseBuilder + AbstractRescoreBuilder .queryRescorer( QueryBuilders .constantScoreQuery(QueryBuilders.matchPhraseQuery("field1", intToEnglish).slop(3))) @@ -462,7 +462,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - RescoreBaseBuilder + AbstractRescoreBuilder .queryRescorer( QueryBuilders .constantScoreQuery(QueryBuilders.matchPhraseQuery("field1", "not in the index").slop(3))) @@ -480,7 +480,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - RescoreBaseBuilder + AbstractRescoreBuilder .queryRescorer( QueryBuilders.matchPhraseQuery("field1", intToEnglish).slop(0)) .setQueryWeight(1.0f).setRescoreQueryWeight(1.0f), 2 * rescoreWindow).execute().actionGet(); @@ -512,7 +512,7 @@ public class QueryRescorerIT extends ESIntegTestCase { 
.setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( - RescoreBaseBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)) + AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)) .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f), 5).setExplain(true).execute() .actionGet(); assertHitCount(searchResponse, 3); @@ -538,7 +538,7 @@ public class QueryRescorerIT extends ESIntegTestCase { String[] scoreModes = new String[]{ "max", "min", "avg", "total", "multiply", "" }; String[] descriptionModes = new String[]{ "max of:", "min of:", "avg of:", "sum of:", "product of:", "sum of:" }; for (int innerMode = 0; innerMode < scoreModes.length; innerMode++) { - QueryRescorerBuilder innerRescoreQuery = RescoreBaseBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f)) + QueryRescorerBuilder innerRescoreQuery = AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f)) .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f); if (!"".equals(scoreModes[innerMode])) { @@ -561,7 +561,7 @@ public class QueryRescorerIT extends ESIntegTestCase { } for (int outerMode = 0; outerMode < scoreModes.length; outerMode++) { - QueryRescorerBuilder outerRescoreQuery = RescoreBaseBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown") + QueryRescorerBuilder outerRescoreQuery = AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown") .boost(4.0f)).setQueryWeight(0.5f).setRescoreQueryWeight(0.4f); if (!"".equals(scoreModes[outerMode])) { @@ -572,7 +572,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .prepareSearch() .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - 
.addRescorer(innerRescoreQuery, 5).addRescorer(outerRescoreQuery, 10) + .addRescorer(innerRescoreQuery, 5).addRescorer(outerRescoreQuery.windowSize(10)) .setExplain(true).get(); assertHitCount(searchResponse, 3); assertFirstHit(searchResponse, hasId("1")); @@ -599,7 +599,7 @@ public class QueryRescorerIT extends ESIntegTestCase { for (int i = 0; i < numDocs - 4; i++) { String[] intToEnglish = new String[] { English.intToEnglish(i), English.intToEnglish(i + 1), English.intToEnglish(i + 2), English.intToEnglish(i + 3) }; - QueryRescorerBuilder rescoreQuery = RescoreBaseBuilder + QueryRescorerBuilder rescoreQuery = AbstractRescoreBuilder .queryRescorer( QueryBuilders.boolQuery() .disableCoord(true) @@ -682,10 +682,10 @@ public class QueryRescorerIT extends ESIntegTestCase { public void testMultipleRescores() throws Exception { int numDocs = indexRandomNumbers("keyword", 1, true); - QueryRescorerBuilder eightIsGreat = RescoreBaseBuilder.queryRescorer( + QueryRescorerBuilder eightIsGreat = AbstractRescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8)), ScoreFunctionBuilders.weightFactorFunction(1000.0f)).boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); - QueryRescorerBuilder sevenIsBetter = RescoreBaseBuilder.queryRescorer( + QueryRescorerBuilder sevenIsBetter = AbstractRescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7)), ScoreFunctionBuilders.weightFactorFunction(10000.0f)).boostMode(CombineFunction.REPLACE)) .setScoreMode(QueryRescoreMode.Total); @@ -703,10 +703,10 @@ public class QueryRescorerIT extends ESIntegTestCase { // We have no idea what the second hit will be because we didn't get a chance to look for seven // Now use one rescore to drag the number we're looking for into the window of another - QueryRescorerBuilder ninetyIsGood = RescoreBaseBuilder.queryRescorer( + QueryRescorerBuilder ninetyIsGood = 
AbstractRescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f)) .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); - QueryRescorerBuilder oneToo = RescoreBaseBuilder.queryRescorer( + QueryRescorerBuilder oneToo = AbstractRescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*one*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f)) .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); request.clearRescorers().addRescorer(ninetyIsGood, numDocs).addRescorer(oneToo, 10); @@ -759,7 +759,7 @@ public class QueryRescorerIT extends ESIntegTestCase { request.setQuery(QueryBuilders.termQuery("text", "hello")); request.setFrom(1); request.setSize(4); - request.addRescorer(RescoreBaseBuilder.queryRescorer(QueryBuilders.matchAllQuery()), 50); + request.addRescorer(AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchAllQuery()), 50); assertEquals(4, request.get().getHits().hits().length); } diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index 00c3c5f0d0f..cb6f7b50869 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -86,8 +86,8 @@ public class QueryRescoreBuilderTests extends ESTestCase { */ public void testSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - RescoreBaseBuilder original = randomRescoreBuilder(); - RescoreBaseBuilder deserialized = serializedCopy(original); + RescoreBuilder original = randomRescoreBuilder(); + RescoreBuilder deserialized = serializedCopy(original); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); 
assertNotSame(deserialized, original); @@ -99,7 +99,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { */ public void testEqualsAndHashcode() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - RescoreBaseBuilder firstBuilder = randomRescoreBuilder(); + RescoreBuilder firstBuilder = randomRescoreBuilder(); assertFalse("rescore builder is equal to null", firstBuilder.equals(null)); assertFalse("rescore builder is equal to incompatible type", firstBuilder.equals("")); assertTrue("rescore builder is not equal to self", firstBuilder.equals(firstBuilder)); @@ -107,13 +107,13 @@ public class QueryRescoreBuilderTests extends ESTestCase { equalTo(firstBuilder.hashCode())); assertThat("different rescore builder should not be equal", mutate(firstBuilder), not(equalTo(firstBuilder))); - RescoreBaseBuilder secondBuilder = serializedCopy(firstBuilder); + RescoreBuilder secondBuilder = serializedCopy(firstBuilder); assertTrue("rescore builder is not equal to self", secondBuilder.equals(secondBuilder)); assertTrue("rescore builder is not equal to its copy", firstBuilder.equals(secondBuilder)); assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder)); assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode())); - RescoreBaseBuilder thirdBuilder = serializedCopy(secondBuilder); + RescoreBuilder thirdBuilder = serializedCopy(secondBuilder); assertTrue("rescore builder is not equal to self", thirdBuilder.equals(thirdBuilder)); assertTrue("rescore builder is not equal to its copy", secondBuilder.equals(thirdBuilder)); assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); @@ -131,19 +131,19 @@ public class QueryRescoreBuilderTests extends ESTestCase { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); context.parseFieldMatcher(new 
ParseFieldMatcher(Settings.EMPTY)); for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - RescoreBaseBuilder rescoreBuilder = randomRescoreBuilder(); + RescoreBuilder rescoreBuilder = randomRescoreBuilder(); XContentParser parser = createParser(rescoreBuilder); context.reset(parser); parser.nextToken(); - RescoreBaseBuilder secondRescoreBuilder = RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + RescoreBuilder secondRescoreBuilder = AbstractRescoreBuilder.parseFromXContent(context); assertNotSame(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); } } - private static XContentParser createParser(RescoreBaseBuilder rescoreBuilder) throws IOException { + private static XContentParser createParser(RescoreBuilder rescoreBuilder) throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); @@ -171,7 +171,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { }; for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - RescoreBaseBuilder rescoreBuilder = randomRescoreBuilder(); + RescoreBuilder rescoreBuilder = randomRescoreBuilder(); QueryRescoreContext rescoreContext = (QueryRescoreContext) rescoreBuilder.build(mockShardContext); XContentParser parser = createParser(rescoreBuilder); @@ -198,7 +198,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + AbstractRescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("rescore doesn't support rescorer with name [bad_rescorer_name]", e.getMessage()); @@ -209,7 +209,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - 
RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + AbstractRescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage()); @@ -221,7 +221,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + AbstractRescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); @@ -230,7 +230,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { rescoreElement = "{ }"; prepareContext(context, rescoreElement); try { - RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + AbstractRescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("missing rescore type", e.getMessage()); @@ -242,7 +242,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + AbstractRescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (IllegalArgumentException e) { assertEquals("[query] unknown field [bad_fieldname], parser not found", e.getMessage()); @@ -254,7 +254,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + AbstractRescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("[query] failed to parse field [rescore_query]", e.getMessage()); @@ -265,7 +265,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" + "}\n"; prepareContext(context, 
rescoreElement); - RescoreBaseBuilder.PROTOTYPE.fromXContent(context); + AbstractRescoreBuilder.parseFromXContent(context); } /** @@ -278,8 +278,8 @@ public class QueryRescoreBuilderTests extends ESTestCase { assertTrue(parser.nextToken() == XContentParser.Token.START_OBJECT); } - private static RescoreBaseBuilder mutate(RescoreBaseBuilder original) throws IOException { - RescoreBaseBuilder mutation = serializedCopy(original); + private static RescoreBuilder mutate(RescoreBuilder original) throws IOException { + RescoreBuilder mutation = serializedCopy(original); if (randomBoolean()) { Integer windowSize = original.windowSize(); if (windowSize != null) { @@ -288,7 +288,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { mutation.windowSize(randomIntBetween(0, 100)); } } else { - QueryRescorerBuilder queryRescorer = (QueryRescorerBuilder) mutation.rescorer(); + QueryRescorerBuilder queryRescorer = (QueryRescorerBuilder) mutation; switch (randomIntBetween(0, 3)) { case 0: queryRescorer.setQueryWeight(queryRescorer.getQueryWeight() + 0.1f); @@ -317,7 +317,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { /** * create random shape that is put under test */ - public static RescoreBaseBuilder randomRescoreBuilder() { + public static org.elasticsearch.search.rescore.QueryRescorerBuilder randomRescoreBuilder() { QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()).queryName(randomAsciiOfLength(20)); org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder); @@ -330,18 +330,17 @@ public class QueryRescoreBuilderTests extends ESTestCase { if (randomBoolean()) { rescorer.setScoreMode(randomFrom(QueryRescoreMode.values())); } - RescoreBaseBuilder builder = new RescoreBaseBuilder(rescorer); if (randomBoolean()) { - builder.windowSize(randomIntBetween(0, 100)); + rescorer.windowSize(randomIntBetween(0, 100)); } - return builder; + return rescorer; } - 
private static RescoreBaseBuilder serializedCopy(RescoreBaseBuilder original) throws IOException { + private static RescoreBuilder serializedCopy(RescoreBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { - original.writeTo(output); + output.writeRescorer(original); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { - return RescoreBaseBuilder.PROTOTYPE.readFrom(in); + return in.readRescorer(); } } } From 9bb4caa6f1d9f04b5ed3ead8eef2ff1e6aef2a81 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 25 Jan 2016 16:40:19 +0100 Subject: [PATCH 313/347] adapt to upstream changes, jvm flag doesn't get returned anymore --- .../test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml index 68d1fc649c7..5c0cca3772e 100644 --- a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml +++ b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml @@ -9,4 +9,3 @@ nodes.info: {} - match: { nodes.$master.modules.0.name: ingest-grok } - - match: { nodes.$master.modules.0.jvm: true } From a306836e1a0ae6c361b86ea06f7bc2b379539f05 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 25 Jan 2016 17:25:31 +0100 Subject: [PATCH 314/347] adapt to upstream changes, mustache supports now lists and arrays --- .../elasticsearch/ingest/IngestDocumentMustacheIT.java | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java index 1b080fec7ed..f27a8e4c8d6 100644 --- 
a/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java +++ b/qa/ingest-with-mustache/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java @@ -69,12 +69,8 @@ public class IngestDocumentMustacheIT extends AbstractMustacheTests { list.add(null); document.put("list2", list); IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); - // TODO: fix index based lookups in lists: - ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{list1}} {{list2}}", templateService)); - assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 [foo, bar, null] [{field=value}, null]")); - - ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("2 {{_source.list1}} {{_source.list2}}", templateService)); - assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("2 [foo, bar, null] [{field=value}, null]")); + ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{list1.0}} {{list2.0}}", templateService)); + assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 foo {field=value}")); } public void testAccessIngestMetadataViaTemplate() { @@ -86,5 +82,4 @@ public class IngestDocumentMustacheIT extends AbstractMustacheTests { ingestDocument.setFieldValue(templateService.compile("ingest_timestamp"), ValueSource.wrap("{{_ingest.timestamp}} and {{_source._ingest.timestamp}}", templateService)); assertThat(ingestDocument.getFieldValue("ingest_timestamp", String.class), equalTo(ingestDocument.getIngestMetadata().get("timestamp") + " and bogus_timestamp")); } - } From 894efa3fb6eaeecf0c3ec1d63af0cc97176aa009 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 25 Jan 2016 12:06:39 -0800 Subject: [PATCH 315/347] update ingest docs - move ingest plugin docs to core reference docs - move geoip processor docs to plugins/ingest-geoip.asciidoc - add missing 
options tables for some processors - add description of pipeline definition - add description of processor definitions including common parameters like "tag" and "on_failure" --- docs/plugins/ingest-geoip.asciidoc | 64 ++++++ .../ingest}/ingest.asciidoc | 216 ++++++++++++------ 2 files changed, 212 insertions(+), 68 deletions(-) create mode 100644 docs/plugins/ingest-geoip.asciidoc rename docs/{plugins => reference/ingest}/ingest.asciidoc (87%) diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc new file mode 100644 index 00000000000..539c29971a4 --- /dev/null +++ b/docs/plugins/ingest-geoip.asciidoc @@ -0,0 +1,64 @@ +[[ingest-geoip]] +== Ingest Geoip Processor Plugin + +The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases. +This processor adds this information by default under the `geoip` field. + +The ingest plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available +under the CCA-ShareAlike 3.0 license. For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/ + +The GeoIP processor can run with other geoip2 databases from Maxmind. The files must be copied into the geoip config directory +and the `database_file` option should be used to specify the filename of the custom database. The geoip config directory +is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too. + +[[geoip-options]] +.Geoip options +[options="header"] +|====== +| Name | Required | Default | Description +| `source_field` | yes | - | The field to get the ip address or hostname from for the geographical lookup. +| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. +| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. 
The ingest plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. +| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] <1> | Controls what properties are added to the `target_field` based on the geoip lookup. +|====== + +<1> Depends on what is available in `database_field`: +* If the GeoLite2 City database is used then the following fields may be added under the `target_field`: `ip`, +`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` +and `location`. The fields actually added depend on what has been found and which fields were configured in `fields`. +* If the GeoLite2 Country database is used then the following fields may be added under the `target_field`: `ip`, +`country_iso_code`, `country_name` and `continent_name`.The fields actually added depend on what has been found and which fields were configured in `fields`. + +An example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "geoip" : { + "source_field" : "ip" + } + } + ] +} +-------------------------------------------------- + +An example that uses the default country database and add the geographical information to the `geo` field based on the `ip` field`: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "geoip" : { + "source_field" : "ip", + "target_field" : "geo", + "database_file" : "GeoLite2-Country.mmdb" + } + } + ] +} +-------------------------------------------------- diff --git a/docs/plugins/ingest.asciidoc b/docs/reference/ingest/ingest.asciidoc similarity index 87% rename from docs/plugins/ingest.asciidoc rename to docs/reference/ingest/ingest.asciidoc index 72336086b23..0c049f82b69 100644 --- 
a/docs/plugins/ingest.asciidoc +++ b/docs/reference/ingest/ingest.asciidoc @@ -28,12 +28,59 @@ PUT /my-index/my-type/my-id?pipeline=my_pipeline_id -------------------------------------------------- // AUTOSENSE +=== Pipeline Definition + +A pipeline is a definition of a series of processors that are to be +executed in the same sequential order as they are declared. +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ ... ] +} +-------------------------------------------------- + +The `description` is a special field to store a helpful description of +what the pipeline attempts to achieve. + +The `processors` parameter defines a list of processors to be executed in +order. + === Processors +All processors are defined in the following way within a pipeline definition: + +[source,js] +-------------------------------------------------- +{ + "PROCESSOR_NAME" : { + ... processor configuration options ... + } +} +-------------------------------------------------- + +Each processor defines its own configuration parameters, but all processors have +the ability to declare `tag` and `on_failure` fields. These fields are optional. + +A `tag` is simply a string identifier of the specific instatiation of a certain +processor in a pipeline. The `tag` field does not affect any processor's behavior, +but is very useful for bookkeeping and tracing errors to specific processors. + +See <> to learn more about the `on_failure` field and error handling in pipelines. + ==== Set processor Sets one field and associates it with the specified value. If the field already exists, its value will be replaced with the provided one. 
+[[set-options]] +.Set Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to insert, upsert, or update +| `value` | yes | - | The value to be set for the field +|====== + [source,js] -------------------------------------------------- { @@ -50,6 +97,15 @@ Converts a scalar to an array and appends one or more values to it if the field Creates an array containing the provided values if the fields doesn't exist. Accepts a single value or an array of values. +[[append-options]] +.Append Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to be appended to +| `value` | yes | - | The value to be appended +|====== + [source,js] -------------------------------------------------- { @@ -63,6 +119,14 @@ Accepts a single value or an array of values. ==== Remove processor Removes an existing field. If the field doesn't exist, an exception will be thrown +[[remove-options]] +.Remove Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to be removed +|====== + [source,js] -------------------------------------------------- { @@ -76,6 +140,15 @@ Removes an existing field. If the field doesn't exist, an exception will be thro Renames an existing field. If the field doesn't exist, an exception will be thrown. Also, the new field name must not exist. +[[rename-options]] +.Rename Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to be renamed +| `to` | yes | - | The new name of the field +|====== + [source,js] -------------------------------------------------- { @@ -96,6 +169,15 @@ The supported types include: `integer`, `float`, `string`, and `boolean`. `boolean` will set the field to true if its string value is equal to `true` (ignore case), to false if its string value is equal to `false` (ignore case) and it will throw exception otherwise. 
+[[convert-options]] +.Convert Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field whose value is to be converted +| `type` | yes | - | The type to convert the existing value to +|====== + [source,js] -------------------------------------------------- { @@ -110,9 +192,15 @@ false if its string value is equal to `false` (ignore case) and it will throw ex Converts a string field by applying a regular expression and a replacement. If the field is not a string, the processor will throw an exception. -This configuration takes a `field` for the field name, `pattern` for the -pattern to be replaced, and `replacement` for the string to replace the matching patterns with. - +[[gsub-options]] +.Gsub Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field apply the replacement for +| `pattern` | yes | - | The pattern to be replaced +| `replacement` | yes | - | The string to replace the matching patterns with. +|====== [source,js] -------------------------------------------------- @@ -129,6 +217,15 @@ pattern to be replaced, and `replacement` for the string to replace the matching Joins each element of an array into a single string using a separator character between each element. Throws error when the field is not an array. +[[join-options]] +.Join Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to be separated +| `separator` | yes | - | The separator character +|====== + [source,js] -------------------------------------------------- { @@ -142,6 +239,14 @@ Throws error when the field is not an array. ==== Split processor Split a field to an array using a separator character. Only works on string fields. 
+[[split-options]] +.Split Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to split +|====== + [source,js] -------------------------------------------------- { @@ -154,6 +259,14 @@ Split a field to an array using a separator character. Only works on string fiel ==== Lowercase processor Converts a string to its lowercase equivalent. +[[lowercase-options]] +.Lowercase Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to lowercase +|====== + [source,js] -------------------------------------------------- { @@ -166,6 +279,14 @@ Converts a string to its lowercase equivalent. ==== Uppercase processor Converts a string to its uppercase equivalent. +[[uppercase-options]] +.Uppercase Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to uppercase +|====== + [source,js] -------------------------------------------------- { @@ -178,6 +299,14 @@ Converts a string to its uppercase equivalent. ==== Trim processor Trims whitespace from field. NOTE: this only works on leading and trailing whitespaces. +[[trim-options]] +.Trim Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The string-valued field to trim whitespace from +|====== + [source,js] -------------------------------------------------- { @@ -346,71 +475,6 @@ An example of a pipeline specifying custom pattern definitions: } -------------------------------------------------- - -==== Geoip processor - -The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases. -This processor adds this information by default under the `geoip` field. - -The ingest plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available -under the CCA-ShareAlike 3.0 license. 
For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/ - -The GeoIP processor can run with other geoip2 databases from Maxmind. The files must be copied into the geoip config directory -and the `database_file` option should be used to specify the filename of the custom database. The geoip config directory -is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too. - -[[geoip-options]] -.Geoip options -[options="header"] -|====== -| Name | Required | Default | Description -| `source_field` | yes | - | The field to get the ip address or hostname from for the geographical lookup. -| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. -| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. -| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] <1> | Controls what properties are added to the `target_field` based on the geoip lookup. -|====== - -<1> Depends on what is available in `database_field`: -* If the GeoLite2 City database is used then the following fields may be added under the `target_field`: `ip`, -`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` -and `location`. The fields actually added depend on what has been found and which fields were configured in `fields`. -* If the GeoLite2 Country database is used then the following fields may be added under the `target_field`: `ip`, -`country_iso_code`, `country_name` and `continent_name`.The fields actually added depend on what has been found and which fields were configured in `fields`. 
- -An example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field: - -[source,js] --------------------------------------------------- -{ - "description" : "...", - "processors" : [ - { - "geoip" : { - "source_field" : "ip" - } - } - ] -} --------------------------------------------------- - -An example that uses the default country database and add the geographical information to the `geo` field based on the `ip` field`: - -[source,js] --------------------------------------------------- -{ - "description" : "...", - "processors" : [ - { - "geoip" : { - "source_field" : "ip", - "target_field" : "geo", - "database_file" : "GeoLite2-Country.mmdb" - } - } - ] -} --------------------------------------------------- - ==== Date processor The date processor is used for parsing dates from fields, and then using that date or timestamp as the timestamp for that document. @@ -454,6 +518,14 @@ The Fail Processor is used to raise an exception. This is useful for when a user expects a pipeline to fail and wishes to relay a specific message to the requester. +[[fail-options]] +.Fail Options +[options="header"] +|====== +| Name | Required | Default | Description +| `message` | yes | - | The error message of the `FailException` thrown by the processor +|====== + [source,js] -------------------------------------------------- { @@ -467,6 +539,14 @@ to the requester. The DeDot Processor is used to remove dots (".") from field names and replace them with a specific `separator` string. 
+[[dedot-options]] +.DeDot Options +[options="header"] +|====== +| Name | Required | Default | Description +| `separator` | yes | "_" | The string to replace dots with in all field names +|====== + [source,js] -------------------------------------------------- { From 8b02f214c43673d917e835fdbf127cb94b06a28b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 19 Jan 2016 09:35:48 +0100 Subject: [PATCH 316/347] percolator: The percolate api shouldn't add field mappings for unmapped fields inside the document being percolated to the mapping. Closes #15751 --- .../MultiDocumentPercolatorIndex.java | 12 ++- .../percolator/PercolateDocumentParser.java | 8 +- .../percolator/PercolatorService.java | 4 +- .../SingleDocumentPercolatorIndex.java | 7 +- .../PercolateDocumentParserTests.java | 6 +- .../percolator/PercolatorIT.java | 88 +------------------ docs/reference/migration/migrate_3_0.asciidoc | 4 + docs/reference/search/percolate.asciidoc | 10 +-- .../percolate/18_highligh_with_query.yaml | 2 +- 9 files changed, 29 insertions(+), 112 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java index 10eeec75cbc..9d091a4c0bd 100644 --- a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java +++ b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java @@ -39,6 +39,8 @@ import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -76,8 +78,7 @@ class 
MultiDocumentPercolatorIndex implements PercolatorIndex { } else { memoryIndex = new MemoryIndex(true); } - Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer(); - memoryIndices[i] = indexDoc(d, analyzer, memoryIndex).createSearcher().getIndexReader(); + memoryIndices[i] = indexDoc(d, memoryIndex, context, parsedDocument).createSearcher().getIndexReader(); } try { MultiReader mReader = new MultiReader(memoryIndices, true); @@ -101,8 +102,13 @@ class MultiDocumentPercolatorIndex implements PercolatorIndex { } } - MemoryIndex indexDoc(ParseContext.Document d, Analyzer analyzer, MemoryIndex memoryIndex) { + MemoryIndex indexDoc(ParseContext.Document d, MemoryIndex memoryIndex, PercolateContext context, ParsedDocument parsedDocument) { for (IndexableField field : d.getFields()) { + Analyzer analyzer = context.analysisService().defaultIndexAnalyzer(); + DocumentMapper documentMapper = context.mapperService().documentMapper(parsedDocument.type()); + if (documentMapper != null && documentMapper.mappers().getMapper(field.name()) != null) { + analyzer = documentMapper.mappers().indexAnalyzer(); + } if (field.fieldType().indexOptions() == IndexOptions.NONE && field.name().equals(UidFieldMapper.NAME)) { continue; } diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java b/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java index 6733ebd0b3a..8edc5212523 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java @@ -49,14 +49,13 @@ public class PercolateDocumentParser { private final HighlightPhase highlightPhase; private final SortParseElement sortParseElement; private final AggregationPhase aggregationPhase; - private final MappingUpdatedAction mappingUpdatedAction; @Inject - public PercolateDocumentParser(HighlightPhase highlightPhase, 
SortParseElement sortParseElement, AggregationPhase aggregationPhase, MappingUpdatedAction mappingUpdatedAction) { + public PercolateDocumentParser(HighlightPhase highlightPhase, SortParseElement sortParseElement, + AggregationPhase aggregationPhase) { this.highlightPhase = highlightPhase; this.sortParseElement = sortParseElement; this.aggregationPhase = aggregationPhase; - this.mappingUpdatedAction = mappingUpdatedAction; } public ParsedDocument parse(PercolateShardRequest request, PercolateContext context, MapperService mapperService, QueryShardContext queryShardContext) { @@ -98,9 +97,6 @@ public class PercolateDocumentParser { if (docMapper.getMapping() != null) { doc.addDynamicMappingsUpdate(docMapper.getMapping()); } - if (doc.dynamicMappingsUpdate() != null) { - mappingUpdatedAction.updateMappingOnMasterSynchronously(request.shardId().getIndex(), request.documentType(), doc.dynamicMappingsUpdate()); - } // the document parsing exists the "doc" object, so we need to set the new current field. 
currentFieldName = parser.currentName(); } diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index e6ffa313e83..6ac0ca680c1 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -52,6 +52,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.percolator.PercolatorFieldMapper; @@ -201,7 +202,8 @@ public class PercolatorService extends AbstractComponent { // parse the source either into one MemoryIndex, if it is a single document or index multiple docs if nested PercolatorIndex percolatorIndex; - boolean isNested = indexShard.mapperService().documentMapper(request.documentType()).hasNestedObjects(); + DocumentMapper documentMapper = indexShard.mapperService().documentMapper(request.documentType()); + boolean isNested = documentMapper != null && documentMapper.hasNestedObjects(); if (parsedDocument.docs().size() > 1) { assert isNested; percolatorIndex = multi; diff --git a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java index 1271872cab6..1d5268e3794 100644 --- a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java +++ b/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.util.CloseableThreadLocal; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -49,11 +50,15 @@ class SingleDocumentPercolatorIndex implements PercolatorIndex { public void prepare(PercolateContext context, ParsedDocument parsedDocument) { MemoryIndex memoryIndex = cache.get(); for (IndexableField field : parsedDocument.rootDoc().getFields()) { + Analyzer analyzer = context.analysisService().defaultIndexAnalyzer(); + DocumentMapper documentMapper = context.mapperService().documentMapper(parsedDocument.type()); + if (documentMapper != null && documentMapper.mappers().getMapper(field.name()) != null) { + analyzer = documentMapper.mappers().indexAnalyzer(); + } if (field.fieldType().indexOptions() == IndexOptions.NONE && field.name().equals(UidFieldMapper.NAME)) { continue; } try { - Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer(); // TODO: instead of passing null here, we can have a CTL> and pass previous, // like the indexer does try (TokenStream tokenStream = field.tokenStream(analyzer, null)) { diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java index c50c191f8f4..eecc71f388a 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.TermQuery; import org.elasticsearch.Version; import org.elasticsearch.action.percolate.PercolateShardRequest; -import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -90,10 +89,7 @@ public class PercolateDocumentParserTests extends ESTestCase { HighlightPhase highlightPhase = new HighlightPhase(Settings.EMPTY, new Highlighters()); AggregatorParsers aggregatorParsers = new AggregatorParsers(Collections.emptySet(), Collections.emptySet()); AggregationPhase aggregationPhase = new AggregationPhase(new AggregationParseElement(aggregatorParsers), new AggregationBinaryParseElement(aggregatorParsers)); - MappingUpdatedAction mappingUpdatedAction = Mockito.mock(MappingUpdatedAction.class); - parser = new PercolateDocumentParser( - highlightPhase, new SortParseElement(), aggregationPhase, mappingUpdatedAction - ); + parser = new PercolateDocumentParser(highlightPhase, new SortParseElement(), aggregationPhase); request = Mockito.mock(PercolateShardRequest.class); Mockito.when(request.shardId()).thenReturn(new ShardId(new Index("_index"), 0)); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 4a15b65382c..22183bdcefc 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -175,7 +175,7 @@ public class PercolatorIT extends ESIntegTestCase { } public void testSimple2() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long,doc_values=true")); + assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long,doc_values=true", "field2", "type=string")); ensureGreen(); // introduce the doc @@ -1577,92 +1577,6 @@ public class PercolatorIT extends ESIntegTestCase { assertEquals(response.getMatches()[0].getId().string(), "Q"); } - public void testPercolationWithDynamicTemplates() throws Exception { - assertAcked(prepareCreate("idx").addMapping("type", 
jsonBuilder().startObject().startObject("type") - .field("dynamic", false) - .startObject("properties") - .startObject("custom") - .field("dynamic", true) - .field("type", "object") - .field("include_in_all", false) - .endObject() - .endObject() - .startArray("dynamic_templates") - .startObject() - .startObject("custom_fields") - .field("path_match", "custom.*") - .startObject("mapping") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject())); - ensureGreen("idx"); - - try { - client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryStringQuery("color:red")).endObject()) - .get(); - fail(); - } catch (MapperParsingException e) { - } - refresh(); - - PercolateResponse percolateResponse = client().preparePercolate().setDocumentType("type") - .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(jsonBuilder().startObject().startObject("custom").field("color", "blue").endObject().endObject())) - .get(); - - assertMatchCount(percolateResponse, 0l); - assertThat(percolateResponse.getMatches(), arrayWithSize(0)); - - // The previous percolate request introduced the custom.color field, so now we register the query again - // and the field name `color` will be resolved to `custom.color` field in mapping via smart field mapping resolving. - client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryStringQuery("custom.color:red")).endObject()) - .get(); - client().prepareIndex("idx", PercolatorService.TYPE_NAME, "2") - .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryStringQuery("custom.color:blue")).field("type", "type").endObject()) - .get(); - refresh(); - - // The second request will yield a match, since the query during the proper field during parsing. 
- percolateResponse = client().preparePercolate().setDocumentType("type") - .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(jsonBuilder().startObject().startObject("custom").field("color", "blue").endObject().endObject())) - .get(); - - assertMatchCount(percolateResponse, 1l); - assertThat(percolateResponse.getMatches()[0].getId().string(), equalTo("2")); - } - - public void testUpdateMappingDynamicallyWhilePercolating() throws Exception { - createIndex("test"); - ensureSearchable(); - - // percolation source - XContentBuilder percolateDocumentSource = XContentFactory.jsonBuilder().startObject().startObject("doc") - .field("field1", 1) - .field("field2", "value") - .endObject().endObject(); - - PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(percolateDocumentSource).execute().actionGet(); - assertAllSuccessful(response); - assertMatchCount(response, 0l); - assertThat(response.getMatches(), arrayWithSize(0)); - - assertMappingOnMaster("test", "type1"); - - GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings("test").get(); - assertThat(mappingsResponse.getMappings().get("test"), notNullValue()); - assertThat(mappingsResponse.getMappings().get("test").get("type1"), notNullValue()); - assertThat(mappingsResponse.getMappings().get("test").get("type1").getSourceAsMap().isEmpty(), is(false)); - Map properties = (Map) mappingsResponse.getMappings().get("test").get("type1").getSourceAsMap().get("properties"); - assertThat(((Map) properties.get("field1")).get("type"), equalTo("long")); - assertThat(((Map) properties.get("field2")).get("type"), equalTo("string")); - } - public void testDontReportDeletedPercolatorDocs() throws Exception { client().admin().indices().prepareCreate("test").execute().actionGet(); ensureGreen(); diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 76b1ddb417e..b4aa2d6d844 
100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -644,6 +644,10 @@ The percolate api can no longer accept documents that have fields that don't exi When percolating an existing document then specifying a document in the source of the percolate request is not allowed any more. +The percolate api no longer modifies the mappings. Before the percolate api could be used to dynamically introduce new +fields to the mappings based on the fields in the document being percolated. This no longer works, because these +unmapped fields are not persisted in the mapping. + Percolator documents are no longer excluded from the search response. [[breaking_30_packaging]] diff --git a/docs/reference/search/percolate.asciidoc b/docs/reference/search/percolate.asciidoc index 7f160d1a503..4ac1b6b35ea 100644 --- a/docs/reference/search/percolate.asciidoc +++ b/docs/reference/search/percolate.asciidoc @@ -20,14 +20,8 @@ in a request to the percolate API. ===================================== Fields referred to in a percolator query must *already* exist in the mapping -associated with the index used for percolation. -There are two ways to make sure that a field mapping exist: - -* Add or update a mapping via the <> or - <> APIs. -* Percolate a document before registering a query. Percolating a document can - add field mappings dynamically, in the same way as happens when indexing a - document. +associated with the index used for percolation. In order to make sure these fields exist, +add or update a mapping via the <> or <> APIs. 
===================================== diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml index d7e1fbdcc68..97d652366da 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml @@ -27,7 +27,7 @@ - do: percolate: index: test_index - type: test_type + type: type_1 body: doc: foo: "bar foo" From 80d79c9c11ab00c0a56756399ac70715e53bcc47 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 21 Jan 2016 16:11:34 +0100 Subject: [PATCH 317/347] Remove deprecated parameters This is in preparation of refactoring. Relates to #15178 --- .../search/sort/ScriptSortBuilder.java | 81 +------------------ .../search/sort/SortBuilders.java | 14 ---- 2 files changed, 2 insertions(+), 93 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 36b651a2220..e9a9c8df57c 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -22,11 +22,8 @@ package org.elasticsearch.search.sort; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService.ScriptType; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; /** * Script sort builder allows to sort based on a custom script expression. 
@@ -35,17 +32,8 @@ public class ScriptSortBuilder extends SortBuilder { private Script script; - @Deprecated - private String scriptString; - private final String type; - @Deprecated - private String lang; - - @Deprecated - private Map params; - private SortOrder order; private String sortMode; @@ -65,66 +53,6 @@ public class ScriptSortBuilder extends SortBuilder { this.type = type; } - /** - * Constructs a script sort builder with the script and the type. - * - * @param script - * The script to use. - * @param type - * The type, can either be "string" or "number". - * - * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead. - */ - @Deprecated - public ScriptSortBuilder(String script, String type) { - this.scriptString = script; - this.type = type; - } - - /** - * Adds a parameter to the script. - * - * @param name - * The name of the parameter. - * @param value - * The value of the parameter. - * - * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead. - */ - @Deprecated - public ScriptSortBuilder param(String name, Object value) { - if (params == null) { - params = new HashMap<>(); - } - params.put(name, value); - return this; - } - - /** - * Sets parameters for the script. - * - * @param params - * The script parameters - * - * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead. - */ - @Deprecated - public ScriptSortBuilder setParams(Map params) { - this.params = params; - return this; - } - - /** - * The language of the script. - * - * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead. - */ - @Deprecated - public ScriptSortBuilder lang(String lang) { - this.lang = lang; - return this; - } - /** * Sets the sort order. 
*/ @@ -172,12 +100,7 @@ public class ScriptSortBuilder extends SortBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { builder.startObject("_script"); - if (script == null) { - - builder.field("script", new Script(scriptString, ScriptType.INLINE, lang, params)); - } else { - builder.field("script", script); - } + builder.field("script", script); builder.field("type", type); if (order == SortOrder.DESC) { builder.field("reverse", true); @@ -189,7 +112,7 @@ public class ScriptSortBuilder extends SortBuilder { builder.field("nested_path", nestedPath); } if (nestedFilter != null) { - builder.field("nested_filter", nestedFilter, params); + builder.field("nested_filter", nestedFilter, builderParams); } builder.endObject(); return builder; diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java index 01134ca2648..9a843c43f74 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java @@ -54,20 +54,6 @@ public class SortBuilders { return new ScriptSortBuilder(script, type); } - /** - * Constructs a new script based sort. - * - * @param script - * The script to use. - * @param type - * The type, can either be "string" or "number". - * @deprecated Use {@link #scriptSort(Script, String)} instead. - */ - @Deprecated - public static ScriptSortBuilder scriptSort(String script, String type) { - return new ScriptSortBuilder(script, type); - } - /** * A geo distance based sort. 
* From 6aa1a4930ed412b8bdab6731d169fafbd7e59fd2 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 26 Jan 2016 11:56:36 +0100 Subject: [PATCH 318/347] Added back deprecation notices for _ttl and _timestamp --- docs/reference/docs/bulk.asciidoc | 4 ++++ docs/reference/docs/index_.asciidoc | 4 ++++ docs/reference/mapping/fields/timestamp-field.asciidoc | 2 ++ docs/reference/mapping/fields/ttl-field.asciidoc | 2 ++ 4 files changed, 12 insertions(+) diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index ef066eb9bb5..b9b7d4751e6 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -131,6 +131,8 @@ operation based on the `_parent` / `_routing` mapping. [[bulk-timestamp]] === Timestamp +deprecated[2.0.0,The `_timestamp` field is deprecated. Instead, use a normal <> field and set its value explicitly] + Each bulk item can include the timestamp value using the `_timestamp`/`timestamp` field. It automatically follows the behavior of the index operation based on the `_timestamp` mapping. @@ -139,6 +141,8 @@ the index operation based on the `_timestamp` mapping. [[bulk-ttl]] === TTL +deprecated[2.0.0,The current `_ttl` implementation is deprecated and will be replaced with a different implementation in a future version] + Each bulk item can include the ttl value using the `_ttl`/`ttl` field. It automatically follows the behavior of the index operation based on the `_ttl` mapping. diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 5f79efbcc60..27ac85b9595 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -258,6 +258,8 @@ specified using the `routing` parameter. [[index-timestamp]] === Timestamp +deprecated[2.0.0,The `_timestamp` field is deprecated. Instead, use a normal <> field and set its value explicitly] + A document can be indexed with a `timestamp` associated with it. 
The `timestamp` value of a document can be set using the `timestamp` parameter. For example: @@ -280,6 +282,8 @@ page>>. [[index-ttl]] === TTL +deprecated[2.0.0,The current `_ttl` implementation is deprecated and will be replaced with a different implementation in a future version] + A document can be indexed with a `ttl` (time to live) associated with it. Expired documents will be expunged automatically. The expiration diff --git a/docs/reference/mapping/fields/timestamp-field.asciidoc b/docs/reference/mapping/fields/timestamp-field.asciidoc index 5971a02c771..3f4bf8a8134 100644 --- a/docs/reference/mapping/fields/timestamp-field.asciidoc +++ b/docs/reference/mapping/fields/timestamp-field.asciidoc @@ -1,6 +1,8 @@ [[mapping-timestamp-field]] === `_timestamp` field +deprecated[2.0.0,The `_timestamp` field is deprecated. Instead, use a normal <> field and set its value explicitly] + The `_timestamp` field, when enabled, allows a timestamp to be indexed and stored with a document. The timestamp may be specified manually, generated automatically, or set to a default value: diff --git a/docs/reference/mapping/fields/ttl-field.asciidoc b/docs/reference/mapping/fields/ttl-field.asciidoc index d81582c9078..9bfdc72b4c4 100644 --- a/docs/reference/mapping/fields/ttl-field.asciidoc +++ b/docs/reference/mapping/fields/ttl-field.asciidoc @@ -1,6 +1,8 @@ [[mapping-ttl-field]] === `_ttl` field +deprecated[2.0.0,The current `_ttl` implementation is deprecated and will be replaced with a different implementation in a future version] + Some types of documents, such as session data or special offers, come with an expiration date. The `_ttl` field allows you to specify the minimum time a document should live, after which time the document is deleted automatically. 
From 7cde0d47bc3f1af9391f1d51e9901c8bcf959f0c Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 26 Jan 2016 12:55:46 +0100 Subject: [PATCH 319/347] Merge pull request #16215 from eemp/patch-1 Update filters-aggregation.asciidoc --- docs/reference/aggregations/bucket/filters-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc index 322dccb790f..a7e07acc660 100644 --- a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc @@ -146,7 +146,7 @@ The following snippet shows a response where the `other` bucket is requested to "aggs" : { "messages" : { "filters" : { - "other_bucket": "other_messages", + "other_bucket_key": "other_messages", "filters" : { "errors" : { "term" : { "body" : "error" }}, "warnings" : { "term" : { "body" : "warning" }} From 4e5316591a95b28528335e1905558bfa1c0c4c6a Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 26 Jan 2016 13:47:57 +0100 Subject: [PATCH 320/347] Update stats.asciidoc Renamed filter_cache->query_cache and removed id_cache Closes #16626 --- docs/reference/cluster/stats.asciidoc | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 8093dd32d7d..3f36ad6df25 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -57,15 +57,11 @@ Will return, for example: "memory_size_in_bytes": 0, "evictions": 0 }, - "filter_cache": { + "query_cache": { "memory_size": "0b", "memory_size_in_bytes": 0, "evictions": 0 }, - "id_cache": { - "memory_size": "0b", - "memory_size_in_bytes": 0 - }, "completion": { "size": "0b", "size_in_bytes": 0 From 80b74a48859c8e0a409386789d1bb26c771f6a12 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 26 Jan 2016 10:30:14 
+0100 Subject: [PATCH 321/347] migrate node.client, node.data and node.master to new Setting infra --- .../client/transport/TransportClient.java | 3 ++- .../cluster/node/DiscoveryNode.java | 19 +++++++---------- .../common/settings/ClusterSettings.java | 3 +++ .../java/org/elasticsearch/node/Node.java | 8 +++++-- .../org/elasticsearch/tribe/TribeService.java | 4 ++-- .../admin/cluster/stats/ClusterStatsIT.java | 7 ++++--- .../OldIndexBackwardsCompatibilityIT.java | 7 ++++--- .../client/transport/TransportClientIT.java | 4 ++-- .../cluster/ClusterServiceIT.java | 3 ++- .../cluster/SimpleDataNodesIT.java | 7 ++++--- .../cluster/SpecificMasterNodesIT.java | 21 ++++++++++--------- .../cluster/UpdateSettingsValidationIT.java | 7 ++++--- .../discovery/zen/ZenDiscoveryIT.java | 9 ++++---- .../PublishClusterStateActionTests.java | 5 +++-- .../gateway/GatewayIndexStateIT.java | 9 ++++---- .../gateway/RecoverAfterNodesIT.java | 17 ++++++++------- .../index/store/CorruptedFileIT.java | 3 ++- .../DedicatedMasterGetFieldMappingIT.java | 3 ++- .../store/IndicesStoreIntegrationIT.java | 16 +++++++------- .../DedicatedClusterSnapshotRestoreIT.java | 7 ++++--- .../test/ESSingleNodeTestCase.java | 2 +- .../test/InternalTestCluster.java | 16 +++++++------- 22 files changed, 99 insertions(+), 81 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 9930a9d1539..ea809a8cc38 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.indices.breaker.CircuitBreakerModule; import org.elasticsearch.monitor.MonitorService; +import org.elasticsearch.node.Node; import 
org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; @@ -113,7 +114,7 @@ public class TransportClient extends AbstractClient { .put(NettyTransport.PING_SCHEDULE, "5s") // enable by default the transport schedule ping interval .put( InternalSettingsPreparer.prepareSettings(settings)) .put("network.server", false) - .put("node.client", true) + .put(Node.NODE_CLIENT_SETTING.getKey(), true) .put(CLIENT_TYPE_SETTING, CLIENT_TYPE); return new PluginsService(settingsBuilder.build(), null, null, pluginClasses); } diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index e05bab6d4a4..d1523697ed5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -64,28 +64,25 @@ public class DiscoveryNode implements Streamable, ToXContent { } public static boolean nodeRequiresLocalStorage(Settings settings) { - return !(settings.getAsBoolean("node.client", false) || (!settings.getAsBoolean("node.data", true) && !settings.getAsBoolean("node.master", true))); + return (Node.NODE_CLIENT_SETTING.get(settings) || (Node.NODE_DATA_SETTING.get(settings) == false && Node.NODE_MASTER_SETTING.get(settings) == false)) == false; } public static boolean clientNode(Settings settings) { - String client = settings.get("node.client"); - return Booleans.isExplicitTrue(client); + return Node.NODE_CLIENT_SETTING.get(settings); } public static boolean masterNode(Settings settings) { - String master = settings.get("node.master"); - if (master == null) { - return !clientNode(settings); + if (Node.NODE_MASTER_SETTING.exists(settings)) { + return Node.NODE_MASTER_SETTING.get(settings); } - return Booleans.isExplicitTrue(master); + return clientNode(settings) == false; } public static boolean dataNode(Settings 
settings) { - String data = settings.get("node.data"); - if (data == null) { - return !clientNode(settings); + if (Node.NODE_DATA_SETTING.exists(settings)) { + return Node.NODE_DATA_SETTING.get(settings); } - return Booleans.isExplicitTrue(data); + return clientNode(settings) == false; } public static boolean ingestNode(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 05978a13053..499faf5c6ab 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -249,6 +249,9 @@ public final class ClusterSettings extends AbstractScopedSettings { SearchService.DEFAULT_KEEPALIVE_SETTING, SearchService.KEEPALIVE_INTERVAL_SETTING, Node.WRITE_PORTS_FIELD_SETTING, + Node.NODE_CLIENT_SETTING, + Node.NODE_DATA_SETTING, + Node.NODE_MASTER_SETTING, URLRepository.ALLOWED_URLS_SETTING, URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING, URLRepository.REPOSITORIES_URL_SETTING, diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index f51c7e27dfc..2a0f550980c 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -121,9 +121,13 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; */ public class Node implements Releasable { - public static final Setting NODE_INGEST_SETTING = Setting.boolSetting("node.ingest", true, false, Setting.Scope.CLUSTER); - private static final String CLIENT_TYPE = "node"; public static final Setting WRITE_PORTS_FIELD_SETTING = Setting.boolSetting("node.portsfile", false, false, Setting.Scope.CLUSTER); + public static final Setting NODE_CLIENT_SETTING = Setting.boolSetting("node.client", false, false, Setting.Scope.CLUSTER); + public static final Setting 
NODE_DATA_SETTING = Setting.boolSetting("node.data", true, false, Setting.Scope.CLUSTER); + public static final Setting NODE_MASTER_SETTING = Setting.boolSetting("node.master", true, false, Setting.Scope.CLUSTER); + public static final Setting NODE_INGEST_SETTING = Setting.boolSetting("node.ingest", true, false, Setting.Scope.CLUSTER); + + private static final String CLIENT_TYPE = "node"; private final Lifecycle lifecycle = new Lifecycle(); private final Injector injector; private final Settings settings; diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 48f04e3690e..ce6185a6473 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -101,7 +101,7 @@ public class TribeService extends AbstractLifecycleComponent { } // its a tribe configured node..., force settings Settings.Builder sb = Settings.builder().put(settings); - sb.put("node.client", true); // this node should just act as a node client + sb.put(Node.NODE_CLIENT_SETTING.getKey(), true); // this node should just act as a node client sb.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local"); // a tribe node should not use zen discovery sb.put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); // nothing is going to be discovered, since no master will be elected if (sb.get("cluster.name") == null) { @@ -139,7 +139,7 @@ public class TribeService extends AbstractLifecycleComponent { if (sb.get("http.enabled") == null) { sb.put("http.enabled", false); } - sb.put("node.client", true); + sb.put(Node.NODE_CLIENT_SETTING.getKey(), true); nodes.add(new TribeClientNode(sb.build())); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 56eaad11f94..1e8915d9448 100644 --- 
a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.store.Store; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -60,17 +61,17 @@ public class ClusterStatsIT extends ESIntegTestCase { ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get(); assertCounts(response.getNodesStats().getCounts(), 1, 0, 0, 1, 0); - internalCluster().startNode(Settings.builder().put("node.data", false)); + internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false)); waitForNodes(2); response = client().admin().cluster().prepareClusterStats().get(); assertCounts(response.getNodesStats().getCounts(), 2, 1, 0, 1, 0); - internalCluster().startNode(Settings.builder().put("node.master", false)); + internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); waitForNodes(3); response = client().admin().cluster().prepareClusterStats().get(); assertCounts(response.getNodesStats().getCounts(), 3, 1, 1, 1, 0); - internalCluster().startNode(Settings.builder().put("node.client", true)); + internalCluster().startNode(Settings.builder().put(Node.NODE_CLIENT_SETTING.getKey(), true)); waitForNodes(4); response = client().admin().cluster().prepareClusterStats().get(); assertCounts(response.getNodesStats().getCounts(), 4, 1, 1, 1, 1); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 
74881413799..5c351abbce3 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -47,10 +47,11 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.MergePolicyConfig; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -144,13 +145,13 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // start single data path node Settings.Builder nodeSettings = Settings.builder() .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("single-path").toAbsolutePath()) - .put("node.master", false); // workaround for dangling index loading issue when node is master + .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master InternalTestCluster.Async singleDataPathNode = internalCluster().startNodeAsync(nodeSettings.build()); // start multi data path node nodeSettings = Settings.builder() .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath()) - .put("node.master", false); // workaround for dangling index loading issue when node is master + .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master InternalTestCluster.Async multiDataPathNode = 
internalCluster().startNodeAsync(nodeSettings.build()); // find single data path dir diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index 8ab432d6e41..3bf05f21c4c 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.startsWith; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 1.0) public class TransportClientIT extends ESIntegTestCase { public void testPickingUpChangesInDiscoveryNode() { - String nodeName = internalCluster().startNode(Settings.builder().put("node.data", false)); + String nodeName = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false)); TransportClient client = (TransportClient) internalCluster().client(nodeName); assertThat(client.connectedNodes().get(0).dataNode(), equalTo(false)); @@ -56,7 +56,7 @@ public class TransportClientIT extends ESIntegTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("node.name", "testNodeVersionIsUpdated") .put("http.enabled", false) - .put("node.data", false) + .put(Node.NODE_DATA_SETTING.getKey(), false) .put("cluster.name", "foobar") .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 72d58f7f70e..4819a97fce7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.Singleton; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -305,7 +306,7 @@ public class ClusterServiceIT extends ESIntegTestCase { .build(); InternalTestCluster.Async master = internalCluster().startNodeAsync(settings); - InternalTestCluster.Async nonMaster = internalCluster().startNodeAsync(settingsBuilder().put(settings).put("node.master", false).build()); + InternalTestCluster.Async nonMaster = internalCluster().startNodeAsync(settingsBuilder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), false).build()); master.get(); ensureGreen(); // make sure we have a cluster diff --git a/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java index bc3aea4ac76..44565a269bd 100644 --- a/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Priority; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -38,7 +39,7 @@ import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope= Scope.TEST, numDataNodes =0) public class SimpleDataNodesIT extends ESIntegTestCase { public void testDataNodes() throws Exception { - internalCluster().startNode(settingsBuilder().put("node.data", false).build()); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build()); 
client().admin().indices().create(createIndexRequest("test")).actionGet(); try { client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test")).timeout(timeValueSeconds(1))).actionGet(); @@ -47,7 +48,7 @@ public class SimpleDataNodesIT extends ESIntegTestCase { // all is well } - internalCluster().startNode(settingsBuilder().put("node.data", false).build()); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build()); assertThat(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").setLocal(true).execute().actionGet().isTimedOut(), equalTo(false)); // still no shard should be allocated @@ -59,7 +60,7 @@ public class SimpleDataNodesIT extends ESIntegTestCase { } // now, start a node data, and see that it gets with shards - internalCluster().startNode(settingsBuilder().put("node.data", true).build()); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).build()); assertThat(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("3").setLocal(true).execute().actionGet().isTimedOut(), equalTo(false)); IndexResponse indexResponse = client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet(); diff --git a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 90c39d7bbec..934a4d0cb84 100644 --- a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.node.Node; import 
org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -43,7 +44,7 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { public void testSimpleOnlyMasterNodeElection() throws IOException { logger.info("--> start data node / non master node"); - internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false).put("discovery.initial_state_timeout", "1s")); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false).put("discovery.initial_state_timeout", "1s")); try { assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("100ms").execute().actionGet().getState().nodes().masterNodeId(), nullValue()); fail("should not be able to find master"); @@ -51,7 +52,7 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { // all is well, no master elected } logger.info("--> start master node"); - final String masterNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true)); + final String masterNodeName = internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName)); assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName)); @@ -66,14 +67,14 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { } logger.info("--> start master node"); - final String nextMasterEligibleNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true)); + final String nextMasterEligibleNodeName = 
internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(nextMasterEligibleNodeName)); assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(nextMasterEligibleNodeName)); } public void testElectOnlyBetweenMasterNodes() throws IOException { logger.info("--> start data node / non master node"); - internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false).put("discovery.initial_state_timeout", "1s")); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false).put("discovery.initial_state_timeout", "1s")); try { assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("100ms").execute().actionGet().getState().nodes().masterNodeId(), nullValue()); fail("should not be able to find master"); @@ -81,12 +82,12 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { // all is well, no master elected } logger.info("--> start master node (1)"); - final String masterNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true)); + final String masterNodeName = internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName)); assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName)); logger.info("--> start master node (2)"); - final String 
nextMasterEligableNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true)); + final String nextMasterEligableNodeName = internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName)); assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName)); assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName)); @@ -103,10 +104,10 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { */ public void testCustomDefaultMapping() throws Exception { logger.info("--> start master node / non data"); - internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true)); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); logger.info("--> start data node / non master node"); - internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false)); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); createIndex("test"); assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("_timestamp", "enabled=true")); @@ -123,10 +124,10 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { public void testAliasFilterValidation() throws Exception { logger.info("--> start master node / non data"); - internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true)); + 
internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); logger.info("--> start data node / non master node"); - internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false)); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); assertAcked(prepareCreate("test").addMapping("type1", "{\"type1\" : {\"properties\" : {\"table_a\" : { \"type\" : \"nested\", \"properties\" : {\"field_a\" : { \"type\" : \"string\" },\"field_b\" :{ \"type\" : \"string\" }}}}}}")); client().admin().indices().prepareAliases().addAlias("test", "a_test", QueryBuilders.nestedQuery("table_a", QueryBuilders.termQuery("table_a.field_b", "y"))).get(); diff --git a/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java b/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java index 526f64a8b4e..43a455cb84a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.cluster; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.common.Priority; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -36,9 +37,9 @@ import static org.hamcrest.Matchers.equalTo; public class UpdateSettingsValidationIT extends ESIntegTestCase { public void testUpdateSettingsValidation() throws Exception { List nodes = internalCluster().startNodesAsync( - settingsBuilder().put("node.data", false).build(), - settingsBuilder().put("node.master", false).build(), - settingsBuilder().put("node.master", false).build() + 
settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build(), + settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build(), + settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build() ).get(); String master = nodes.get(0); String node_1 = nodes.get(1); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index eb17ab29d33..fbc1e687241 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -45,6 +45,7 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.discovery.zen.membership.MembershipAction; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.TestCustomMetaData; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -101,12 +102,12 @@ public class ZenDiscoveryIT extends ESIntegTestCase { .build(); Settings masterNodeSettings = Settings.builder() - .put("node.data", false) + .put(Node.NODE_DATA_SETTING.getKey(), false) .put(defaultSettings) .build(); internalCluster().startNodesAsync(2, masterNodeSettings).get(); Settings dateNodeSettings = Settings.builder() - .put("node.master", false) + .put(Node.NODE_MASTER_SETTING.getKey(), false) .put(defaultSettings) .build(); internalCluster().startNodesAsync(2, dateNodeSettings).get(); @@ -148,12 +149,12 @@ public class ZenDiscoveryIT extends ESIntegTestCase { .build(); Settings masterNodeSettings = Settings.builder() - .put("node.data", false) + .put(Node.NODE_DATA_SETTING.getKey(), false) .put(defaultSettings) .build(); String master = internalCluster().startNode(masterNodeSettings); Settings dateNodeSettings = Settings.builder() - 
.put("node.master", false) + .put(Node.NODE_MASTER_SETTING.getKey(), false) .put(defaultSettings) .build(); internalCluster().startNodesAsync(2, dateNodeSettings).get(); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 6faa02e16d7..738c6713276 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; +import org.elasticsearch.node.Node; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -486,7 +487,7 @@ public class PublishClusterStateActionTests extends ESTestCase { discoveryNodesBuilder.put(createMockNode("node" + i).discoveryNode); } final int dataNodes = randomIntBetween(0, 5); - final Settings dataSettings = Settings.builder().put("node.master", false).build(); + final Settings dataSettings = Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build(); for (int i = 0; i < dataNodes; i++) { discoveryNodesBuilder.put(createMockNode("data_" + i, dataSettings).discoveryNode); } @@ -544,7 +545,7 @@ public class PublishClusterStateActionTests extends ESTestCase { } final int dataNodes = randomIntBetween(0, 3); // data nodes don't matter for (int i = 0; i < dataNodes; i++) { - final MockNode mockNode = createMockNode("data_" + i, Settings.builder().put("node.master", false).build()); + final MockNode mockNode = createMockNode("data_" + i, Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build()); 
discoveryNodesBuilder.put(mockNode.discoveryNode); if (randomBoolean()) { // we really don't care - just chaos monkey diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index c804239c694..3dbb39d6281 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.indices.IndexClosedException; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -180,7 +181,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { logger.info("--> cleaning nodes"); logger.info("--> starting 1 master node non data"); - internalCluster().startNode(settingsBuilder().put("node.data", false).build()); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build()); logger.info("--> create an index"); client().admin().indices().prepareCreate("test").execute().actionGet(); @@ -189,7 +190,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { internalCluster().closeNonSharedNodes(false); logger.info("--> starting 1 master node non data again"); - internalCluster().startNode(settingsBuilder().put("node.data", false).build()); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build()); logger.info("--> waiting for test index to be created"); ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test").execute().actionGet(); @@ -204,8 +205,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase 
{ logger.info("--> cleaning nodes"); logger.info("--> starting 1 master node non data"); - internalCluster().startNode(settingsBuilder().put("node.data", false).build()); - internalCluster().startNode(settingsBuilder().put("node.master", false).build()); + internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build()); + internalCluster().startNode(settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build()); logger.info("--> create an index"); client().admin().indices().prepareCreate("test").execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java index 3dd6597a6eb..59f7dd24644 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -81,13 +82,13 @@ public class RecoverAfterNodesIT extends ESIntegTestCase { public void testRecoverAfterMasterNodes() throws Exception { logger.info("--> start master_node (1)"); - Client master1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", false).put("node.master", true)); + Client master1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet() .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE), 
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); logger.info("--> start data_node (1)"); - Client data1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", true).put("node.master", false)); + Client data1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet() .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE), hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); @@ -96,7 +97,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase { hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); logger.info("--> start data_node (2)"); - Client data2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", true).put("node.master", false)); + Client data2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet() .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE), hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); @@ -108,7 +109,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase { hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); logger.info("--> start master_node (2)"); - Client master2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", false).put("node.master", true)); + Client master2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true)); assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true)); 
assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true)); @@ -117,13 +118,13 @@ public class RecoverAfterNodesIT extends ESIntegTestCase { public void testRecoverAfterDataNodes() throws Exception { logger.info("--> start master_node (1)"); - Client master1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", false).put("node.master", true)); + Client master1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet() .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE), hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); logger.info("--> start data_node (1)"); - Client data1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", true).put("node.master", false)); + Client data1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet() .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE), hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); @@ -132,7 +133,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase { hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); logger.info("--> start master_node (2)"); - Client master2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", false).put("node.master", true)); + Client master2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); assertThat(master2.admin().cluster().prepareState().setLocal(true).execute().actionGet() 
.getState().blocks().global(ClusterBlockLevel.METADATA_WRITE), hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); @@ -144,7 +145,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase { hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK)); logger.info("--> start data_node (2)"); - Client data2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", true).put("node.master", false)); + Client data2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true)); assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true)); assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 2c9de235b7d..0f680307455 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -58,6 +58,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.recovery.RecoveryFileChunkRequest; import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.monitor.fs.FsInfo; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.CorruptionUtils; @@ -372,7 +373,7 @@ public class CorruptedFileIT extends ESIntegTestCase { int numDocs = scaledRandomIntBetween(100, 1000); internalCluster().ensureAtLeastNumDataNodes(2); if (cluster().numDataNodes() < 3) { - internalCluster().startNode(Settings.builder().put("node.data", true).put("node.client", false).put("node.master", false)); + 
internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); } NodesStatsResponse nodeStats = client().admin().cluster().prepareNodesStats().get(); List dataNodeStats = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java index 62745a64245..1e51133eeb8 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.mapping; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; import org.junit.Before; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -32,7 +33,7 @@ public class DedicatedMasterGetFieldMappingIT extends SimpleGetFieldMappingsIT { @Before public void before1() throws Exception { Settings settings = settingsBuilder() - .put("node.data", false) + .put(Node.NODE_DATA_SETTING.getKey(), false) .build(); internalCluster().startNodesAsync(settings, Settings.EMPTY).get(); } diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 48f01ab0852..ee4c79b50b5 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -36,7 +36,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import 
org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; @@ -47,6 +46,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoverySource; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -103,9 +103,9 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { } public void testIndexCleanup() throws Exception { - final String masterNode = internalCluster().startNode(Settings.builder().put("node.data", false)); - final String node_1 = internalCluster().startNode(Settings.builder().put("node.master", false)); - final String node_2 = internalCluster().startNode(Settings.builder().put("node.master", false)); + final String masterNode = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false)); + final String node_1 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); + final String node_2 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); logger.info("--> creating index [test] with one shard and on replica"); assertAcked(prepareCreate("test").setSettings( Settings.builder().put(indexSettings()) @@ -121,7 +121,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true)); logger.info("--> starting node server3"); - final String node_3 = internalCluster().startNode(Settings.builder().put("node.master", false)); + final String node_3 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); 
logger.info("--> running cluster_health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth() .setWaitForNodes("4") @@ -293,9 +293,9 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { @TestLogging("cluster.service:TRACE") public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception { InternalTestCluster.Async masterFuture = internalCluster().startNodeAsync( - Settings.builder().put("node.master", true, "node.data", false).build()); + Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), true, Node.NODE_DATA_SETTING.getKey(), false).build()); InternalTestCluster.Async> nodesFutures = internalCluster().startNodesAsync(4, - Settings.builder().put("node.master", false, "node.data", true).build()); + Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false, Node.NODE_DATA_SETTING.getKey(), true).build()); final String masterNode = masterFuture.get(); final String node1 = nodesFutures.get().get(0); @@ -355,7 +355,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { logger.debug("--> starting the two old nodes back"); internalCluster().startNodesAsync(2, - Settings.builder().put("node.master", false, "node.data", true).build()); + Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false, Node.NODE_DATA_SETTING.getKey(), true).build()); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("5").get().isTimedOut()); diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 22bef614bf6..bd6c2533652 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -47,6 +47,7 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.index.store.IndexStore; import 
org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.rest.RestChannel; @@ -609,7 +610,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest internalCluster().startNode(); logger.info("--> start second node"); // Make sure the first node is elected as master - internalCluster().startNode(settingsBuilder().put("node.master", false)); + internalCluster().startNode(settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false)); // Register mock repositories for (int i = 0; i < 5; i++) { client().admin().cluster().preparePutRepository("test-repo" + i) @@ -784,8 +785,8 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest } public void testMasterShutdownDuringSnapshot() throws Exception { - Settings masterSettings = settingsBuilder().put("node.data", false).build(); - Settings dataSettings = settingsBuilder().put("node.master", false).build(); + Settings masterSettings = settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build(); + Settings dataSettings = settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build(); logger.info("--> starting two master nodes and two data nodes"); internalCluster().startNode(masterSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index f73839c5cec..848d5a770f6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -171,7 +171,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created .put("http.enabled", 
false) .put("node.local", true) - .put("node.data", true) + .put(Node.NODE_DATA_SETTING.getKey(), true) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) .build(); Node build = new MockNode(settings, getVersion(), getPlugins()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 9fb3eec8489..284f1133fb9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -670,7 +670,7 @@ public final class InternalTestCluster extends TestCluster { public synchronized Client startNodeClient(Settings settings) { ensureOpen(); // currently unused - Builder builder = settingsBuilder().put(settings).put("node.client", true); + Builder builder = settingsBuilder().put(settings).put(Node.NODE_CLIENT_SETTING.getKey(), true); if (size() == 0) { // if we are the first node - don't wait for a state builder.put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); @@ -951,7 +951,7 @@ public final class InternalTestCluster extends TestCluster { NodeAndClient nodeAndClient = nodes.get(buildNodeName); if (nodeAndClient == null) { changed = true; - Builder clientSettingsBuilder = Settings.builder().put("node.client", true); + Builder clientSettingsBuilder = Settings.builder().put(Node.NODE_CLIENT_SETTING.getKey(), true); nodeAndClient = buildNode(i, sharedNodesSeeds[i], clientSettingsBuilder.build(), Version.CURRENT); nodeAndClient.node.start(); logger.info("Start Shared Node [{}] not shared", nodeAndClient.name); @@ -1461,7 +1461,7 @@ public final class InternalTestCluster extends TestCluster { } public synchronized Async> startMasterOnlyNodesAsync(int numNodes, Settings settings) { - Settings settings1 = Settings.builder().put(settings).put("node.master", true).put("node.data", 
false).build(); + Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), true).put(Node.NODE_DATA_SETTING.getKey(), false).build(); return startNodesAsync(numNodes, settings1, Version.CURRENT); } @@ -1470,7 +1470,7 @@ public final class InternalTestCluster extends TestCluster { } public synchronized Async> startDataOnlyNodesAsync(int numNodes, Settings settings) { - Settings settings1 = Settings.builder().put(settings).put("node.master", false).put("node.data", true).build(); + Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), false).put(Node.NODE_DATA_SETTING.getKey(), true).build(); return startNodesAsync(numNodes, settings1, Version.CURRENT); } @@ -1479,12 +1479,12 @@ public final class InternalTestCluster extends TestCluster { } public synchronized Async startMasterOnlyNodeAsync(Settings settings) { - Settings settings1 = Settings.builder().put(settings).put("node.master", true).put("node.data", false).build(); + Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), true).put(Node.NODE_DATA_SETTING.getKey(), false).build(); return startNodeAsync(settings1, Version.CURRENT); } public synchronized String startMasterOnlyNode(Settings settings) { - Settings settings1 = Settings.builder().put(settings).put("node.master", true).put("node.data", false).build(); + Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), true).put(Node.NODE_DATA_SETTING.getKey(), false).build(); return startNode(settings1, Version.CURRENT); } @@ -1493,12 +1493,12 @@ public final class InternalTestCluster extends TestCluster { } public synchronized Async startDataOnlyNodeAsync(Settings settings) { - Settings settings1 = Settings.builder().put(settings).put("node.master", false).put("node.data", true).build(); + Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), 
false).put(Node.NODE_DATA_SETTING.getKey(), true).build(); return startNodeAsync(settings1, Version.CURRENT); } public synchronized String startDataOnlyNode(Settings settings) { - Settings settings1 = Settings.builder().put(settings).put("node.master", false).put("node.data", true).build(); + Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), false).put(Node.NODE_DATA_SETTING.getKey(), true).build(); return startNode(settings1, Version.CURRENT); } From 61630c2b27ad9c95a344b8672587c23ad86ef335 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 26 Jan 2016 12:23:35 +0100 Subject: [PATCH 322/347] migrate node.local and node.mode to new Setting infra --- .../elasticsearch/cluster/node/DiscoveryNode.java | 8 ++++---- .../common/settings/ClusterSettings.java | 2 ++ .../main/java/org/elasticsearch/node/Node.java | 3 +++ .../client/transport/TransportClientRetryIT.java | 3 ++- .../discovery/DiscoveryModuleTests.java | 9 +++++---- .../transport/netty/NettyTransportIT.java | 3 ++- .../NettyTransportMultiPortIntegrationIT.java | 3 ++- .../netty/NettyTransportPublishAddressIT.java | 3 ++- .../AbstractAzureComputeServiceTestCase.java | 3 ++- .../org/elasticsearch/tribe/TribeUnitTests.java | 2 +- .../smoketest/ESSmokeClientTestCase.java | 4 ++-- .../elasticsearch/test/ESBackcompatTestCase.java | 3 ++- .../elasticsearch/test/ESSingleNodeTestCase.java | 2 +- .../java/org/elasticsearch/test/ExternalNode.java | 3 ++- .../elasticsearch/test/ExternalTestCluster.java | 3 ++- .../elasticsearch/test/InternalTestCluster.java | 15 ++++++++++----- 16 files changed, 44 insertions(+), 25 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index d1523697ed5..d8504a210c1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -47,11 +47,11 
@@ import static org.elasticsearch.common.transport.TransportAddressSerializers.add public class DiscoveryNode implements Streamable, ToXContent { public static boolean localNode(Settings settings) { - if (settings.get("node.local") != null) { - return settings.getAsBoolean("node.local", false); + if (Node.NODE_LOCAL_SETTING.exists(settings)) { + return Node.NODE_LOCAL_SETTING.get(settings); } - if (settings.get("node.mode") != null) { - String nodeMode = settings.get("node.mode"); + if (Node.NODE_MODE_SETTING.exists(settings)) { + String nodeMode = Node.NODE_MODE_SETTING.get(settings); if ("local".equals(nodeMode)) { return true; } else if ("network".equals(nodeMode)) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 499faf5c6ab..13424a5a17c 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -252,6 +252,8 @@ public final class ClusterSettings extends AbstractScopedSettings { Node.NODE_CLIENT_SETTING, Node.NODE_DATA_SETTING, Node.NODE_MASTER_SETTING, + Node.NODE_LOCAL_SETTING, + Node.NODE_MODE_SETTING, URLRepository.ALLOWED_URLS_SETTING, URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING, URLRepository.REPOSITORIES_URL_SETTING, diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 2a0f550980c..dae76019ed4 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -112,6 +112,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -125,6 +126,8 @@ public class Node implements Releasable { public static final 
Setting NODE_CLIENT_SETTING = Setting.boolSetting("node.client", false, false, Setting.Scope.CLUSTER); public static final Setting NODE_DATA_SETTING = Setting.boolSetting("node.data", true, false, Setting.Scope.CLUSTER); public static final Setting NODE_MASTER_SETTING = Setting.boolSetting("node.master", true, false, Setting.Scope.CLUSTER); + public static final Setting NODE_LOCAL_SETTING = Setting.boolSetting("node.local", false, false, Setting.Scope.CLUSTER); + public static final Setting NODE_MODE_SETTING = new Setting<>("node.mode", "network", Function.identity(), false, Setting.Scope.CLUSTER); public static final Setting NODE_INGEST_SETTING = Setting.boolSetting("node.ingest", true, false, Setting.Scope.CLUSTER); private static final String CLIENT_TYPE = "node"; diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index e5367d1da42..265f0614b7a 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -55,7 +56,7 @@ public class TransportClientRetryIT extends ESIntegTestCase { Settings.Builder builder = settingsBuilder().put("client.transport.nodes_sampler_interval", "1s") .put("name", "transport_client_retry_test") - .put("node.mode", internalCluster().getNodeMode()) + .put(Node.NODE_MODE_SETTING.getKey(), internalCluster().getNodeMode()) .put(ClusterName.SETTING, 
internalCluster().getClusterName()) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 2b3f918d545..64b1f5756cf 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.local.LocalDiscovery; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.node.Node; import org.elasticsearch.node.service.NodeService; /** @@ -45,7 +46,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testRegisterMasterElectionService() { - Settings settings = Settings.builder().put("node.local", false). + Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), false). put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); @@ -54,7 +55,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { } public void testLoadUnregisteredMasterElectionService() { - Settings settings = Settings.builder().put("node.local", false). + Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), false). 
put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "foobar").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); @@ -63,14 +64,14 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testRegisterDefaults() { boolean local = randomBoolean(); - Settings settings = Settings.builder().put("node.local", local).build(); + Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), local).build(); DiscoveryModule module = new DiscoveryModule(settings); assertBinding(module, Discovery.class, local ? LocalDiscovery.class : ZenDiscovery.class); } public void testRegisterDiscovery() { boolean local = randomBoolean(); - Settings settings = Settings.builder().put("node.local", local). + Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), local). put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addDiscoveryType("custom", DummyDisco.class); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java index 55f9bc49df3..ce090cd1cbc 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -65,7 +66,7 @@ public class NettyTransportIT extends ESIntegTestCase { @Override protected Settings 
nodeSettings(int nodeOrdinal) { return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put("node.mode", "network") + .put(Node.NODE_MODE_SETTING.getKey(), "network") .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build(); } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java index f4dccc77161..f936b5f6756 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -62,7 +63,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { .put(super.nodeSettings(nodeOrdinal)) .put("network.host", "127.0.0.1") .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") - .put("node.mode", "network") + .put(Node.NODE_MODE_SETTING.getKey(), "network") .put("transport.profiles.client1.port", randomPortRange) .put("transport.profiles.client1.publish_host", "127.0.0.7") .put("transport.profiles.client1.publish_port", "4321") diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java index ea67ce32717..0fceda31664 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java +++ 
b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import java.net.Inet4Address; @@ -46,7 +47,7 @@ public class NettyTransportPublishAddressIT extends ESIntegTestCase { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") - .put("node.mode", "network").build(); + .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); } public void testDifferentPorts() throws Exception { diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java index 9babfb4c2ad..01f3aff7401 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.cloud.azure.management.AzureComputeService.Discovery; import org.elasticsearch.cloud.azure.management.AzureComputeService.Management; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -44,7 +45,7 @@ public abstract class AbstractAzureComputeServiceTestCase extends ESIntegTestCas .put(super.nodeSettings(nodeOrdinal)) .put("discovery.type", "azure") // We 
need the network to make the mock working - .put("node.mode", "network"); + .put(Node.NODE_MODE_SETTING.getKey(), "network"); // We add a fake subscription_id to start mock compute service builder.put(Management.SUBSCRIPTION_ID, "fake") diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 24560aa8b1c..d0b37f8c232 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -56,7 +56,7 @@ public class TribeUnitTests extends ESTestCase { public static void createTribes() { Settings baseSettings = Settings.builder() .put("http.enabled", false) - .put("node.mode", NODE_MODE) + .put(Node.NODE_MODE_SETTING.getKey(), NODE_MODE) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); tribe1 = new TribeClientNode( diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 3fea66459c2..bd9f42490b2 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.junit.After; import org.junit.AfterClass; @@ -39,7 +40,6 @@ import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URL; -import java.net.UnknownHostException; import java.nio.file.Path; import java.util.Locale; import 
java.util.concurrent.atomic.AtomicInteger; @@ -81,7 +81,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) - .put("node.mode", "network").build(); // we require network here! + .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); TransportClient client = transportClientBuilder.build().addTransportAddresses(transportAddresses); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 49644196da4..71bcca341b8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.junit.listeners.LoggingListener; @@ -241,7 +242,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { protected Settings commonNodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(requiredSettings()); builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty"); // run same transport / disco as external - builder.put("node.mode", "network"); + builder.put(Node.NODE_MODE_SETTING.getKey(), "network"); return builder.build(); } diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 848d5a770f6..cfd4691c3a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -170,7 +170,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put("script.indexed", "on") .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created .put("http.enabled", false) - .put("node.local", true) + .put(Node.NODE_LOCAL_SETTING.getKey(), true) .put(Node.NODE_DATA_SETTING.getKey(), true) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) .build(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 5d169fc6acd..8c07111742a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import java.io.Closeable; @@ -54,7 +55,7 @@ final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") - .put("node.mode", "network").build(); // we need network mode for this + .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we need network mode for this private final Path path; private final Random 
random; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 0b3facca05d..97e0d0f1642 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; @@ -76,7 +77,7 @@ public final class ExternalTestCluster extends TestCluster { .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) - .put("node.mode", "network").build(); // we require network here! + .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! 
TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); for (Class pluginClass : pluginClasses) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 284f1133fb9..767fbd983f6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -291,7 +291,7 @@ public final class InternalTestCluster extends TestCluster { builder.put("transport.tcp.port", TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER)); builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER)); builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true); - builder.put("node.mode", nodeMode); + builder.put(Node.NODE_MODE_SETTING.getKey(), nodeMode); builder.put("http.pipelining", enableHttpPipelining); if (Strings.hasLength(System.getProperty("es.logger.level"))) { builder.put("logger.level", System.getProperty("es.logger.level")); @@ -322,10 +322,10 @@ public final class InternalTestCluster extends TestCluster { return "local"; // default if nothing is specified } if (Strings.hasLength(System.getProperty("es.node.mode"))) { - builder.put("node.mode", System.getProperty("es.node.mode")); + builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("es.node.mode")); } if (Strings.hasLength(System.getProperty("es.node.local"))) { - builder.put("node.local", System.getProperty("es.node.local")); + builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("es.node.local")); } if (DiscoveryNode.localNode(builder.build())) { return "local"; @@ -888,13 +888,18 @@ public final class InternalTestCluster extends TestCluster { .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) .put("name", TRANSPORT_CLIENT_PREFIX + node.settings().get("name")) 
.put(ClusterName.SETTING, clusterName).put("client.transport.sniff", sniff) - .put("node.mode", nodeSettings.get("node.mode", nodeMode)) - .put("node.local", nodeSettings.get("node.local", "")) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) .put(settings); + if (Node.NODE_MODE_SETTING.exists(nodeSettings)) { + builder.put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.get(nodeSettings)); + } + if (Node.NODE_LOCAL_SETTING.exists(nodeSettings)) { + builder.put(Node.NODE_LOCAL_SETTING.getKey(), Node.NODE_LOCAL_SETTING.get(nodeSettings)); + } + TransportClient.Builder clientBuilder = TransportClient.builder().settings(builder.build()); for (Class plugin : plugins) { clientBuilder.addPlugin(plugin); From 664da3fa664f0ee4256ffe330d45fb9275f6191c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 26 Jan 2016 14:40:40 +0100 Subject: [PATCH 323/347] Remove RescoreBuilder interface, rename AbstractRescoreBuilder to RescoreBuilder --- .../search/builder/SearchSourceBuilder.java | 4 +- .../rescore/AbstractRescoreBuilder.java | 155 ------------------ .../search/rescore/QueryRescorerBuilder.java | 3 +- .../search/rescore/RescoreBuilder.java | 133 ++++++++++++++- .../search/functionscore/QueryRescorerIT.java | 46 +++--- .../rescore/QueryRescoreBuilderTests.java | 16 +- 6 files changed, 161 insertions(+), 196 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/search/rescore/AbstractRescoreBuilder.java diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 5c50134d4ce..a5e133466e0 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -46,7 +46,7 
@@ import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.rescore.AbstractRescoreBuilder; +import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; @@ -874,7 +874,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } else if (context.parseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) { List> rescoreBuilders = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - rescoreBuilders.add(AbstractRescoreBuilder.parseFromXContent(context)); + rescoreBuilders.add(RescoreBuilder.parseFromXContent(context)); } builder.rescoreBuilders = rescoreBuilders; } else if (context.parseFieldMatcher().match(currentFieldName, STATS_FIELD)) { diff --git a/core/src/main/java/org/elasticsearch/search/rescore/AbstractRescoreBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/AbstractRescoreBuilder.java deleted file mode 100644 index 8afc3dc301c..00000000000 --- a/core/src/main/java/org/elasticsearch/search/rescore/AbstractRescoreBuilder.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.rescore; - -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParseContext; - -import java.io.IOException; -import java.util.Objects; - -/** - * The abstract base builder for instances of {@link RescoreBuilder}. 
- */ -public abstract class AbstractRescoreBuilder> implements RescoreBuilder { - - protected Integer windowSize; - - private static ParseField WINDOW_SIZE_FIELD = new ParseField("window_size"); - - @SuppressWarnings("unchecked") - @Override - public RB windowSize(int windowSize) { - this.windowSize = windowSize; - return (RB) this; - } - - @Override - public Integer windowSize() { - return windowSize; - } - - public static RescoreBuilder parseFromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - String fieldName = null; - AbstractRescoreBuilder rescorer = null; - Integer windowSize = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - if (parseContext.parseFieldMatcher().match(fieldName, WINDOW_SIZE_FIELD)) { - windowSize = parser.intValue(); - } else { - throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); - } - } else if (token == XContentParser.Token.START_OBJECT) { - // we only have QueryRescorer at this point - if (QueryRescorerBuilder.NAME.equals(fieldName)) { - rescorer = QueryRescorerBuilder.PROTOTYPE.fromXContent(parseContext); - } else { - throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support rescorer with name [" + fieldName + "]"); - } - } else { - throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); - } - } - if (rescorer == null) { - throw new ParsingException(parser.getTokenLocation(), "missing rescore type"); - } - if (windowSize != null) { - rescorer.windowSize(windowSize.intValue()); - } - return rescorer; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (windowSize != null) { - 
builder.field("window_size", windowSize); - } - doXContent(builder, params); - builder.endObject(); - return builder; - } - - protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; - - public static QueryRescorerBuilder queryRescorer(QueryBuilder queryBuilder) { - return new QueryRescorerBuilder(queryBuilder); - } - - @Override - public int hashCode() { - return Objects.hash(windowSize); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - @SuppressWarnings("rawtypes") - AbstractRescoreBuilder other = (AbstractRescoreBuilder) obj; - return Objects.equals(windowSize, other.windowSize); - } - - @Override - public RB readFrom(StreamInput in) throws IOException { - RB builder = doReadFrom(in); - builder.windowSize = in.readOptionalVInt(); - return builder; - } - - protected abstract RB doReadFrom(StreamInput in) throws IOException; - - @Override - public void writeTo(StreamOutput out) throws IOException { - doWriteTo(out); - out.writeOptionalVInt(this.windowSize); - } - - protected abstract void doWriteTo(StreamOutput out) throws IOException; - - @Override - public final String toString() { - try { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.prettyPrint(); - toXContent(builder, EMPTY_PARAMS); - return builder.string(); - } catch (Exception e) { - return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java index 4e6d0e0f568..10c727a9029 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java @@ -35,7 +35,7 @@ import java.io.IOException; import java.util.Locale; import 
java.util.Objects; -public class QueryRescorerBuilder extends AbstractRescoreBuilder { +public class QueryRescorerBuilder extends RescoreBuilder { public static final String NAME = "query"; @@ -140,7 +140,6 @@ public class QueryRescorerBuilder extends AbstractRescoreBuilder> extends ToXContent, NamedWriteable { +/** + * The abstract base builder for instances of {@link RescoreBuilder}. + */ +public abstract class RescoreBuilder> implements ToXContent, NamedWriteable { - RescoreSearchContext build(QueryShardContext context) throws IOException; + protected Integer windowSize; - RB fromXContent(QueryParseContext parseContext) throws IOException; + private static ParseField WINDOW_SIZE_FIELD = new ParseField("window_size"); - RB windowSize(int windowSize); + @SuppressWarnings("unchecked") + public RB windowSize(int windowSize) { + this.windowSize = windowSize; + return (RB) this; + } - Integer windowSize(); -} \ No newline at end of file + public Integer windowSize() { + return windowSize; + } + + public static RescoreBuilder parseFromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + String fieldName = null; + RescoreBuilder rescorer = null; + Integer windowSize = null; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else if (token.isValue()) { + if (parseContext.parseFieldMatcher().match(fieldName, WINDOW_SIZE_FIELD)) { + windowSize = parser.intValue(); + } else { + throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); + } + } else if (token == XContentParser.Token.START_OBJECT) { + // we only have QueryRescorer at this point + if (QueryRescorerBuilder.NAME.equals(fieldName)) { + rescorer = QueryRescorerBuilder.PROTOTYPE.fromXContent(parseContext); + } else { + throw new ParsingException(parser.getTokenLocation(), 
"rescore doesn't support rescorer with name [" + fieldName + "]"); + } + } else { + throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); + } + } + if (rescorer == null) { + throw new ParsingException(parser.getTokenLocation(), "missing rescore type"); + } + if (windowSize != null) { + rescorer.windowSize(windowSize.intValue()); + } + return rescorer; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (windowSize != null) { + builder.field("window_size", windowSize); + } + doXContent(builder, params); + builder.endObject(); + return builder; + } + + protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; + + public abstract QueryRescoreContext build(QueryShardContext context) throws IOException; + + public static QueryRescorerBuilder queryRescorer(QueryBuilder queryBuilder) { + return new QueryRescorerBuilder(queryBuilder); + } + + @Override + public int hashCode() { + return Objects.hash(windowSize); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + @SuppressWarnings("rawtypes") + RescoreBuilder other = (RescoreBuilder) obj; + return Objects.equals(windowSize, other.windowSize); + } + + @Override + public RB readFrom(StreamInput in) throws IOException { + RB builder = doReadFrom(in); + builder.windowSize = in.readOptionalVInt(); + return builder; + } + + protected abstract RB doReadFrom(StreamInput in) throws IOException; + + @Override + public void writeTo(StreamOutput out) throws IOException { + doWriteTo(out); + out.writeOptionalVInt(this.windowSize); + } + + protected abstract void doWriteTo(StreamOutput out) throws IOException; + + @Override + public final String toString() { + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + 
builder.prettyPrint(); + toXContent(builder, EMPTY_PARAMS); + return builder.string(); + } catch (Exception e) { + return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 163ee84cf18..07f51772a7d 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.rescore.AbstractRescoreBuilder; +import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.rescore.QueryRescoreMode; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -80,7 +80,7 @@ public class QueryRescorerIT extends ESIntegTestCase { for (int j = 0 ; j < iters; j++) { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchAllQuery()) - .setRescorer(AbstractRescoreBuilder.queryRescorer( + .setRescorer(RescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(), ScoreFunctionBuilders.weightFactorFunction(100)).boostMode(CombineFunction.REPLACE)) .setQueryWeight(0.0f).setRescoreQueryWeight(1.0f), 1).setSize(randomIntBetween(2, 10)).execute() @@ -116,7 +116,7 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)) 
+ RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)) .setRescoreQueryWeight(2), 5).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(3l)); @@ -126,7 +126,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - .setRescorer(AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3)), 5) + .setRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3)), 5) .execute().actionGet(); assertHitCount(searchResponse, 3); @@ -136,7 +136,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - .setRescorer(AbstractRescoreBuilder.queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown"))), 5).execute() + .setRescorer(RescoreBuilder.queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown"))), 5).execute() .actionGet(); assertHitCount(searchResponse, 3); @@ -181,7 +181,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -197,7 +197,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + 
RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -214,7 +214,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet(); assertThat(searchResponse.getHits().hits().length, equalTo(5)); @@ -263,7 +263,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 2).execute().actionGet(); // Only top 2 hits were re-ordered: assertThat(searchResponse.getHits().hits().length, equalTo(4)); @@ -280,7 +280,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) + RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 3).execute().actionGet(); // Only top 3 hits were re-ordered: @@ -333,7 +333,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(5) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue 
massachusetts").slop(3)) + RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(1.0f).setRescoreQueryWeight(-1f), 3).execute().actionGet(); // 6 and 1 got worse, and then the hit (2) outside the rescore window were sorted ahead: @@ -424,7 +424,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - AbstractRescoreBuilder + RescoreBuilder .queryRescorer( QueryBuilders .constantScoreQuery(QueryBuilders.matchPhraseQuery("field1", intToEnglish).slop(3))) @@ -462,7 +462,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - AbstractRescoreBuilder + RescoreBuilder .queryRescorer( QueryBuilders .constantScoreQuery(QueryBuilders.matchPhraseQuery("field1", "not in the index").slop(3))) @@ -480,7 +480,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setFrom(0) .setSize(resultSize) .setRescorer( - AbstractRescoreBuilder + RescoreBuilder .queryRescorer( QueryBuilders.matchPhraseQuery("field1", intToEnglish).slop(0)) .setQueryWeight(1.0f).setRescoreQueryWeight(1.0f), 2 * rescoreWindow).execute().actionGet(); @@ -512,7 +512,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( - AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)) + RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)) .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f), 5).setExplain(true).execute() .actionGet(); assertHitCount(searchResponse, 3); @@ -538,7 +538,7 @@ public class QueryRescorerIT extends ESIntegTestCase { String[] scoreModes = new String[]{ "max", "min", "avg", "total", "multiply", "" }; String[] descriptionModes = new String[]{ "max 
of:", "min of:", "avg of:", "sum of:", "product of:", "sum of:" }; for (int innerMode = 0; innerMode < scoreModes.length; innerMode++) { - QueryRescorerBuilder innerRescoreQuery = AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f)) + QueryRescorerBuilder innerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f)) .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f); if (!"".equals(scoreModes[innerMode])) { @@ -561,7 +561,7 @@ public class QueryRescorerIT extends ESIntegTestCase { } for (int outerMode = 0; outerMode < scoreModes.length; outerMode++) { - QueryRescorerBuilder outerRescoreQuery = AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown") + QueryRescorerBuilder outerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown") .boost(4.0f)).setQueryWeight(0.5f).setRescoreQueryWeight(0.4f); if (!"".equals(scoreModes[outerMode])) { @@ -599,7 +599,7 @@ public class QueryRescorerIT extends ESIntegTestCase { for (int i = 0; i < numDocs - 4; i++) { String[] intToEnglish = new String[] { English.intToEnglish(i), English.intToEnglish(i + 1), English.intToEnglish(i + 2), English.intToEnglish(i + 3) }; - QueryRescorerBuilder rescoreQuery = AbstractRescoreBuilder + QueryRescorerBuilder rescoreQuery = RescoreBuilder .queryRescorer( QueryBuilders.boolQuery() .disableCoord(true) @@ -682,10 +682,10 @@ public class QueryRescorerIT extends ESIntegTestCase { public void testMultipleRescores() throws Exception { int numDocs = indexRandomNumbers("keyword", 1, true); - QueryRescorerBuilder eightIsGreat = AbstractRescoreBuilder.queryRescorer( + QueryRescorerBuilder eightIsGreat = RescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8)), 
ScoreFunctionBuilders.weightFactorFunction(1000.0f)).boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); - QueryRescorerBuilder sevenIsBetter = AbstractRescoreBuilder.queryRescorer( + QueryRescorerBuilder sevenIsBetter = RescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7)), ScoreFunctionBuilders.weightFactorFunction(10000.0f)).boostMode(CombineFunction.REPLACE)) .setScoreMode(QueryRescoreMode.Total); @@ -703,10 +703,10 @@ public class QueryRescorerIT extends ESIntegTestCase { // We have no idea what the second hit will be because we didn't get a chance to look for seven // Now use one rescore to drag the number we're looking for into the window of another - QueryRescorerBuilder ninetyIsGood = AbstractRescoreBuilder.queryRescorer( + QueryRescorerBuilder ninetyIsGood = RescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f)) .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); - QueryRescorerBuilder oneToo = AbstractRescoreBuilder.queryRescorer( + QueryRescorerBuilder oneToo = RescoreBuilder.queryRescorer( QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*one*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f)) .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total); request.clearRescorers().addRescorer(ninetyIsGood, numDocs).addRescorer(oneToo, 10); @@ -759,7 +759,7 @@ public class QueryRescorerIT extends ESIntegTestCase { request.setQuery(QueryBuilders.termQuery("text", "hello")); request.setFrom(1); request.setSize(4); - request.addRescorer(AbstractRescoreBuilder.queryRescorer(QueryBuilders.matchAllQuery()), 50); + request.addRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchAllQuery()), 50); assertEquals(4, request.get().getHits().hits().length); } diff --git 
a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index cb6f7b50869..1305aa727f5 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -136,7 +136,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { XContentParser parser = createParser(rescoreBuilder); context.reset(parser); parser.nextToken(); - RescoreBuilder secondRescoreBuilder = AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder secondRescoreBuilder = RescoreBuilder.parseFromXContent(context); assertNotSame(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); @@ -198,7 +198,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("rescore doesn't support rescorer with name [bad_rescorer_name]", e.getMessage()); @@ -209,7 +209,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage()); @@ -221,7 +221,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { 
assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); @@ -230,7 +230,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { rescoreElement = "{ }"; prepareContext(context, rescoreElement); try { - AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("missing rescore type", e.getMessage()); @@ -242,7 +242,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (IllegalArgumentException e) { assertEquals("[query] unknown field [bad_fieldname], parser not found", e.getMessage()); @@ -254,7 +254,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { "}\n"; prepareContext(context, rescoreElement); try { - AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder.parseFromXContent(context); fail("expected a parsing exception"); } catch (ParsingException e) { assertEquals("[query] failed to parse field [rescore_query]", e.getMessage()); @@ -265,7 +265,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" + "}\n"; prepareContext(context, rescoreElement); - AbstractRescoreBuilder.parseFromXContent(context); + RescoreBuilder.parseFromXContent(context); } /** From cf358e426c7dd80018ba53884c9205e6f10ac181 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 26 Jan 2016 14:30:21 +0100 Subject: [PATCH 324/347] Use Version#id to parse version from settings. When a Version is passed to `Settings#put(String, Version)` it's id is used as an integer which should also be used to deserialize it on the consumer end. Today AssertinLocalTransport expects Version#toString() to be used which can lead to subtile bugs in tests. 
--- .../search/aggregations/bucket/DateHistogramOffsetIT.java | 2 +- .../elasticsearch/test/transport/AssertingLocalTransport.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 75c0fc25e6d..cbd9a250ebd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -61,7 +61,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1.toString()).build(); + .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1).build(); } @Before diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index d66acb7ff06..9e8d7a4af85 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -62,9 +62,9 @@ public class AssertingLocalTransport extends LocalTransport { } public static final Setting ASSERTING_TRANSPORT_MIN_VERSION_KEY = new Setting<>("transport.asserting.version.min", - Version.CURRENT.minimumCompatibilityVersion().toString(), Version::fromString, false, Setting.Scope.CLUSTER); + Integer.toString(Version.CURRENT.minimumCompatibilityVersion().id), (s) -> Version.fromId(Integer.parseInt(s)), false, Setting.Scope.CLUSTER); public static final Setting ASSERTING_TRANSPORT_MAX_VERSION_KEY = 
new Setting<>("transport.asserting.version.max", - Version.CURRENT.toString(), Version::fromString, false, Setting.Scope.CLUSTER); + Integer.toString(Version.CURRENT.id), (s) -> Version.fromId(Integer.parseInt(s)), false, Setting.Scope.CLUSTER); private final Random random; private final Version minVersion; private final Version maxVersion; From 9dfcfd5d423dd63e4d2036eafba381bde5478383 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 26 Jan 2016 14:54:41 +0100 Subject: [PATCH 325/347] move node.ingest up a few lines --- .../org/elasticsearch/common/settings/ClusterSettings.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 13424a5a17c..4c5073c6074 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -254,9 +254,9 @@ public final class ClusterSettings extends AbstractScopedSettings { Node.NODE_MASTER_SETTING, Node.NODE_LOCAL_SETTING, Node.NODE_MODE_SETTING, + Node.NODE_INGEST_SETTING, URLRepository.ALLOWED_URLS_SETTING, URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING, URLRepository.REPOSITORIES_URL_SETTING, - URLRepository.SUPPORTED_PROTOCOLS_SETTING, - Node.NODE_INGEST_SETTING))); + URLRepository.SUPPORTED_PROTOCOLS_SETTING))); } From 98446e7dd3feaf405ba2f28056875ee9ffe9513a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 26 Jan 2016 15:26:47 +0100 Subject: [PATCH 326/347] Adding java api changes to migrate_3_0.asciidoc --- docs/reference/migration/migrate_3_0.asciidoc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 7e4955b2342..b704fe81c16 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ 
b/docs/reference/migration/migrate_3_0.asciidoc @@ -543,6 +543,10 @@ to index a document only if it doesn't already exist. `InternalLineStringBuilder` is removed in favour of `LineStringBuilder`, `InternalPolygonBuilder` in favour of PolygonBuilder` and `Ring` has been replaced with `LineStringBuilder`. Also the abstract base classes `BaseLineStringBuilder` and `BasePolygonBuilder` haven been merged with their corresponding implementations. +==== RescoreBuilder + +`RescoreBuilder.Rescorer` was merged with `RescoreBuilder`, which now is an abstract superclass. `QueryRescoreBuilder` currently is its only implementation. + [[breaking_30_cache_concurrency]] === Cache concurrency level settings removed From 9cefeabd9e6b1a8adfad195f4f740095f1f803fa Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 26 Jan 2016 15:37:31 +0100 Subject: [PATCH 327/347] [TEST] make sure node.mode is used in TransportClientFactory --- .../java/org/elasticsearch/test/InternalTestCluster.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 767fbd983f6..a6cadc3de0c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -24,7 +24,6 @@ import com.carrotsearch.randomizedtesting.SysGlobals; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; @@ -888,14 +887,12 @@ public final class InternalTestCluster extends TestCluster { .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) .put("name", TRANSPORT_CLIENT_PREFIX +
node.settings().get("name")) .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", sniff) + .put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.exists(nodeSettings) ? Node.NODE_MODE_SETTING.get(nodeSettings) : nodeMode) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) .put(settings); - if (Node.NODE_MODE_SETTING.exists(nodeSettings)) { - builder.put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.get(nodeSettings)); - } if (Node.NODE_LOCAL_SETTING.exists(nodeSettings)) { builder.put(Node.NODE_LOCAL_SETTING.getKey(), Node.NODE_LOCAL_SETTING.get(nodeSettings)); } From b784b816653a17524868bc8c0e5a4090d91ec41f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 26 Jan 2016 14:52:40 +0100 Subject: [PATCH 328/347] docs: Remove the fact that ingest was a plugin from the docs. --- docs/plugins/ingest-geoip.asciidoc | 4 ++-- docs/reference/ingest/ingest.asciidoc | 23 +++++++++++------------ 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 539c29971a4..2c0663b5958 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -4,7 +4,7 @@ The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases. This processor adds this information by default under the `geoip` field. -The ingest plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available +The ingest-geoip plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available under the CCA-ShareAlike 3.0 license. For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/ The GeoIP processor can run with other geoip2 databases from Maxmind. 
The files must be copied into the geoip config directory @@ -18,7 +18,7 @@ is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too | Name | Required | Default | Description | `source_field` | yes | - | The field to get the ip address or hostname from for the geographical lookup. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. -| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. +| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. | `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] <1> | Controls what properties are added to the `target_field` based on the geoip lookup. |====== diff --git a/docs/reference/ingest/ingest.asciidoc b/docs/reference/ingest/ingest.asciidoc index 0c049f82b69..ee724d4ad07 100644 --- a/docs/reference/ingest/ingest.asciidoc +++ b/docs/reference/ingest/ingest.asciidoc @@ -1,23 +1,22 @@ [[ingest]] -== Ingest Plugin +== Ingest Node -The ingest plugin can be used to pre-process documents before the actual indexing takes place. -This pre-processing happens by the ingest plugin that intercepts bulk and index requests, applies the +Ingest node can be used to pre-process documents before the actual indexing takes place. +This pre-processing happens by an ingest node that intercepts bulk and index requests, applies the transformations and then passes the documents back to the index or bulk APIs. -The ingest plugin is disabled by default. In order to enable the ingest plugin the following +Ingest node is enabled by default. 
In order to disable ingest the following setting should be configured in the elasticsearch.yml file: [source,yaml] -------------------------------------------------- -node.ingest: true +node.ingest: false -------------------------------------------------- -The ingest plugin can be installed and enabled on any node. It is possible to run ingest -on an master and or data node or have dedicated client nodes that run with ingest. +It is possible to enable ingest on any node or have dedicated ingest nodes. In order to pre-process document before indexing the `pipeline` parameter should be used -on an index or bulk request to tell the ingest plugin what pipeline is going to be used. +on an index or bulk request to tell Ingest what pipeline is going to be used. [source,js] -------------------------------------------------- @@ -606,9 +605,9 @@ The following example sets the id of a document to `1`: The following metadata fields are accessible by a processor: `_index`, `_type`, `_id`, `_routing`, `_parent`, `_timestamp` and `_ttl`. -Beyond metadata fields and source fields, the ingest plugin also adds ingest metadata to documents being processed. -These metadata properties are accessible under the `_ingest` key. Currently the ingest plugin adds the ingest timestamp -under `_ingest.timestamp` key to the ingest metadata, which is the time the ingest plugin received the index or bulk +Beyond metadata fields and source fields, ingest also adds ingest metadata to documents being processed. +These metadata properties are accessible under the `_ingest` key. Currently ingest adds the ingest timestamp +under `_ingest.timestamp` key to the ingest metadata, which is the time ES received the index or bulk request to pre-process. But any processor is free to add more ingest related metadata to it. Ingest metadata is transient and is lost after a document has been processed by the pipeline and thus ingest metadata won't be indexed. 
@@ -626,7 +625,7 @@ The following example adds a field with the name `received` and the value is the As opposed to Elasticsearch metadata fields, the ingest metadata field name _ingest can be used as a valid field name in the source of a document. Use _source._ingest to refer to it, otherwise _ingest will be interpreted as ingest -metadata fields by the ingest plugin. +metadata fields. A number of processor settings also support templating. Settings that support templating can have zero or more template snippets. A template snippet begins with `{{` and ends with `}}`. From 28a9709d7f64d922f51f06cebdc551c7454625f3 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 26 Jan 2016 16:13:10 +0100 Subject: [PATCH 329/347] updated jdocs and removed redundant import --- .../main/java/org/elasticsearch/ingest/IngestMetadata.java | 2 -- .../src/main/java/org/elasticsearch/ingest/IngestService.java | 4 +--- .../java/org/elasticsearch/ingest/PipelineConfiguration.java | 3 --- .../java/org/elasticsearch/ingest/core/CompoundProcessor.java | 1 - .../org/elasticsearch/ingest/processor/DeDotProcessor.java | 1 - .../org/elasticsearch/ingest/processor/FailProcessor.java | 1 - .../org/elasticsearch/ingest/processor/GsubProcessor.java | 1 - .../org/elasticsearch/ingest/processor/JoinProcessor.java | 1 - .../org/elasticsearch/ingest/processor/RemoveProcessor.java | 1 - .../org/elasticsearch/ingest/processor/RenameProcessor.java | 1 - 10 files changed, 1 insertion(+), 15 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java index 0e507513102..32fade45e9d 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java @@ -22,8 +22,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.metadata.MetaData; import 
org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java index bc7cd75070c..8af82b28a38 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; @@ -28,8 +27,7 @@ import java.io.Closeable; import java.io.IOException; /** - * Instantiates and wires all the services that the ingest plugin will be needing. - * Also the bootstrapper is in charge of starting and stopping the ingest plugin based on the cluster state. + * Holder class for several ingest related services. 
*/ public class IngestService implements Closeable { diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index 90ab2a76c2e..3bd80edc306 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -19,8 +19,6 @@ package org.elasticsearch.ingest; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Build; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -34,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Map; -import java.util.Objects; import java.util.function.BiFunction; /** diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java index bc5fd19aac7..699720e18ca 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java @@ -25,7 +25,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.stream.Collectors; /** * A Processor that executes a list of other "processors". 
It executes a separate list of diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java index 295a9884997..b8f86616ffc 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; -import org.elasticsearch.ingest.core.Processor; import java.util.HashMap; import java.util.Iterator; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java index 65b4b602bd0..76c7b3c40ea 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.ConfigurationUtils; import org.elasticsearch.ingest.core.IngestDocument; -import org.elasticsearch.ingest.core.Processor; import org.elasticsearch.ingest.core.TemplateService; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java index 3dc4b3f0cad..0ec7fba84f2 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import 
org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.Map; import java.util.regex.Matcher; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java index 3516929e26f..dd729dd0afd 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java @@ -23,7 +23,6 @@ import org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.List; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java index e994954a034..a39ac8f5cf4 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java @@ -24,7 +24,6 @@ import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java index 7726a720b49..6088315884e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java @@ -23,7 +23,6 @@ import 
org.elasticsearch.ingest.core.AbstractProcessor; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.ingest.core.ConfigurationUtils; -import org.elasticsearch.ingest.core.Processor; import java.util.Map; From 429b372edbd3b346e226ae642938b9bb1b1d971f Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 26 Jan 2016 16:19:13 +0100 Subject: [PATCH 330/347] Validate tribe node settings on startup --- .../common/settings/SettingsModule.java | 37 ++++++--- .../common/settings/SettingsModuleTests.java | 81 +++++++++++++++++++ 2 files changed, 108 insertions(+), 10 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index eec6e734229..24fe7be56c6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.AbstractModule; import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.function.Predicate; /** * A module that binds the provided settings to the {@link Settings} interface. @@ -54,18 +55,13 @@ public class SettingsModule extends AbstractModule { final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.clusterSettings.values())); // by now we are fully configured, lets check node level settings for unregistered index settings indexScopedSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE)); - // we can't call this method yet since we have not all node level settings registered. - // yet we can validate the ones we have registered to not have invalid values. 
this is better than nothing - // and progress over perfection and we fail as soon as possible. - // clusterSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate())); - for (Map.Entry entry : settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate()).getAsMap().entrySet()) { - if (clusterSettings.get(entry.getKey()) != null) { - clusterSettings.validate(entry.getKey(), settings); - } else if (AbstractScopedSettings.isValidKey(entry.getKey()) == false) { - throw new IllegalArgumentException("illegal settings key: [" + entry.getKey() + "]"); - } + Predicate noIndexSettingPredicate = IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate(); + Predicate noTribePredicate = (s) -> s.startsWith("tribe.") == false; + for (Map.Entry entry : settings.filter(noTribePredicate.and(noIndexSettingPredicate)).getAsMap().entrySet()) { + validateClusterSetting(clusterSettings, entry.getKey(), settings); } + validateTribeSettings(settings, clusterSettings); bind(Settings.class).toInstance(settings); bind(SettingsFilter.class).toInstance(settingsFilter); @@ -90,4 +86,25 @@ public class SettingsModule extends AbstractModule { } } + public void validateTribeSettings(Settings settings, ClusterSettings clusterSettings) { + Map groups = settings.getGroups("tribe."); + for (Map.Entry tribeSettings : groups.entrySet()) { + for (Map.Entry entry : tribeSettings.getValue().getAsMap().entrySet()) { + validateClusterSetting(clusterSettings, entry.getKey(), tribeSettings.getValue()); + } + } + } + + private final void validateClusterSetting(ClusterSettings clusterSettings, String key, Settings settings) { + // we can't call this method yet since we have not all node level settings registered. + // yet we can validate the ones we have registered to not have invalid values. this is better than nothing + // and progress over perfection and we fail as soon as possible. 
+ // clusterSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate())); + if (clusterSettings.get(key) != null) { + clusterSettings.validate(key, settings); + } else if (AbstractScopedSettings.isValidKey(key) == false) { + throw new IllegalArgumentException("illegal settings key: [" + key + "]"); + } + } + } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java new file mode 100644 index 00000000000..731957cba06 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.settings; + +import org.elasticsearch.common.inject.ModuleTestCase; + +public class SettingsModuleTests extends ModuleTestCase { + + public void testValidate() { + { + Settings settings = Settings.builder().put("cluster.routing.allocation.balance.shard", "2.0").build(); + SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY)); + assertInstanceBinding(module, Settings.class, (s) -> s == settings); + } + { + Settings settings = Settings.builder().put("cluster.routing.allocation.balance.shard", "[2.0]").build(); + SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY)); + try { + assertInstanceBinding(module, Settings.class, (s) -> s == settings); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [[2.0]] for setting [cluster.routing.allocation.balance.shard]", ex.getMessage()); + } + } + } + + public void testRegisterSettings() { + { + Settings settings = Settings.builder().put("some.custom.setting", "2.0").build(); + SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, Setting.Scope.CLUSTER)); + assertInstanceBinding(module, Settings.class, (s) -> s == settings); + } + { + Settings settings = Settings.builder().put("some.custom.setting", "false").build(); + SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, Setting.Scope.CLUSTER)); + try { + assertInstanceBinding(module, Settings.class, (s) -> s == settings); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [false] for setting [some.custom.setting]", ex.getMessage()); + } + } + } + + public void testTribeSetting() { + { + Settings settings = 
Settings.builder().put("tribe.t1.cluster.routing.allocation.balance.shard", "2.0").build(); + SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY)); + assertInstanceBinding(module, Settings.class, (s) -> s == settings); + } + { + Settings settings = Settings.builder().put("tribe.t1.cluster.routing.allocation.balance.shard", "[2.0]").build(); + SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY)); + try { + assertInstanceBinding(module, Settings.class, (s) -> s == settings); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [[2.0]] for setting [cluster.routing.allocation.balance.shard]", ex.getMessage()); + } + } + } +} From 09d7e9127d32eb9013edce4ce88a15778592de49 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 26 Jan 2016 16:39:17 +0100 Subject: [PATCH 331/347] Convert `cluster.routing.allocation.type` to new settings infrastructure --- .../main/java/org/elasticsearch/cluster/ClusterModule.java | 6 ++++-- .../org/elasticsearch/common/settings/ClusterSettings.java | 4 +++- .../java/org/elasticsearch/cluster/ClusterModuleTests.java | 6 +++--- .../cluster/allocation/ShardsAllocatorModuleIT.java | 4 ++-- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 626b020c56c..3e668191ff3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -57,6 +57,7 @@ import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.gateway.GatewayAllocator; @@ -64,6 +65,7 @@ import org.elasticsearch.gateway.GatewayAllocator; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.function.Function; /** * Configures classes and services that affect the entire cluster. @@ -72,7 +74,7 @@ public class ClusterModule extends AbstractModule { public static final String EVEN_SHARD_COUNT_ALLOCATOR = "even_shard"; public static final String BALANCED_ALLOCATOR = "balanced"; // default - public static final String SHARDS_ALLOCATOR_TYPE_KEY = "cluster.routing.allocation.type"; + public static final Setting SHARDS_ALLOCATOR_TYPE_SETTING = new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), false, Setting.Scope.CLUSTER); public static final List> DEFAULT_ALLOCATION_DECIDERS = Collections.unmodifiableList(Arrays.asList( SameShardAllocationDecider.class, @@ -121,7 +123,7 @@ public class ClusterModule extends AbstractModule { @Override protected void configure() { // bind ShardsAllocator - String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.BALANCED_ALLOCATOR); + String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.BALANCED_ALLOCATOR); if (shardsAllocatorType.equals(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR)) { final ESLogger logger = Loggers.getLogger(getClass(), settings); logger.warn("{} allocator has been removed in 2.0 using {} instead", ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, ClusterModule.BALANCED_ALLOCATOR); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 05978a13053..597f3058bd1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ 
b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.client.transport.TransportClientNodesService; +import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.MetaData; @@ -253,5 +254,6 @@ public final class ClusterSettings extends AbstractScopedSettings { URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING, URLRepository.REPOSITORIES_URL_SETTING, URLRepository.SUPPORTED_PROTOCOLS_SETTING, - Node.NODE_INGEST_SETTING))); + Node.NODE_INGEST_SETTING, + ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING))); } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 7cfa0eeaaa7..7af4e375fdc 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -122,7 +122,7 @@ public class ClusterModuleTests extends ModuleTestCase { } public void testRegisterShardsAllocator() { - Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, "custom").build(); + Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "custom").build(); ClusterModule module = new ClusterModule(settings); module.registerShardsAllocator("custom", FakeShardsAllocator.class); assertBinding(module, ShardsAllocator.class, FakeShardsAllocator.class); @@ -138,14 +138,14 @@ public class ClusterModuleTests extends ModuleTestCase { } public void testUnknownShardsAllocator() { - Settings settings = 
Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, "dne").build(); + Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "dne").build(); ClusterModule module = new ClusterModule(settings); assertBindingFailure(module, "Unknown [shards_allocator]"); } public void testEvenShardsAllocatorBackcompat() { Settings settings = Settings.builder() - .put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR).build(); + .put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR).build(); ClusterModule module = new ClusterModule(settings); assertBinding(module, ShardsAllocator.class, BalancedShardsAllocator.class); } diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java index 89a7f8ad65c..60fa45ebfa1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java @@ -40,10 +40,10 @@ public class ShardsAllocatorModuleIT extends ESIntegTestCase { } public void testLoadByShortKeyShardsAllocator() throws IOException { - Settings build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, "even_shard") // legacy just to make sure we don't barf + Settings build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "even_shard") // legacy just to make sure we don't barf .build(); assertAllocatorInstance(build, BalancedShardsAllocator.class); - build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.BALANCED_ALLOCATOR).build(); + build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.BALANCED_ALLOCATOR).build(); assertAllocatorInstance(build, BalancedShardsAllocator.class); } From 
94aa9f3bfc7efcca79d9cc42140c7163c5f95466 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 26 Jan 2016 16:41:14 +0100 Subject: [PATCH 332/347] Remove unsupported settings --- core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java index f871995cc20..d2d9a8507e4 100644 --- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java +++ b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java @@ -65,8 +65,6 @@ public class SimpleTTLIT extends ESIntegTestCase { return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put("indices.ttl.interval", PURGE_INTERVAL, TimeUnit.MILLISECONDS) - .put("cluster.routing.operation.use_type", false) // make sure we control the shard computation - .put("cluster.routing.operation.hash.type", "djb") .build(); } From daadf8e04feb3a316a6c845a0c2ed56f519a1eab Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 26 Jan 2016 17:04:53 +0100 Subject: [PATCH 333/347] Convert processors to new settings infrastructure --- .../common/settings/ClusterSettings.java | 4 +++- .../common/util/concurrent/EsExecutors.java | 12 +++--------- .../admin/cluster/stats/ClusterStatsIT.java | 2 +- .../cluster/settings/ClusterSettingsIT.java | 2 +- .../nodesinfo/SimpleNodesInfoIT.java | 4 ++-- .../test/ESSingleNodeTestCase.java | 2 +- .../org/elasticsearch/test/ESTestCase.java | 18 ------------------ .../test/InternalTestCluster.java | 6 +----- 8 files changed, 12 insertions(+), 38 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 597f3058bd1..6ac9d92f67f 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -37,6 +37,7 @@ import 
org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAl import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.common.network.NetworkModule; @@ -255,5 +256,6 @@ public final class ClusterSettings extends AbstractScopedSettings { URLRepository.REPOSITORIES_URL_SETTING, URLRepository.SUPPORTED_PROTOCOLS_SETTING, Node.NODE_INGEST_SETTING, - ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING))); + ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, + EsExecutors.PROCESSORS_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 140f026c357..bc44494000e 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.util.concurrent; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import java.util.Arrays; @@ -40,10 +41,7 @@ public class EsExecutors { * Settings key to manually set the number of available processors. * This is used to adjust thread pools sizes etc. per node. */ - public static final String PROCESSORS = "processors"; - - /** Useful for testing */ - public static final String DEFAULT_SYSPROP = "es.processors.override"; + public static final Setting PROCESSORS_SETTING = Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, false, Setting.Scope.CLUSTER) ; /** * Returns the number of processors available but at most 32. 
@@ -53,11 +51,7 @@ public class EsExecutors { * ie. >= 48 create too many threads and run into OOM see #3478 * We just use an 32 core upper-bound here to not stress the system * too much with too many created threads */ - int defaultValue = Math.min(32, Runtime.getRuntime().availableProcessors()); - try { - defaultValue = Integer.parseInt(System.getProperty(DEFAULT_SYSPROP)); - } catch (Throwable ignored) {} - return settings.getAsInt(PROCESSORS, defaultValue); + return PROCESSORS_SETTING.get(settings); } public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 56eaad11f94..1d22b19d917 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -164,7 +164,7 @@ public class ClusterStatsIT extends ESIntegTestCase { internalCluster().ensureAtMostNumDataNodes(0); // start one node with 7 processors. 
- internalCluster().startNodesAsync(Settings.builder().put(EsExecutors.PROCESSORS, 7).build()).get(); + internalCluster().startNodesAsync(Settings.builder().put(EsExecutors.PROCESSORS_SETTING.getKey(), 7).build()).get(); waitForNodes(1); ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get(); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 80df54518ba..70d7697e666 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -335,7 +335,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { .put("node.name", "ClusterSettingsIT") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created + .put(EsExecutors.PROCESSORS_SETTING.getKey(), 1) // limit the number of threads created .put("http.enabled", false) .put("config.ignore_system_properties", true) // make sure we get what we set :) .put(settings) diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index 93ba861dca0..b643ba0d0ad 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -88,8 +88,8 @@ public class SimpleNodesInfoIT extends ESIntegTestCase { public void testAllocatedProcessors() throws Exception { List nodesIds = internalCluster(). 
startNodesAsync( - Settings.builder().put(EsExecutors.PROCESSORS, 3).build(), - Settings.builder().put(EsExecutors.PROCESSORS, 6).build() + Settings.builder().put(EsExecutors.PROCESSORS_SETTING.getKey(), 3).build(), + Settings.builder().put(EsExecutors.PROCESSORS_SETTING.getKey(), 6).build() ).get(); final String node_1 = nodesIds.get(0); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index f73839c5cec..8b562965f26 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -168,7 +168,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put("script.inline", "on") .put("script.indexed", "on") - .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created + .put(EsExecutors.PROCESSORS_SETTING.getKey(), 1) // limit the number of threads created .put("http.enabled", false) .put("node.local", true) .put("node.data", true) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 598b6216ce2..558d05e19c2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -40,14 +40,12 @@ import org.elasticsearch.bootstrap.BootstrapForTesting; import org.elasticsearch.cache.recycler.MockPageCacheRecycler; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -162,22 +160,6 @@ public abstract class ESTestCase extends LuceneTestCase { Requests.INDEX_CONTENT_TYPE = XContentType.JSON; } - // randomize and override the number of cpus so tests reproduce regardless of real number of cpus - - @BeforeClass - @SuppressForbidden(reason = "sets the number of cpus during tests") - public static void setProcessors() { - int numCpu = TestUtil.nextInt(random(), 1, 4); - System.setProperty(EsExecutors.DEFAULT_SYSPROP, Integer.toString(numCpu)); - assertEquals(numCpu, EsExecutors.boundedNumberOfProcessors(Settings.EMPTY)); - } - - @AfterClass - @SuppressForbidden(reason = "clears the number of cpus during tests") - public static void restoreProcessors() { - System.clearProperty(EsExecutors.DEFAULT_SYSPROP); - } - @After public final void ensureCleanedUp() throws Exception { MockPageCacheRecycler.ensureAllPagesAreReleased(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 9fb3eec8489..5749d67eb83 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -392,11 +392,7 @@ public final class InternalTestCluster extends TestCluster { builder.put(SearchService.DEFAULT_KEEPALIVE_SETTING.getKey(), TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); } - if (random.nextInt(10) == 0) { - // node gets an extra cpu this time - builder.put(EsExecutors.PROCESSORS, 1 + EsExecutors.boundedNumberOfProcessors(Settings.EMPTY)); - } - + builder.put(EsExecutors.PROCESSORS_SETTING.getKey(), 1 + 
random.nextInt(3)); if (random.nextBoolean()) { if (random.nextBoolean()) { builder.put("indices.fielddata.cache.size", 1 + random.nextInt(1000), ByteSizeUnit.MB); From b8b3a7aa4f346b59157c982a63261307f631610f Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 26 Jan 2016 17:54:41 +0100 Subject: [PATCH 334/347] Merge pull request #16236 from dlangille/patch-1 Better wording --- docs/reference/modules/snapshots.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc index 969a74f019d..5713a42d8d1 100644 --- a/docs/reference/modules/snapshots.asciidoc +++ b/docs/reference/modules/snapshots.asciidoc @@ -74,7 +74,7 @@ GET /_snapshot/_all The shared file system repository (`"type": "fs"`) uses the shared file system to store snapshots. In order to register the shared file system repository it is necessary to mount the same shared filesystem to the same location on all -master and data nodes. This location (or one of its parent directories) has to be registered in the `path.repo` +master and data nodes. This location (or one of its parent directories) must be registered in the `path.repo` setting on all master and data nodes. Assuming that the shared filesystem is mounted to `/mount/backups/my_backup`, the following setting should be added to From bdddea2dd0ecd3624e5484b3a34e668ca74cd6d0 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 22 Jan 2016 20:55:38 -0500 Subject: [PATCH 335/347] Security permissions for Groovy closures This commit adds some permissions that Groovy needs to use closures. 
Closes #16196 --- .../src/main/plugin-metadata/plugin-security.policy | 6 ++++++ .../elasticsearch/script/groovy/GroovySecurityTests.java | 3 +++ 2 files changed, 9 insertions(+) diff --git a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy index 4ada1ad5f38..b9466daa402 100644 --- a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy @@ -25,6 +25,7 @@ grant { // needed by groovy engine permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed by GroovyScriptEngineService to close its classloader (why?) permission java.lang.RuntimePermission "closeClassLoader"; // Allow executing groovy scripts with codesource of /untrusted @@ -48,4 +49,9 @@ grant { permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation"; permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.vmplugin.v7.IndyInterface"; permission org.elasticsearch.script.ClassPermission "sun.reflect.ConstructorAccessorImpl"; + + permission org.elasticsearch.script.ClassPermission "groovy.lang.Closure"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.GeneratedClosure"; + permission org.elasticsearch.script.ClassPermission "groovy.lang.MetaClass"; + permission org.elasticsearch.script.ClassPermission "groovy.lang.Range"; }; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index f5c44c6eea1..341fb009766 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ 
b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -87,6 +87,9 @@ public class GroovySecurityTests extends ESTestCase { assertSuccess("def t = Instant.now().getMillis()"); // GroovyCollections assertSuccess("def n = [1,2,3]; GroovyCollections.max(n)"); + // Groovy closures + assertSuccess("[1, 2, 3, 4].findAll { it % 2 == 0 }"); + assertSuccess("def buckets=[ [2, 4, 6, 8], [10, 12, 16, 14], [18, 22, 20, 24] ]; buckets[-3..-1].every { it.every { i -> i % 2 == 0 } }"); // Fail cases: assertFailure("pr = Runtime.getRuntime().exec(\"touch /tmp/gotcha\"); pr.waitFor()", MissingPropertyException.class); From 82ae74071ec09273d61b6ae765951aa5a7a9d8fb Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 26 Jan 2016 20:48:07 +0100 Subject: [PATCH 336/347] logging: cleanup some old TestLogging annotations and add a few debug level by default --- .../admin/indices/shards/IndicesShardStoreRequestIT.java | 5 +---- .../client/transport/TransportClientRetryIT.java | 2 -- .../org/elasticsearch/cluster/MinimumMasterNodesIT.java | 2 +- .../discovery/DiscoveryWithServiceDisruptionsIT.java | 6 ++---- .../org/elasticsearch/discovery/zen/ZenDiscoveryIT.java | 2 +- core/src/test/java/org/elasticsearch/get/GetActionIT.java | 2 -- .../test/java/org/elasticsearch/indices/flush/FlushIT.java | 2 -- .../org/elasticsearch/indices/recovery/IndexRecoveryIT.java | 3 --- .../org/elasticsearch/indices/state/RareClusterStateIT.java | 3 +-- .../indices/store/IndicesStoreIntegrationIT.java | 2 -- .../elasticsearch/recovery/RecoveryWhileUnderLoadIT.java | 2 ++ .../test/java/org/elasticsearch/recovery/RelocationIT.java | 2 +- .../search/basic/SearchWhileCreatingIndexIT.java | 2 ++ .../snapshots/SharedClusterSnapshotRestoreIT.java | 3 --- .../java/org/elasticsearch/messy/tests/SimpleSortTests.java | 2 -- 15 files changed, 11 insertions(+), 29 deletions(-) diff --git 
a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index d7f85ec5650..3c70f241eec 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -37,11 +37,8 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.MockIndexEventListener; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSIndexStore; -import org.elasticsearch.test.transport.MockTransportService; import java.util.Collection; import java.util.HashMap; @@ -61,6 +58,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) +@TestLogging("_root:DEBUG,action.admin.indices.shards:TRACE,cluster.service:TRACE") public class IndicesShardStoreRequestIT extends ESIntegTestCase { @Override @@ -74,7 +72,6 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { assertThat(rsp.getStoreStatuses().size(), equalTo(0)); } - @TestLogging("action.admin.indices.shards:TRACE,cluster.service:TRACE") public void testBasic() throws Exception { String index = "test"; internalCluster().ensureAtLeastNumDataNodes(2); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index 265f0614b7a..afb693db1e5 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ 
b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -44,7 +43,6 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @ClusterScope(scope = Scope.TEST, numClientNodes = 0) -@TestLogging("discovery.zen:TRACE") public class TransportClientRetryIT extends ESIntegTestCase { public void testRetry() throws IOException, ExecutionException, InterruptedException { Iterable instances = internalCluster().getInstances(TransportService.class); diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 612f910d3fa..d764216c056 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -62,6 +62,7 @@ import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) @ESIntegTestCase.SuppressLocalMode +@TestLogging("_root:DEBUG,cluster.service:TRACE,discovery.zen:TRACE") public class MinimumMasterNodesIT extends ESIntegTestCase { @Override @@ -71,7 +72,6 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { return classes; } - @TestLogging("cluster.service:TRACE,discovery.zen:TRACE,gateway:TRACE,transport.tracer:TRACE") public void testSimpleMinimumMasterNodes() throws Exception { Settings settings = settingsBuilder() diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java 
b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index f77bea8a27d..4dcf6f55059 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -112,6 +112,7 @@ import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0) @ESIntegTestCase.SuppressLocalMode +@TestLogging("_root:DEBUG,cluster.service:TRACE") public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { private static final TimeValue DISRUPTION_HEALING_OVERHEAD = TimeValue.timeValueSeconds(40); // we use 30s as timeout in many places. @@ -422,7 +423,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { */ // NOTE: if you remove the awaitFix, make sure to port the test to the 1.x branch @LuceneTestCase.AwaitsFix(bugUrl = "needs some more work to stabilize") - @TestLogging("action.index:TRACE,action.get:TRACE,discovery:TRACE,cluster.service:TRACE,indices.recovery:TRACE,indices.cluster:TRACE") + @TestLogging("_root:DEBUG,action.index:TRACE,action.get:TRACE,discovery:TRACE,cluster.service:TRACE,indices.recovery:TRACE,indices.cluster:TRACE") public void testAckedIndexing() throws Exception { // TODO: add node count randomizaion final List nodes = startCluster(3); @@ -705,7 +706,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { * Test that a document which is indexed on the majority side of a partition, is available from the minority side, * once the partition is healed */ - @TestLogging(value = "cluster.service:TRACE") public void testRejoinDocumentExistsInAllShardCopies() throws Exception { List nodes = startCluster(3); @@ -795,7 +795,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { assertMaster(masterNode, nodes); } - @TestLogging("discovery.zen:TRACE,cluster.service:TRACE") public 
void testIsolatedUnicastNodes() throws Exception { List nodes = startCluster(4, -1, new int[]{0}); // Figure out what is the elected master node @@ -979,7 +978,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { * sure that the node is removed form the cluster, that the node start pinging and that * the cluster reforms when healed. */ - @TestLogging("discovery.zen:TRACE,action:TRACE") public void testNodeNotReachableFromMaster() throws Exception { startCluster(3); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index fbc1e687241..6c564a97740 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -78,6 +78,7 @@ import static org.hamcrest.Matchers.sameInstance; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) @ESIntegTestCase.SuppressLocalMode +@TestLogging("_root:DEBUG") public class ZenDiscoveryIT extends ESIntegTestCase { public void testChangeRejoinOnMasterOptionIsDynamic() throws Exception { Settings nodeSettings = Settings.settingsBuilder() @@ -140,7 +141,6 @@ public class ZenDiscoveryIT extends ESIntegTestCase { assertThat(numRecoveriesAfterNewMaster, equalTo(numRecoveriesBeforeNewMaster)); } - @TestLogging(value = "action.admin.cluster.health:TRACE") public void testNodeFailuresAreProcessedOnce() throws ExecutionException, InterruptedException, IOException { Settings defaultSettings = Settings.builder() .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 6cc6def4931..dee48c5f288 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -43,7 +43,6 
@@ import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; import java.util.Collection; @@ -745,7 +744,6 @@ public class GetActionIT extends ESIntegTestCase { } } - @TestLogging("index.shard.service:TRACE,cluster.service:TRACE,action.admin.indices.flush:TRACE") public void testGetFieldsComplexField() throws Exception { assertAcked(prepareCreate("my-index") .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)) diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index c8a80f6621f..46767073404 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; import java.util.Arrays; @@ -86,7 +85,6 @@ public class FlushIT extends ESIntegTestCase { } } - @TestLogging("indices:TRACE") public void testSyncedFlush() throws ExecutionException, InterruptedException, IOException { internalCluster().ensureAtLeastNumDataNodes(2); prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).get(); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index dacf23758e8..88ccf99f6f1 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ 
b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; -import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -50,7 +49,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSDirectoryService; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; @@ -243,7 +241,6 @@ public class IndexRecoveryIT extends ESIntegTestCase { validateIndexRecoveryState(nodeBRecoveryState.getIndex()); } - @TestLogging("indices.recovery:TRACE") public void testRerouteRecovery() throws Exception { logger.info("--> start node A"); final String nodeA = internalCluster().startNode(); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 65a4d5ab76d..8a9fa191854 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -74,6 +74,7 @@ import static org.hamcrest.Matchers.instanceOf; */ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0) @ESIntegTestCase.SuppressLocalMode +@TestLogging("_root:DEBUG") public 
class RareClusterStateIT extends ESIntegTestCase { @Override protected int numberOfShards() { @@ -103,7 +104,6 @@ public class RareClusterStateIT extends ESIntegTestCase { allocator.allocateUnassigned(routingAllocation); } - @TestLogging("gateway:TRACE") public void testAssignmentWithJustAddedNodes() throws Exception { internalCluster().startNode(); final String index = "index"; @@ -167,7 +167,6 @@ public class RareClusterStateIT extends ESIntegTestCase { }); } - @TestLogging("cluster.service:TRACE") @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/14932") public void testDeleteCreateInOneBulk() throws Exception { internalCluster().startNodesAsync(2, Settings.builder() diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index ee4c79b50b5..b4260bcfe46 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -54,7 +54,6 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.disruption.SingleNodeDisruption; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportException; @@ -290,7 +289,6 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertThat(waitForShardDeletion(node_4, "test", 0), equalTo(false)); } - @TestLogging("cluster.service:TRACE") public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception { InternalTestCluster.Async masterFuture = internalCluster().startNodeAsync( Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), true, 
Node.NODE_DATA_SETTING.getKey(), false).build()); diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index e9349a97d7d..ed07b06a2ef 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -36,6 +36,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.Arrays; import java.util.concurrent.TimeUnit; @@ -49,6 +50,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; +@TestLogging("_root:DEBUG") public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { private final ESLogger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class); diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 6542a8ab1c6..4a3479958db 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -85,7 +85,7 @@ import static org.hamcrest.Matchers.startsWith; /** */ @ClusterScope(scope = Scope.TEST, numDataNodes = 0) -@TestLogging("indices.recovery:TRACE,index.shard.service:TRACE") +@TestLogging("_root:DEBUG,indices.recovery:TRACE,index.shard.service:TRACE") public class RelocationIT extends ESIntegTestCase { private final TimeValue ACCEPTABLE_RELOCATION_TIME = new TimeValue(5, TimeUnit.MINUTES); diff --git 
a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 35dbde26aba..53ac2bc045a 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -33,6 +34,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; * This test basically verifies that search with a single shard active (cause we indexed to it) and other * shards possibly not active at all (cause they haven't allocated) will still work. */ +@TestLogging("_root:DEBUG") public class SearchWhileCreatingIndexIT extends ESIntegTestCase { public void testIndexCausesIndexCreation() throws Exception { searchWhileCreatingIndex(false, 1); // 1 replica in our default... 
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index dac6ac0904d..a245919d944 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -52,7 +52,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.cluster.metadata.SnapshotId; -import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; @@ -63,7 +62,6 @@ import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.test.junit.annotations.TestLogging; import java.nio.channels.SeekableByteChannel; import java.nio.file.Files; @@ -1932,7 +1930,6 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas return awaitBusy(() -> client().admin().cluster().prepareHealth(index).execute().actionGet().getRelocatingShards() > 0, timeout.millis(), TimeUnit.MILLISECONDS); } - @TestLogging("cluster:DEBUG") public void testBatchingShardUpdateTask() throws Exception { final Client client = client(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index bbc2d0789de..ad8b1e68230 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -55,7 +55,6 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matchers; import java.io.IOException; @@ -109,7 +108,6 @@ public class SimpleSortTests extends ESIntegTestCase { return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class); } - @TestLogging("action.search.type:TRACE") @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/9421") public void testIssue8226() { int numIndices = between(5, 10); From 2b2552c301435e1d7c85055a7f7ef7782ff215d3 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 26 Jan 2016 21:15:30 +0100 Subject: [PATCH 337/347] test: cleanup static test resources --- .../ingest/geoip/GeoIpProcessorFactoryTests.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index b59242ece84..271476cc2f6 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -23,6 +23,7 @@ import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; +import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.ByteArrayInputStream; @@ -55,6 +56,14 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { databaseReaders = IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir); } + @AfterClass + 
public static void closeDatabaseReaders() throws IOException { + for (DatabaseReader reader : databaseReaders.values()) { + reader.close(); + } + databaseReaders = null; + } + public void testBuildDefaults() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); From 392814ea6f5f62f5f1d2232645db92da01d59914 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 14 Jan 2016 18:40:11 -0500 Subject: [PATCH 338/347] Shard failure requests for non-existent shards This commit adds handling on the master side for shard failure requests for shards that do not exist at the time that they are processed on the master node (whether it be from errant requests, duplicate requests, or both the primary and replica notifying the master of a shard failure). This change is made because such shard failure requests should always be considered successful (the failed shard is not there anymore), but could be marked as failed if batched with a shard failure request that does in fact fail. This avoids the possibility of an unexpected catastrophic failure while applying the failed shards from causing such a request to also be marked as failed setting in motion additional failures. 
Closes #16089 --- .../cluster/ClusterStateTaskExecutor.java | 2 +- .../shard/NoOpShardStateActionListener.java | 23 -- .../action/shard/ShardStateAction.java | 54 +++- .../cluster/IndicesClusterStateService.java | 4 +- ...rdFailedClusterStateTaskExecutorTests.java | 241 ++++++++++++++++++ .../action/shard/ShardStateActionTests.java | 41 ++- 6 files changed, 325 insertions(+), 40 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java create mode 100644 core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index fb22c2ca368..e5d3f06f1ec 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -120,7 +120,7 @@ public interface ClusterStateTaskExecutor { } public boolean isSuccess() { - return failure != null; + return this == SUCCESS; } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java deleted file mode 100644 index ed0a7f56b9c..00000000000 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.action.shard; - -public class NoOpShardStateActionListener implements ShardStateAction.Listener { -} diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 170d6fa0899..276edc9b23d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.MasterNodeChangePredicate; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; @@ -61,6 +62,8 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; @@ -209,12 +212,12 @@ public class ShardStateAction extends AbstractComponent { } } - private static class ShardFailedClusterStateTaskExecutor implements ClusterStateTaskExecutor { + static class ShardFailedClusterStateTaskExecutor implements ClusterStateTaskExecutor { private final 
AllocationService allocationService; private final RoutingService routingService; private final ESLogger logger; - public ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, ESLogger logger) { + ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, ESLogger logger) { this.allocationService = allocationService; this.routingService = routingService; this.logger = logger; @@ -223,23 +226,56 @@ public class ShardStateAction extends AbstractComponent { @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { BatchResult.Builder batchResultBuilder = BatchResult.builder(); - List failedShards = new ArrayList<>(tasks.size()); - for (ShardRoutingEntry task : tasks) { - failedShards.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure)); - } + + // partition tasks into those that correspond to shards + // that exist versus do not exist + Map> partition = + tasks.stream().collect(Collectors.partitioningBy(task -> shardExists(currentState, task))); + + // tasks that correspond to non-existent shards are marked + // as successful + batchResultBuilder.successes(partition.get(false)); + ClusterState maybeUpdatedState = currentState; + List tasksToFail = partition.get(true); try { - RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, failedShards); + List failedShards = + tasksToFail + .stream() + .map(task -> new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure)) + .collect(Collectors.toList()); + RoutingAllocation.Result result = applyFailedShards(currentState, failedShards); if (result.changed()) { maybeUpdatedState = ClusterState.builder(currentState).routingResult(result).build(); } - batchResultBuilder.successes(tasks); + batchResultBuilder.successes(tasksToFail); } catch (Throwable t) { - batchResultBuilder.failures(tasks, t); + // 
failures are communicated back to the requester + // cluster state will not be updated in this case + batchResultBuilder.failures(tasksToFail, t); } + return batchResultBuilder.build(maybeUpdatedState); } + // visible for testing + RoutingAllocation.Result applyFailedShards(ClusterState currentState, List failedShards) { + return allocationService.applyFailedShards(currentState, failedShards); + } + + private boolean shardExists(ClusterState currentState, ShardRoutingEntry task) { + RoutingNodes.RoutingNodeIterator routingNodeIterator = + currentState.getRoutingNodes().routingNodeIter(task.getShardRouting().currentNodeId()); + if (routingNodeIterator != null) { + for (ShardRouting maybe : routingNodeIterator) { + if (task.getShardRouting().isSameAllocation(maybe)) { + return true; + } + } + } + return false; + } + @Override public void clusterStatePublished(ClusterState newClusterState) { int numberOfUnassignedShards = newClusterState.getRoutingNodes().unassigned().size(); diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 6052d109565..34da596646d 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -27,7 +27,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; -import org.elasticsearch.cluster.action.shard.NoOpShardStateActionListener; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -92,7 +91,7 @@ public class IndicesClusterStateService extends 
AbstractLifecycleComponent tasks = Collections.emptyList(); + ClusterStateTaskExecutor.BatchResult result = + executor.execute(clusterState, tasks); + assertTasksSuccessful(tasks, result, clusterState, false); + } + + public void testDuplicateFailuresAreOkay() throws Exception { + String reason = "test duplicate failures are okay"; + ClusterState currentState = createClusterStateWithStartedShards(reason); + List tasks = createExistingShards(currentState, reason); + ClusterStateTaskExecutor.BatchResult result = executor.execute(currentState, tasks); + assertTasksSuccessful(tasks, result, clusterState, true); + } + + public void testNonExistentShardsAreMarkedAsSuccessful() throws Exception { + String reason = "test non existent shards are marked as successful"; + ClusterState currentState = createClusterStateWithStartedShards(reason); + List tasks = createNonExistentShards(currentState, reason); + ClusterStateTaskExecutor.BatchResult result = executor.execute(clusterState, tasks); + assertTasksSuccessful(tasks, result, clusterState, false); + } + + public void testTriviallySuccessfulTasksBatchedWithFailingTasks() throws Exception { + String reason = "test trivially successful tasks batched with failing tasks"; + ClusterState currentState = createClusterStateWithStartedShards(reason); + List failingTasks = createExistingShards(currentState, reason); + List nonExistentTasks = createNonExistentShards(currentState, reason); + ShardStateAction.ShardFailedClusterStateTaskExecutor failingExecutor = new ShardStateAction.ShardFailedClusterStateTaskExecutor(allocationService, null, logger) { + @Override + RoutingAllocation.Result applyFailedShards(ClusterState currentState, List failedShards) { + throw new RuntimeException("simulated applyFailedShards failure"); + } + }; + List tasks = new ArrayList<>(); + tasks.addAll(failingTasks); + tasks.addAll(nonExistentTasks); + ClusterStateTaskExecutor.BatchResult result = failingExecutor.execute(currentState, tasks); + Map 
taskResultMap = + failingTasks.stream().collect(Collectors.toMap(Function.identity(), task -> false)); + taskResultMap.putAll(nonExistentTasks.stream().collect(Collectors.toMap(Function.identity(), task -> true))); + assertTaskResults(taskResultMap, result, currentState, false); + } + + private ClusterState createClusterStateWithStartedShards(String reason) { + int numberOfNodes = 1 + numberOfReplicas; + DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(); + IntStream.rangeClosed(1, numberOfNodes).mapToObj(node -> newNode("node" + node)).forEach(nodes::put); + ClusterState stateAfterAddingNode = + ClusterState.builder(clusterState).nodes(nodes).build(); + RoutingTable afterReroute = + allocationService.reroute(stateAfterAddingNode, reason).routingTable(); + ClusterState stateAfterReroute = ClusterState.builder(stateAfterAddingNode).routingTable(afterReroute).build(); + RoutingNodes routingNodes = stateAfterReroute.getRoutingNodes(); + RoutingTable afterStart = + allocationService.applyStartedShards(stateAfterReroute, routingNodes.shardsWithState(ShardRoutingState.INITIALIZING)).routingTable(); + return ClusterState.builder(stateAfterReroute).routingTable(afterStart).build(); + } + + private List createExistingShards(ClusterState currentState, String reason) { + List shards = new ArrayList<>(); + GroupShardsIterator shardGroups = + currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true); + for (ShardIterator shardIt : shardGroups) { + for (ShardRouting shard : shardIt.asUnordered()) { + shards.add(shard); + } + } + List failures = randomSubsetOf(randomIntBetween(1, 1 + shards.size() / 4), shards.toArray(new ShardRouting[0])); + String indexUUID = metaData.index(INDEX).getIndexUUID(); + int numberOfTasks = randomIntBetween(failures.size(), 2 * failures.size()); + List shardsToFail = new ArrayList<>(numberOfTasks); + for (int i = 0; i < numberOfTasks; i++) { + shardsToFail.add(randomFrom(failures)); + } + return toTasks(shardsToFail, 
indexUUID, reason); + } + + private List createNonExistentShards(ClusterState currentState, String reason) { + // add shards from a non-existent index + MetaData nonExistentMetaData = + MetaData.builder() + .put(IndexMetaData.builder("non-existent").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(numberOfReplicas)) + .build(); + RoutingTable routingTable = RoutingTable.builder().addAsNew(nonExistentMetaData.index("non-existent")).build(); + String nonExistentIndexUUID = nonExistentMetaData.index("non-existent").getIndexUUID(); + + List existingShards = createExistingShards(currentState, reason); + List shardsWithMismatchedAllocationIds = new ArrayList<>(); + for (ShardStateAction.ShardRoutingEntry existingShard : existingShards) { + ShardRouting sr = existingShard.getShardRouting(); + ShardRouting nonExistentShardRouting = + TestShardRouting.newShardRouting(sr.index(), sr.id(), sr.currentNodeId(), sr.relocatingNodeId(), sr.restoreSource(), sr.primary(), sr.state(), sr.version()); + shardsWithMismatchedAllocationIds.add(new ShardStateAction.ShardRoutingEntry(nonExistentShardRouting, existingShard.indexUUID, existingShard.message, existingShard.failure)); + } + + List tasks = new ArrayList<>(); + tasks.addAll(toTasks(routingTable.allShards(), nonExistentIndexUUID, reason)); + tasks.addAll(shardsWithMismatchedAllocationIds); + return tasks; + } + + private static void assertTasksSuccessful( + List tasks, + ClusterStateTaskExecutor.BatchResult result, + ClusterState clusterState, + boolean clusterStateChanged + ) { + Map taskResultMap = + tasks.stream().collect(Collectors.toMap(Function.identity(), task -> true)); + assertTaskResults(taskResultMap, result, clusterState, clusterStateChanged); + } + + private static void assertTaskResults( + Map taskResultMap, + ClusterStateTaskExecutor.BatchResult result, + ClusterState clusterState, + boolean clusterStateChanged + ) { + // there should be as many task results as tasks + 
assertEquals(taskResultMap.size(), result.executionResults.size()); + + for (Map.Entry entry : taskResultMap.entrySet()) { + // every task should have a corresponding task result + assertTrue(result.executionResults.containsKey(entry.getKey())); + + // the task results are as expected + assertEquals(entry.getValue(), result.executionResults.get(entry.getKey()).isSuccess()); + } + + // every shard that we requested to be successfully failed is + // gone + List shards = clusterState.getRoutingTable().allShards(); + for (Map.Entry entry : taskResultMap.entrySet()) { + if (entry.getValue()) { + for (ShardRouting shard : shards) { + if (entry.getKey().getShardRouting().allocationId() != null) { + assertThat(shard.allocationId(), not(equalTo(entry.getKey().getShardRouting().allocationId()))); + } + } + } + } + + if (clusterStateChanged) { + assertNotSame(clusterState, result.resultingState); + } else { + assertSame(clusterState, result.resultingState); + } + } + + private static List toTasks(List shards, String indexUUID, String message) { + return shards + .stream() + .map(shard -> new ShardStateAction.ShardRoutingEntry(shard, indexUUID, message, new CorruptIndexException("simulated", indexUUID))) + .collect(Collectors.toList()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index c59405f2345..30d4e48551f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -27,19 +27,19 @@ import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingService; +import org.elasticsearch.cluster.routing.RoutingTable; import 
org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.NodeDisconnectedException; import org.elasticsearch.transport.NodeNotConnectedException; -import org.elasticsearch.transport.RemoteTransportException; -import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; @@ -48,8 +48,6 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import java.util.ArrayList; -import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -293,6 +291,41 @@ public class ShardStateActionTests extends ESTestCase { assertTrue(failure.get()); } + public void testShardNotFound() throws InterruptedException { + final String index = "test"; + + clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); + + String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); + + AtomicBoolean success = new AtomicBoolean(); + CountDownLatch latch = new CountDownLatch(1); + + ShardRouting failedShard = getRandomShardRouting(index); + RoutingTable routingTable = RoutingTable.builder(clusterService.state().getRoutingTable()).remove(index).build(); + clusterService.setState(ClusterState.builder(clusterService.state()).routingTable(routingTable)); + 
shardStateAction.shardFailed(failedShard, indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + success.set(true); + latch.countDown(); + } + + @Override + public void onFailure(Throwable t) { + success.set(false); + latch.countDown(); + assert false; + } + }); + + CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE); + + latch.await(); + assertTrue(success.get()); + } + private ShardRouting getRandomShardRouting(String index) { IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index); ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt(); From 15567506b28cbd4d16cea8cc5d57b4d1cdfb59d8 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 26 Jan 2016 22:37:50 +0100 Subject: [PATCH 339/347] ingest: The IngestDocument copy constructor should make a deep copy instead of shallow copy Closes #16246 --- .../ingest/core/IngestDocument.java | 31 ++++++++++++++++++- .../ingest/core/IngestDocumentTests.java | 29 +++++++++++++++-- 2 files changed, 56 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java index c8f87faa53e..4b0f6ac8662 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java @@ -81,7 +81,7 @@ public final class IngestDocument { * Copy constructor that creates a new {@link IngestDocument} which has exactly the same properties as the one provided as argument */ public IngestDocument(IngestDocument other) { - this(new HashMap<>(other.sourceAndMetadata), new HashMap<>(other.ingestMetadata)); + this(deepCopyMap(other.sourceAndMetadata), deepCopyMap(other.ingestMetadata)); } /** @@ 
-470,6 +470,35 @@ public final class IngestDocument { return this.sourceAndMetadata; } + @SuppressWarnings("unchecked") + private static Map deepCopyMap(Map source) { + return (Map) deepCopy(source); + } + + private static Object deepCopy(Object value) { + if (value instanceof Map) { + Map mapValue = (Map) value; + Map copy = new HashMap<>(mapValue.size()); + for (Map.Entry entry : mapValue.entrySet()) { + copy.put(entry.getKey(), deepCopy(entry.getValue())); + } + return copy; + } else if (value instanceof List) { + List listValue = (List) value; + List copy = new ArrayList<>(listValue.size()); + for (Object itemValue : listValue) { + copy.add(deepCopy(itemValue)); + } + return copy; + } else if (value == null || value instanceof String || value instanceof Integer || + value instanceof Long || value instanceof Float || + value instanceof Double || value instanceof Boolean) { + return value; + } else { + throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]"); + } + } + @Override public boolean equals(Object obj) { if (obj == this) { return true; } diff --git a/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java index 56d1fa76c64..1282c4adf1e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.core; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -970,7 +969,31 @@ public class IngestDocumentTests extends ESTestCase { public void testCopyConstructor() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); IngestDocument copy = new IngestDocument(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata(), 
not(sameInstance(copy.getSourceAndMetadata()))); - assertThat(ingestDocument.getSourceAndMetadata(), equalTo(copy.getSourceAndMetadata())); + recursiveEqualsButNotSameCheck(ingestDocument.getSourceAndMetadata(), copy.getSourceAndMetadata()); } + + private void recursiveEqualsButNotSameCheck(Object a, Object b) { + assertThat(a, not(sameInstance(b))); + assertThat(a, equalTo(b)); + if (a instanceof Map) { + Map mapA = (Map) a; + Map mapB = (Map) b; + for (Map.Entry entry : mapA.entrySet()) { + if (entry.getValue() instanceof List || entry.getValue() instanceof Map) { + recursiveEqualsButNotSameCheck(entry.getValue(), mapB.get(entry.getKey())); + } + } + } else if (a instanceof List) { + List listA = (List) a; + List listB = (List) b; + for (int i = 0; i < listA.size(); i++) { + Object value = listA.get(i); + if (value instanceof List || value instanceof Map) { + recursiveEqualsButNotSameCheck(value, listB.get(i)); + } + } + } + + } + } From ff0e8272cbf75aa41653deec0739a41bd6f68977 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Tue, 26 Jan 2016 14:12:42 -0800 Subject: [PATCH 340/347] [ingest] update test to verify that documents are deep-copied between verbose results --- .../test/ingest/40_simulate.yaml | 38 +++++++++++++++---- 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml index 3153ba85a59..e61ad4e60a6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml @@ -207,7 +207,7 @@ { "set" : { "tag" : "processor[set]-0", - "field" : "field2", + "field" : "field2.value", "value" : "_value" } }, @@ -216,6 +216,16 @@ "field" : "field3", "value" : "third_val" } + }, + { + "uppercase" : { + "field" : "field2.value" + } + }, + { + "lowercase" : { + "field" : "foo.bar.0.item" + } } ] }, @@ -225,25 
+235,39 @@ "_type": "type", "_id": "id", "_source": { - "foo": "bar" + "foo": { + "bar" : [ {"item": "HELLO"} ] + } } } ] } - length: { docs: 1 } - - length: { docs.0.processor_results: 2 } + - length: { docs.0.processor_results: 4 } - match: { docs.0.processor_results.0.tag: "processor[set]-0" } - length: { docs.0.processor_results.0.doc._source: 2 } - - match: { docs.0.processor_results.0.doc._source.foo: "bar" } - - match: { docs.0.processor_results.0.doc._source.field2: "_value" } + - match: { docs.0.processor_results.0.doc._source.foo.bar.0.item: "HELLO" } + - match: { docs.0.processor_results.0.doc._source.field2.value: "_value" } - length: { docs.0.processor_results.0.doc._ingest: 1 } - is_true: docs.0.processor_results.0.doc._ingest.timestamp - length: { docs.0.processor_results.1.doc._source: 3 } - - match: { docs.0.processor_results.1.doc._source.foo: "bar" } - - match: { docs.0.processor_results.1.doc._source.field2: "_value" } + - match: { docs.0.processor_results.1.doc._source.foo.bar.0.item: "HELLO" } + - match: { docs.0.processor_results.1.doc._source.field2.value: "_value" } - match: { docs.0.processor_results.1.doc._source.field3: "third_val" } - length: { docs.0.processor_results.1.doc._ingest: 1 } - is_true: docs.0.processor_results.1.doc._ingest.timestamp + - length: { docs.0.processor_results.2.doc._source: 3 } + - match: { docs.0.processor_results.2.doc._source.foo.bar.0.item: "HELLO" } + - match: { docs.0.processor_results.2.doc._source.field2.value: "_VALUE" } + - match: { docs.0.processor_results.2.doc._source.field3: "third_val" } + - length: { docs.0.processor_results.2.doc._ingest: 1 } + - is_true: docs.0.processor_results.2.doc._ingest.timestamp + - length: { docs.0.processor_results.3.doc._source: 3 } + - match: { docs.0.processor_results.3.doc._source.foo.bar.0.item: "hello" } + - match: { docs.0.processor_results.3.doc._source.field2.value: "_VALUE" } + - match: { docs.0.processor_results.3.doc._source.field3: "third_val" } + - 
length: { docs.0.processor_results.3.doc._ingest: 1 } + - is_true: docs.0.processor_results.3.doc._ingest.timestamp --- "Test simulate with exception thrown": From f959d39ac3a7dbc54f93764c4b3408d7c6f346d6 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 21 Jan 2016 10:24:35 +0100 Subject: [PATCH 341/347] Fix default doc values to be enabled when a field is not indexed. Doc values currently default to `true` if the field is indexed and not analyzed. So setting `index:no` automatically disables doc values, which is not explicit in the documentation. This commit makes doc values default to true for numerics, booleans regardless of whether they are indexed. Not indexed strings still don't have doc values, since we can't know whether it is rather a text or keyword field. This potential source of confusion should go away when we split `string` into `text` and `keyword`. --- .../index/mapper/FieldMapper.java | 19 +++-- .../index/mapper/core/BooleanFieldMapper.java | 4 +- .../index/mapper/core/StringFieldMapper.java | 1 + .../mapper/core/BooleanFieldMapperTests.java | 81 +++++++++++++++++++ .../mapper/numeric/SimpleNumericTests.java | 71 +++++++++++++--- .../string/SimpleStringMappingTests.java | 72 +++++++++++++++++ docs/reference/migration/migrate_3_0.asciidoc | 5 ++ 7 files changed, 231 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index d0d75709767..3725327c235 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import 
org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lucene.Lucene; @@ -223,6 +224,15 @@ public abstract class FieldMapper extends Mapper implements Cloneable { return context.path().pathAsText(name); } + protected boolean defaultDocValues(Version indexCreated) { + if (indexCreated.onOrAfter(Version.V_3_0_0)) { + // add doc values by default to keyword (boolean, numerics, etc.) fields + return fieldType.tokenized() == false; + } else { + return fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE; + } + } + protected void setupFieldType(BuilderContext context) { fieldType.setName(buildFullName(context)); if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) { @@ -233,14 +243,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build(); fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings)); } - boolean defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE; - // backcompat for "fielddata: format: docvalues" for now... 
- boolean fieldDataDocValues = fieldType.fieldDataType() != null - && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldType.fieldDataType().getFormat(context.indexSettings())); - if (fieldDataDocValues && docValuesSet && fieldType.hasDocValues() == false) { - // this forces the doc_values setting to be written, so fielddata does not mask the original setting - defaultDocValues = true; - } + boolean defaultDocValues = defaultDocValues(context.indexCreatedVersion()); defaultFieldType.setHasDocValues(defaultDocValues); if (docValuesSet == false) { fieldType.setHasDocValues(defaultDocValues || fieldDataDocValues); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java index 76f8eb34a71..482ff694f7c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java @@ -225,7 +225,9 @@ public class BooleanFieldMapper extends FieldMapper { if (value == null) { return; } - fields.add(new Field(fieldType().name(), value ? "T" : "F", fieldType())); + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { + fields.add(new Field(fieldType().name(), value ? "T" : "F", fieldType())); + } if (fieldType().hasDocValues()) { fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 
1 : 0)); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 46b4097c2c0..4471db8954d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java index 2175f2ce3e7..d66b1508dc3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReader; @@ -28,8 +29,11 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -39,10 +43,15 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.index.mapper.string.SimpleStringMappingTests; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.io.IOException; +import java.util.Collection; public class BooleanFieldMapperTests extends ESSingleNodeTestCase { @@ -55,6 +64,11 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { parser = indexService.mapperService().documentMapperParser(); } + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testDefaults() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject() @@ -135,4 +149,71 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { ParsedDocument doc = mapper.parse("test", "type", "1", source); assertNotNull(doc.rootDoc().getField("field.as_string")); } + + public void testDocValues() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("bool1") + .field("type", "boolean") + .endObject() + .startObject("bool2") + .field("type", "boolean") + .field("index", false) + .endObject() + .startObject("bool3") + 
.field("type", "boolean") + .field("index", true) + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + + ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("bool1", true) + .field("bool2", true) + .field("bool3", true) + .endObject() + .bytes()); + Document doc = parsedDoc.rootDoc(); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool1")); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool2")); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool3")); + } + + public void testBwCompatDocValues() throws Exception { + Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); + indexService = createIndex("test_old", oldIndexSettings); + parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("bool1") + .field("type", "boolean") + .endObject() + .startObject("bool2") + .field("type", "boolean") + .field("index", "no") + .endObject() + .startObject("bool3") + .field("type", "boolean") + .field("index", "not_analyzed") + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + + ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("bool1", true) + .field("bool2", true) + .field("bool3", true) + .endObject() + .bytes()); + Document doc = parsedDoc.rootDoc(); + assertEquals(DocValuesType.SORTED_NUMERIC, 
SimpleStringMappingTests.docValuesType(doc, "bool1")); + assertEquals(DocValuesType.NONE, SimpleStringMappingTests.docValuesType(doc, "bool2")); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool3")); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index e68817e9ea0..1c024a0d549 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -281,17 +281,19 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { public void testDocValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") - .startObject("int") + .startObject("int1") .field("type", "integer") - .startObject("fielddata") - .field("format", "doc_values") - .endObject() .endObject() - .startObject("double") + .startObject("int2") + .field("type", "integer") + .field("index", false) + .endObject() + .startObject("double1") .field("type", "double") - .startObject("fielddata") - .field("format", "doc_values") - .endObject() + .endObject() + .startObject("double2") + .field("type", "integer") + .field("index", false) .endObject() .endObject() .endObject().endObject().string(); @@ -300,13 +302,56 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() - .field("int", "1234") - .field("double", "1234") + .field("int1", "1234") + .field("double1", "1234") + .field("int2", "1234") + .field("double2", "1234") .endObject() .bytes()); - final Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int")); - 
assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double")); + Document doc = parsedDoc.rootDoc(); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int1")); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double1")); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int2")); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double2")); + + } + + public void testBwCompatDocValues() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("int1") + .field("type", "integer") + .endObject() + .startObject("int2") + .field("type", "integer") + .field("index", "no") + .endObject() + .startObject("double1") + .field("type", "double") + .endObject() + .startObject("double2") + .field("type", "integer") + .field("index", "no") + .endObject() + .endObject() + .endObject().endObject().string(); + + Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); + DocumentMapper defaultMapper = createIndex("test", oldIndexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + + ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("int1", "1234") + .field("double1", "1234") + .field("int2", "1234") + .field("double2", "1234") + .endObject() + .bytes()); + Document doc = parsedDoc.rootDoc(); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int1")); + assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double1")); + assertEquals(DocValuesType.NONE, SimpleStringMappingTests.docValuesType(doc, "int2")); + assertEquals(DocValuesType.NONE, 
SimpleStringMappingTests.docValuesType(doc, "double2")); } public void testDocValuesOnNested() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index d32dcad5434..1f3ba975d02 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -45,11 +45,14 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper.Builder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.junit.Before; import java.util.Arrays; +import java.util.Collection; import java.util.Map; import static java.util.Collections.emptyMap; @@ -63,6 +66,11 @@ import static org.hamcrest.Matchers.nullValue; public class SimpleStringMappingTests extends ESSingleNodeTestCase { private static Settings DOC_VALUES_SETTINGS = Settings.builder().put(FieldDataType.FORMAT_KEY, FieldDataType.DOC_VALUES_FORMAT_VALUE).build(); + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + IndexService indexService; DocumentMapperParser parser; @@ -417,6 +425,70 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + 
.startObject("properties") + .startObject("str1") + .field("type", "string") + .field("index", "no") + .endObject() + .startObject("str2") + .field("type", "string") + .field("index", "not_analyzed") + .endObject() + .startObject("str3") + .field("type", "string") + .field("index", "analyzed") + .endObject() + .startObject("str4") + .field("type", "string") + .field("index", "not_analyzed") + .field("doc_values", false) + .endObject() + .startObject("str5") + .field("type", "string") + .field("index", "no") + .field("doc_values", false) + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); + + ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("str1", "1234") + .field("str2", "1234") + .field("str3", "1234") + .field("str4", "1234") + .field("str5", "1234") + .endObject() + .bytes()); + final Document doc = parsedDoc.rootDoc(); + assertEquals(DocValuesType.NONE, docValuesType(doc, "str1")); + assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2")); + assertEquals(DocValuesType.NONE, docValuesType(doc, "str3")); + assertEquals(DocValuesType.NONE, docValuesType(doc, "str4")); + assertEquals(DocValuesType.NONE, docValuesType(doc, "str5")); + + } + + public void testBwCompatDocValues() throws Exception { + Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); + indexService = createIndex("test_old", oldIndexSettings); + parser = indexService.mapperService().documentMapperParser(); + // doc values only work on non-analyzed content + final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); + try { + new StringFieldMapper.Builder("anything").docValues(true).build(ctx); + fail(); + } catch (Exception e) { /* OK */ } + + assertFalse(new 
Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues()); + assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues()); + assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("str1") diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 699805ac7e1..9b7de2aff19 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -257,6 +257,11 @@ amount of heap as long as the total indexing buffer heap used across all shards [[breaking_30_mapping_changes]] === Mapping changes +==== Default doc values settings + +Doc values are now also on by default on numeric and boolean fields that are +not indexed. + ==== Transform removed The `transform` feature from mappings has been removed. It made issues very hard to debug. From 35709f62b65cde39f8ba420559d091e247a6adce Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 21 Jan 2016 14:22:20 +0100 Subject: [PATCH 342/347] Be stricter about parsing boolean values in mappings. Parsing is currently very lenient, which has the bad side-effect that if you have a typo and pass eg. `store: fasle` this will actually be interpreted as `store: true`. Since mappings can't be changed after the fact, it is quite bad if it happens on an index that already contains data. 
Note that this does not cover all settings that accept a boolean, but since the PR was quite hard to build and already covers some main settirgs like `store` or `doc_values` this would already be a good incremental improvement. --- .../create/CreateSnapshotRequest.java | 6 +- .../restore/RestoreSnapshotRequest.java | 8 +- .../action/support/IndicesOptions.java | 6 +- .../cluster/metadata/MappingMetaData.java | 8 +- .../xcontent/support/XContentMapValues.java | 23 +++++- .../index/mapper/core/BooleanFieldMapper.java | 4 +- .../index/mapper/core/TypeParsers.java | 51 ++++++++----- .../mapper/geo/BaseGeoPointFieldMapper.java | 10 +-- .../mapper/geo/GeoPointFieldMapperLegacy.java | 2 +- .../index/mapper/geo/GeoShapeFieldMapper.java | 6 +- .../index/mapper/internal/AllFieldMapper.java | 6 +- .../internal/FieldNamesFieldMapper.java | 4 +- .../mapper/internal/RoutingFieldMapper.java | 4 +- .../mapper/internal/SourceFieldMapper.java | 4 +- .../index/mapper/internal/TTLFieldMapper.java | 4 +- .../mapper/internal/TimestampFieldMapper.java | 6 +- .../index/mapper/object/ObjectMapper.java | 12 +-- .../index/mapper/object/RootObjectMapper.java | 6 +- .../action/search/RestMultiSearchAction.java | 4 +- .../action/termvectors/GetTermVectorsIT.java | 2 +- .../cluster/SimpleClusterStateIT.java | 4 +- .../org/elasticsearch/get/GetActionIT.java | 6 +- .../index/mapper/DynamicMappingTests.java | 2 +- .../mapper/binary/BinaryMappingTests.java | 2 +- .../ExternalValuesMapperIntegrationIT.java | 4 +- .../SimpleExternalMappingTests.java | 4 +- .../mapper/geo/GeoPointFieldMapperTests.java | 12 +-- .../lucene/StoredNumericValuesTests.java | 6 +- .../mapping/ConcurrentDynamicTemplateIT.java | 2 +- .../template/IndexTemplateBlocksIT.java | 4 +- .../template/SimpleIndexTemplateIT.java | 32 ++++---- .../search/highlight/HighlighterSearchIT.java | 74 +++++++++---------- .../search/innerhits/InnerHitsIT.java | 8 +- .../search/nested/SimpleNestedIT.java | 2 +- .../index/mapper/all/mapping.json | 4 
+- .../mapping_boost_omit_positions_on_all.json | 4 +- .../mapper/all/mapping_offsets_on_all.json | 4 +- .../all/mapping_omit_positions_on_all.json | 4 +- .../index/mapper/all/noboost-mapping.json | 4 +- .../index/mapper/all/store-mapping.json | 6 +- .../genericstore/test-mapping.json | 2 +- .../pathmatch/test-mapping.json | 6 +- .../dynamictemplate/simple/test-mapping.json | 4 +- .../multifield/merge/test-mapping1.json | 2 +- .../multifield/merge/test-mapping2.json | 6 +- .../multifield/merge/test-mapping3.json | 8 +- .../multifield/merge/test-mapping4.json | 4 +- .../mapper/multifield/merge/upgrade1.json | 4 +- .../mapper/multifield/merge/upgrade2.json | 6 +- .../mapper/multifield/merge/upgrade3.json | 2 +- ...est-multi-field-type-no-default-field.json | 4 +- .../mapper/multifield/test-multi-fields.json | 12 +-- .../index/mapper/simple/test-mapping.json | 4 +- .../messy/tests/SearchFieldsTests.java | 28 +++---- .../messy/tests/SimpleSortTests.java | 2 +- .../messy/tests/SuggestSearchTests.java | 10 +-- .../unit/simple/test-mapping-all-fields.json | 14 ++-- .../test/mapper_attachments/30_mapping.yaml | 4 +- .../test/mapper_attachments/40_highlight.yaml | 2 +- .../index/mapper/size/SizeFieldMapper.java | 6 +- 60 files changed, 262 insertions(+), 232 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 41d3f9c3593..13c706531b7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -45,7 +45,7 @@ import static org.elasticsearch.common.Strings.hasLength; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; 
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; /** * Create snapshot request @@ -379,14 +379,14 @@ public class CreateSnapshotRequest extends MasterNodeRequest) entry.getValue()); } else if (name.equals("include_global_state")) { - includeGlobalState = nodeBooleanValue(entry.getValue()); + includeGlobalState = lenientNodeBooleanValue(entry.getValue()); } } indicesOptions(IndicesOptions.fromMap((Map) source, IndicesOptions.lenientExpandOpen())); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 0f79ceb0f96..59dad56531e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -43,7 +43,7 @@ import static org.elasticsearch.common.Strings.hasLength; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; /** * Restore snapshot request @@ -498,16 +498,16 @@ public class RestoreSnapshotRequest extends MasterNodeRequest) entry.getValue()); } else if (name.equals("include_global_state")) { - includeGlobalState = nodeBooleanValue(entry.getValue()); + includeGlobalState = lenientNodeBooleanValue(entry.getValue()); } else if 
(name.equals("include_aliases")) { - includeAliases = nodeBooleanValue(entry.getValue()); + includeAliases = lenientNodeBooleanValue(entry.getValue()); } else if (name.equals("rename_pattern")) { if (entry.getValue() instanceof String) { renamePattern((String) entry.getValue()); diff --git a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 793dbe08401..2bc49f7e9f8 100644 --- a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -26,7 +26,7 @@ import org.elasticsearch.rest.RestRequest; import java.io.IOException; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue; /** @@ -195,8 +195,8 @@ public class IndicesOptions { //note that allowAliasesToMultipleIndices is not exposed, always true (only for internal use) return fromOptions( - nodeBooleanValue(ignoreUnavailableString, defaultSettings.ignoreUnavailable()), - nodeBooleanValue(allowNoIndicesString, defaultSettings.allowNoIndices()), + lenientNodeBooleanValue(ignoreUnavailableString, defaultSettings.ignoreUnavailable()), + lenientNodeBooleanValue(allowNoIndicesString, defaultSettings.allowNoIndices()), expandWildcardsOpen, expandWildcardsClosed, defaultSettings.allowAliasesToMultipleIndices(), diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index a26e95c40e0..a88f1609b9e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -41,7 
+41,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; /** * Mapping configuration for a type. @@ -237,7 +237,7 @@ public class MappingMetaData extends AbstractDiffable { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("required")) { - required = nodeBooleanValue(fieldNode); + required = lenientNodeBooleanValue(fieldNode); } } this.routing = new Routing(required); @@ -254,13 +254,13 @@ public class MappingMetaData extends AbstractDiffable { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - enabled = nodeBooleanValue(fieldNode); + enabled = lenientNodeBooleanValue(fieldNode); } else if (fieldName.equals("format")) { format = fieldNode.toString(); } else if (fieldName.equals("default") && fieldNode != null) { defaultTimestamp = fieldNode.toString(); } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = nodeBooleanValue(fieldNode); + ignoreMissing = lenientNodeBooleanValue(fieldNode); } } this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 73f16b2e789..4612d3f05d0 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -347,14 +347,20 @@ public class XContentMapValues { return Long.parseLong(node.toString()); } - public static boolean nodeBooleanValue(Object node, boolean defaultValue) { + /** + * This method is very lenient, use {@link 
#nodeBooleanValue} instead. + */ + public static boolean lenientNodeBooleanValue(Object node, boolean defaultValue) { if (node == null) { return defaultValue; } - return nodeBooleanValue(node); + return lenientNodeBooleanValue(node); } - public static boolean nodeBooleanValue(Object node) { + /** + * This method is very lenient, use {@link #nodeBooleanValue} instead. + */ + public static boolean lenientNodeBooleanValue(Object node) { if (node instanceof Boolean) { return (Boolean) node; } @@ -365,6 +371,17 @@ public class XContentMapValues { return !(value.equals("false") || value.equals("0") || value.equals("off")); } + public static boolean nodeBooleanValue(Object node) { + switch (node.toString()) { + case "true": + return true; + case "false": + return false; + default: + throw new IllegalArgumentException("Can't parse boolean value [" + node + "], expected [true] or [false]"); + } + } + public static TimeValue nodeTimeValue(Object node, TimeValue defaultValue) { if (node == null) { return defaultValue; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java index 482ff694f7c..29d2ce2176f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java @@ -40,7 +40,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.booleanField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; @@ -106,7 +106,7 @@ public class BooleanFieldMapper extends FieldMapper { if 
(propNode == null) { throw new MapperParsingException("Property [null_value] cannot be null."); } - builder.nullValue(nodeBooleanValue(propNode)); + builder.nullValue(lenientNodeBooleanValue(propNode)); iterator.remove(); } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index d7f3570a53c..e3df1f7dbf1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; @@ -45,7 +46,7 @@ import java.util.Map; import java.util.Map.Entry; import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue; @@ -62,6 +63,14 @@ public class TypeParsers { public static final String INDEX_OPTIONS_POSITIONS = "positions"; public static final String INDEX_OPTIONS_OFFSETS = "offsets"; + private static boolean nodeBooleanValue(Object node, 
Mapper.TypeParser.ParserContext parserContext) { + if (parserContext.indexVersionCreated().onOrAfter(Version.V_3_0_0)) { + return XContentMapValues.nodeBooleanValue(node); + } else { + return XContentMapValues.lenientNodeBooleanValue(node); + } + } + public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map numberNode, Mapper.TypeParser.ParserContext parserContext) { parseField(builder, name, numberNode, parserContext); for (Iterator> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) { @@ -72,13 +81,13 @@ public class TypeParsers { builder.precisionStep(nodeIntegerValue(propNode)); iterator.remove(); } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(nodeBooleanValue(propNode)); + builder.ignoreMalformed(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("coerce")) { - builder.coerce(nodeBooleanValue(propNode)); + builder.coerce(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("omit_norms")) { - builder.omitNorms(nodeBooleanValue(propNode)); + builder.omitNorms(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("similarity")) { SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); @@ -102,16 +111,16 @@ public class TypeParsers { parseTermVector(name, propNode.toString(), builder); iterator.remove(); } else if (propName.equals("store_term_vectors")) { - builder.storeTermVectors(nodeBooleanValue(propNode)); + builder.storeTermVectors(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("store_term_vector_offsets")) { - builder.storeTermVectorOffsets(nodeBooleanValue(propNode)); + builder.storeTermVectorOffsets(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("store_term_vector_positions")) { - builder.storeTermVectorPositions(nodeBooleanValue(propNode)); + 
builder.storeTermVectorPositions(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("store_term_vector_payloads")) { - builder.storeTermVectorPayloads(nodeBooleanValue(propNode)); + builder.storeTermVectorPayloads(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("analyzer")) { NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); @@ -160,19 +169,19 @@ public class TypeParsers { final String propName = Strings.toUnderscoreCase(entry.getKey()); final Object propNode = entry.getValue(); if (propName.equals("store")) { - builder.store(parseStore(name, propNode.toString())); + builder.store(parseStore(name, propNode.toString(), parserContext)); iterator.remove(); } else if (propName.equals("index")) { parseIndex(name, propNode.toString(), builder); iterator.remove(); } else if (propName.equals(DOC_VALUES)) { - builder.docValues(nodeBooleanValue(propNode)); + builder.docValues(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("boost")) { builder.boost(nodeFloatValue(propNode)); iterator.remove(); } else if (propName.equals("omit_norms")) { - builder.omitNorms(nodeBooleanValue(propNode)); + builder.omitNorms(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("norms")) { final Map properties = nodeMapValue(propNode, "norms"); @@ -181,7 +190,7 @@ public class TypeParsers { final String propName2 = Strings.toUnderscoreCase(entry2.getKey()); final Object propNode2 = entry2.getValue(); if (propName2.equals("enabled")) { - builder.omitNorms(!nodeBooleanValue(propNode2)); + builder.omitNorms(!lenientNodeBooleanValue(propNode2)); propsIterator.remove(); } else if (propName2.equals(Loading.KEY)) { builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null)); @@ -194,7 +203,7 @@ public class TypeParsers { builder.indexOptions(nodeIndexOptionValue(propNode)); iterator.remove(); 
} else if (propName.equals("include_in_all")) { - builder.includeInAll(nodeBooleanValue(propNode)); + builder.includeInAll(nodeBooleanValue(propNode, parserContext)); iterator.remove(); } else if (propName.equals("similarity")) { SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); @@ -334,13 +343,17 @@ public class TypeParsers { } } - public static boolean parseStore(String fieldName, String store) throws MapperParsingException { - if ("no".equals(store)) { - return false; - } else if ("yes".equals(store)) { - return true; + public static boolean parseStore(String fieldName, String store, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException { + if (parserContext.indexVersionCreated().onOrAfter(Version.V_3_0_0)) { + return XContentMapValues.nodeBooleanValue(store); } else { - return nodeBooleanValue(store); + if ("no".equals(store)) { + return false; + } else if ("yes".equals(store)) { + return true; + } else { + return lenientNodeBooleanValue(store); + } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 29a2aca7bec..0a992aeb27a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -199,17 +199,17 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr String propName = Strings.toUnderscoreCase(entry.getKey()); Object propNode = entry.getValue(); if (propName.equals("lat_lon")) { - builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode)); + builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode)); iterator.remove(); } else if (propName.equals("precision_step")) { builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode)); iterator.remove(); } else if 
(propName.equals("geohash")) { - builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode)); + builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode)); iterator.remove(); } else if (propName.equals("geohash_prefix")) { - builder.geoHashPrefix(XContentMapValues.nodeBooleanValue(propNode)); - if (XContentMapValues.nodeBooleanValue(propNode)) { + builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode)); + if (XContentMapValues.lenientNodeBooleanValue(propNode)) { builder.enableGeoHash(true); } iterator.remove(); @@ -221,7 +221,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } iterator.remove(); } else if (propName.equals(Names.IGNORE_MALFORMED)) { - builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode)); + builder.ignoreMalformed(XContentMapValues.lenientNodeBooleanValue(propNode)); iterator.remove(); } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index c008be6f673..dcd57a42dc5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -132,7 +132,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement String propName = Strings.toUnderscoreCase(entry.getKey()); Object propNode = entry.getValue(); if (propName.equals(Names.COERCE)) { - builder.coerce = XContentMapValues.nodeBooleanValue(propNode); + builder.coerce = XContentMapValues.lenientNodeBooleanValue(propNode); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 
0de2cd2b60b..c98744bb759 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -52,7 +52,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.geoShapeField; @@ -184,11 +184,11 @@ public class GeoShapeFieldMapper extends FieldMapper { builder.fieldType().setStrategyName(fieldNode.toString()); iterator.remove(); } else if (Names.COERCE.equals(fieldName)) { - builder.coerce(nodeBooleanValue(fieldNode)); + builder.coerce(lenientNodeBooleanValue(fieldNode)); iterator.remove(); } else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName) && builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) == false) { - builder.fieldType().setPointsOnly(XContentMapValues.nodeBooleanValue(fieldNode)); + builder.fieldType().setPointsOnly(XContentMapValues.lenientNodeBooleanValue(fieldNode)); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index d9a345caf28..97c2fa3933b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -46,7 +46,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue; import static 
org.elasticsearch.index.mapper.core.TypeParsers.parseTextField; @@ -133,7 +133,7 @@ public class AllFieldMapper extends MetadataFieldMapper { // the AllFieldMapper ctor in the builder since it is not valid. Here we validate // the doc values settings (old and new) are rejected Object docValues = node.get("doc_values"); - if (docValues != null && nodeBooleanValue(docValues)) { + if (docValues != null && lenientNodeBooleanValue(docValues)) { throw new MapperParsingException("Field [" + name + "] is always tokenized and cannot have doc values"); } // convoluted way of specifying doc values @@ -152,7 +152,7 @@ public class AllFieldMapper extends MetadataFieldMapper { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); + builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index 17d1c2b9f08..03ebcb9fe95 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -40,7 +40,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; /** * A mapper that indexes the field names of a document under _field_names. 
This mapper is typically useful in order @@ -112,7 +112,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(nodeBooleanValue(fieldNode)); + builder.enabled(lenientNodeBooleanValue(fieldNode)); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index ee06b51ecfc..b1d24e53ab5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -38,7 +38,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; /** * @@ -95,7 +95,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("required")) { - builder.required(nodeBooleanValue(fieldNode)); + builder.required(lenientNodeBooleanValue(fieldNode)); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index b0de09edafb..1925b2b2faa 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -51,7 +51,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import 
static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; /** * @@ -122,7 +122,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(nodeBooleanValue(fieldNode)); + builder.enabled(lenientNodeBooleanValue(fieldNode)); iterator.remove(); } else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_3_0_0)) { // ignore on old indices, reject on and after 3.0 diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 4612b9fb85f..dbf63a7f801 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -44,7 +44,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue; public class TTLFieldMapper extends MetadataFieldMapper { @@ -108,7 +108,7 @@ public class TTLFieldMapper extends MetadataFieldMapper { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; + EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; builder.enabled(enabledState); iterator.remove(); } else if (fieldName.equals("default")) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index e750f973add..570155a79c5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -43,7 +43,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; public class TimestampFieldMapper extends MetadataFieldMapper { @@ -134,7 +134,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; + EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; builder.enabled(enabledState); iterator.remove(); } else if (fieldName.equals("format")) { @@ -149,7 +149,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper { } iterator.remove(); } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = nodeBooleanValue(fieldNode); + ignoreMissing = lenientNodeBooleanValue(fieldNode); builder.ignoreMissing(ignoreMissing); iterator.remove(); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index 9f3b503ab49..b5934a40116 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -49,7 +49,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.object; /** @@ -191,11 +191,11 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, if (value.equalsIgnoreCase("strict")) { builder.dynamic(Dynamic.STRICT); } else { - builder.dynamic(nodeBooleanValue(fieldNode) ? Dynamic.TRUE : Dynamic.FALSE); + builder.dynamic(lenientNodeBooleanValue(fieldNode) ? 
Dynamic.TRUE : Dynamic.FALSE); } return true; } else if (fieldName.equals("enabled")) { - builder.enabled(nodeBooleanValue(fieldNode)); + builder.enabled(lenientNodeBooleanValue(fieldNode)); return true; } else if (fieldName.equals("properties")) { if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) { @@ -207,7 +207,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, } return true; } else if (fieldName.equals("include_in_all")) { - builder.includeInAll(nodeBooleanValue(fieldNode)); + builder.includeInAll(lenientNodeBooleanValue(fieldNode)); return true; } return false; @@ -230,12 +230,12 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, } fieldNode = node.get("include_in_parent"); if (fieldNode != null) { - nestedIncludeInParent = nodeBooleanValue(fieldNode); + nestedIncludeInParent = lenientNodeBooleanValue(fieldNode); node.remove("include_in_parent"); } fieldNode = node.get("include_in_root"); if (fieldNode != null) { - nestedIncludeInRoot = nodeBooleanValue(fieldNode); + nestedIncludeInRoot = lenientNodeBooleanValue(fieldNode); node.remove("include_in_root"); } if (nested) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index 64a60305b10..5e87130865c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -42,7 +42,7 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; /** @@ -189,10 +189,10 @@ public class RootObjectMapper extends 
ObjectMapper { } return true; } else if (fieldName.equals("date_detection")) { - ((Builder) builder).dateDetection = nodeBooleanValue(fieldNode); + ((Builder) builder).dateDetection = lenientNodeBooleanValue(fieldNode); return true; } else if (fieldName.equals("numeric_detection")) { - ((Builder) builder).numericDetection = nodeBooleanValue(fieldNode); + ((Builder) builder).numericDetection = lenientNodeBooleanValue(fieldNode); return true; } return false; diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index ff51263e08b..72ff389fa08 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -46,7 +46,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -159,7 +159,7 @@ public class RestMultiSearchAction extends BaseRestHandler { } else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) { searchRequest.searchType(nodeStringValue(value, null)); } else if ("request_cache".equals(entry.getKey()) || "requestCache".equals(entry.getKey())) { - searchRequest.requestCache(nodeBooleanValue(value)); + searchRequest.requestCache(lenientNodeBooleanValue(value)); } else if ("preference".equals(entry.getKey())) { searchRequest.preference(nodeStringValue(value, null)); } else if ("routing".equals(entry.getKey())) { diff 
--git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 0c542698b5f..16fd9f4b718 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -152,7 +152,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { .addMapping("type1", "field0", "type=integer,", // no tvs "field1", "type=string,index=no", // no tvs - "field2", "type=string,index=no,store=yes", // no tvs + "field2", "type=string,index=no,store=true", // no tvs "field3", "type=string,index=no,term_vector=yes", // no tvs "field4", "type=string,index=not_analyzed", // yes tvs "field5", "type=string,index=analyzed")); // yes tvs diff --git a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java index d78356cbf64..e838989e2ac 100644 --- a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java @@ -90,8 +90,8 @@ public class SimpleClusterStateIT extends ESIntegTestCase { .setTemplate("te*") .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .get(); diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 
dee48c5f288..43a4e4f1470 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -253,12 +253,12 @@ public class GetActionIT extends ESIntegTestCase { public void testGetDocWithMultivaluedFields() throws Exception { String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") - .startObject("field").field("type", "string").field("store", "yes").endObject() + .startObject("field").field("type", "string").field("store", true).endObject() .endObject() .endObject().endObject().string(); String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") - .startObject("field").field("type", "string").field("store", "yes").endObject() + .startObject("field").field("type", "string").field("store", true).endObject() .endObject() .endObject().endObject().string(); assertAcked(prepareCreate("test") @@ -751,7 +751,7 @@ public class GetActionIT extends ESIntegTestCase { .startObject("field1").field("type", "object").startObject("properties") .startObject("field2").field("type", "object").startObject("properties") .startObject("field3").field("type", "object").startObject("properties") - .startObject("field4").field("type", "string").field("store", "yes") + .startObject("field4").field("type", "string").field("store", true) .endObject().endObject() .endObject().endObject() .endObject().endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 1a4fb0d9c4c..6de49877ce1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -374,7 +374,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { public void testReuseExistingMappings() throws IOException, Exception 
{ IndexService indexService = createIndex("test", Settings.EMPTY, "type", - "my_field1", "type=string,store=yes", + "my_field1", "type=string,store=true", "my_field2", "type=integer,precision_step=10", "my_field3", "type=long,doc_values=false", "my_field4", "type=float,index_options=freqs", diff --git a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java index 308478ad49d..7be0cc8031b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java @@ -63,7 +63,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase { .startObject("properties") .startObject("field") .field("type", "binary") - .field("store", "yes") + .field("store", true) .endObject() .endObject() .endObject().endObject().string(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java index 7e519c3b722..f581f1f6a41 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java @@ -89,12 +89,12 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { .startObject("fields") .startObject("g") .field("type", "string") - .field("store", "yes") + .field("store", true) .startObject("fields") .startObject("raw") .field("type", "string") .field("index", "not_analyzed") - .field("store", "yes") + .field("store", true) .endObject() .endObject() .endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java index c42924132ff..96c099df6d3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java @@ -118,12 +118,12 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { .startObject("fields") .startObject("field") .field("type", "string") - .field("store", "yes") + .field("store", true) .startObject("fields") .startObject("raw") .field("type", "string") .field("index", "not_analyzed") - .field("store", "yes") + .field("store", true) .endObject() .endObject() .endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 2ea19b02450..db5781a77eb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -330,7 +330,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testLatLonValuesStored() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", "yes").endObject().endObject().endObject().endObject().string(); + .field("store", true).endObject().endObject().endObject().endObject().string(); Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -357,7 +357,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testArrayLatLonValues() throws Exception { String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", "yes").endObject().endObject().endObject().endObject().string(); + .field("store", true).endObject().endObject().endObject().endObject().string(); Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -416,7 +416,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testLatLonInOneValueStored() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject() + .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", true).endObject().endObject() .endObject().endObject().string(); Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); @@ -443,7 +443,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testLatLonInOneValueArray() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", "yes").endObject().endObject().endObject().endObject().string(); + .field("store", true).endObject().endObject().endObject().endObject().string(); Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -528,7 +528,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void 
testLonLatArrayStored() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", "yes").endObject().endObject().endObject().endObject().string(); + .field("store", true).endObject().endObject().endObject().endObject().string(); Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -554,7 +554,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testLonLatArrayArrayStored() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", "yes").endObject().endObject().endObject().endObject().string(); + .field("store", true).endObject().endObject().endObject().endObject().string(); Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java index 89e6630ffa2..d74b445ebbd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java @@ -54,9 +54,9 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { .startObject() .startObject("type") .startObject("properties") - .startObject("field1").field("type", "integer").field("store", "yes").endObject() - .startObject("field2").field("type", "float").field("store", 
"yes").endObject() - .startObject("field3").field("type", "long").field("store", "yes").endObject() + .startObject("field1").field("type", "integer").field("store", true).endObject() + .startObject("field2").field("type", "float").field("store", true).endObject() + .startObject("field3").field("type", "long").field("store", true).endObject() .endObject() .endObject() .endObject() diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java index 0946d51a45c..feb9863ec1e 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -44,7 +44,7 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase { final String fieldName = "field"; final String mapping = "{ \"" + mappingType + "\": {" + "\"dynamic_templates\": [" - + "{ \"" + fieldName + "\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"string\"," + "\"store\": \"yes\"," + + "{ \"" + fieldName + "\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"string\"," + "\"store\": true," + "\"index\": \"analyzed\", \"analyzer\": \"whitespace\" } } } ] } }"; // The 'fieldNames' array is used to help with retrieval of index terms // after testing diff --git a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java index c46c0385292..11e2d7d2ac4 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java @@ -38,8 +38,8 @@ public class IndexTemplateBlocksIT extends ESIntegTestCase { .setTemplate("te*") .setOrder(0) .addMapping("type1", 
XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 0a51f5d4e2c..63db0450551 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -78,8 +78,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setSettings(indexSettings()) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .get(); @@ -88,7 +88,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setSettings(indexSettings()) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "string").field("store", "no").endObject() + .startObject("field2").field("type", "string").field("store", 
false).endObject() .endObject().endObject().endObject()) .get(); @@ -99,7 +99,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setCreate(true) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "string").field("store", "no").endObject() + .startObject("field2").field("type", "string").field("store", false).endObject() .endObject().endObject().endObject()) , IndexTemplateAlreadyExistsException.class ); @@ -145,8 +145,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setTemplate("te*") .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .execute().actionGet(); @@ -170,8 +170,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setTemplate("te*") .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .execute().actionGet(); @@ -190,8 +190,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setTemplate("te*") 
.setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .execute().actionGet(); @@ -213,8 +213,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setTemplate("te*") .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .execute().actionGet(); @@ -223,8 +223,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setTemplate("te*") .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .execute().actionGet(); @@ -233,8 +233,8 @@ public class SimpleIndexTemplateIT 
extends ESIntegTestCase { .setTemplate("te*") .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject() .endObject().endObject().endObject()) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 10c7c54b83b..fc7a779b407 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -174,13 +174,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .field("type", "string") - .field("store", "no") + .field("store", false) .endObject() .startObject("text") .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .field("type", "string") - .field("store", "yes") + .field("store", true) .endObject() .endObject() .endObject(); @@ -205,7 +205,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { // see #3486 public void testHighTermFrequencyDoc() throws IOException { assertAcked(prepareCreate("test") - .addMapping("test", "name", "type=string,term_vector=with_positions_offsets,store=" + (randomBoolean() ? 
"yes" : "no"))); + .addMapping("test", "name", "type=string,term_vector=with_positions_offsets,store=" + randomBoolean())); ensureYellow(); StringBuilder builder = new StringBuilder(); for (int i = 0; i < 6000; i++) { @@ -471,8 +471,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title and don't use term vector, now lets see if it works... - .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").endObject() - .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("term_vector", "no").endObject().endObject().endObject() + .startObject("title").field("type", "string").field("store", false).field("term_vector", "no").endObject() + .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("term_vector", "no").endObject().endObject().endObject() .endObject().endObject().endObject())); ensureYellow(); @@ -510,8 +510,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title, now lets see if it works... 
- .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").endObject() - .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").endObject().endObject().endObject() + .startObject("title").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").endObject() + .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").endObject().endObject().endObject() .endObject().endObject().endObject())); ensureYellow(); @@ -549,8 +549,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title, now lets see if it works... - .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").endObject() - .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("index_options", "offsets").endObject().endObject().endObject() + .startObject("title").field("type", "string").field("store", false).field("index_options", "offsets").endObject() + .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("index_options", "offsets").endObject().endObject().endObject() .endObject().endObject().endObject())); ensureYellow(); @@ -598,7 +598,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testHighlightIssue1994() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=string,store=no", "titleTV", "type=string,store=no,term_vector=with_positions_offsets")); + .addMapping("type1", "title", 
"type=string,store=false", "titleTV", "type=string,store=false,term_vector=with_positions_offsets")); ensureYellow(); indexRandom(false, client().prepareIndex("test", "type1", "1") @@ -683,7 +683,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .addMapping("type1", jsonBuilder().startObject().startObject("type1") .startObject("_source").field("enabled", false).endObject() .startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").field("index_options", "offsets") + .startObject("field1").field("type", "string").field("store", true).field("index_options", "offsets") .field("term_vector", "with_positions_offsets").endObject() .endObject().endObject().endObject())); @@ -915,7 +915,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .startObject("foo") .field("type", "string") .field("termVector", "with_positions_offsets") - .field("store", "yes") + .field("store", true) .field("analyzer", "english") .startObject("fields") .startObject("plain") @@ -928,7 +928,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .startObject("bar") .field("type", "string") .field("termVector", "with_positions_offsets") - .field("store", "yes") + .field("store", true) .field("analyzer", "english") .startObject("fields") .startObject("plain") @@ -1101,7 +1101,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public XContentBuilder type1TermVectorMapping() throws IOException { return XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject() + .startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject() .startObject("properties") .startObject("field1").field("type", "string").field("termVector", "with_positions_offsets").endObject() .startObject("field2").field("type", "string").field("termVector", "with_positions_offsets").endObject() @@ -1111,7 +1111,7 
@@ public class HighlighterSearchIT extends ESIntegTestCase { public void testSameContent() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets")); + .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets")); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1133,7 +1133,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testFastVectorHighlighterOffsetParameter() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets").get()); + .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets").get()); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1156,7 +1156,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testEscapeHtml() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=string,store=yes")); + .addMapping("type1", "title", "type=string,store=true")); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1178,7 +1178,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testEscapeHtmlVector() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets")); + .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets")); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1201,9 +1201,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - 
.startObject("title").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "classic") + .startObject("title").field("type", "string").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "classic") .startObject("fields") - .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() + .startObject("key").field("type", "string").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); @@ -1229,9 +1229,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "classic") + .startObject("title").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "classic") .startObject("fields") - .startObject("key").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() + .startObject("key").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); @@ -1259,9 +1259,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - 
.startObject("title").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "classic") + .startObject("title").field("type", "string").field("store", true).field("term_vector", "no").field("analyzer", "classic") .startObject("fields") - .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "whitespace").endObject() + .startObject("key").field("type", "string").field("store", true).field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1289,9 +1289,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "classic") + .startObject("title").field("type", "string").field("store", false).field("term_vector", "no").field("analyzer", "classic") .startObject("fields") - .startObject("key").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "whitespace").endObject() + .startObject("key").field("type", "string").field("store", false).field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); @@ -1317,7 +1317,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=string,store=yes,term_vector=no")); + .addMapping("type1", "title", "type=string,store=true,term_vector=no")); ensureGreen(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1347,7 +1347,7 @@ public class 
HighlighterSearchIT extends ESIntegTestCase { public void testDisableFastVectorHighlighter() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets,analyzer=classic")); + .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets,analyzer=classic")); ensureGreen(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1485,7 +1485,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .putArray("index.analysis.filter.synonym.synonyms", "quick => fast"); assertAcked(prepareCreate("test").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping()) - .addMapping("type2", "_all", "store=yes,termVector=with_positions_offsets", + .addMapping("type2", "_all", "store=true,termVector=with_positions_offsets", "field4", "type=string,term_vector=with_positions_offsets,analyzer=synonym", "field3", "type=string,analyzer=synonym")); ensureGreen(); @@ -1622,7 +1622,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMissingStoredField() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "highlight_field", "type=string,store=yes")); + .addMapping("type1", "highlight_field", "type=string,store=true")); ensureGreen(); client().prepareIndex("test", "type1", "1") .setSource(jsonBuilder().startObject() @@ -1744,7 +1744,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { private static String randomStoreField() { if (randomBoolean()) { - return "store=yes,"; + return "store=true,"; } return ""; } @@ -2136,7 +2136,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMatchQueryHighlight() throws IOException { String[] highlighterTypes = new String[] {"fvh", "plain", "postings"}; XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_all").field("store", 
"yes").field("index_options", "offsets").endObject() + .startObject("_all").field("store", true).field("index_options", "offsets").endObject() .startObject("properties") .startObject("field1").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject() .startObject("field2").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject() @@ -2226,9 +2226,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1") .startObject("properties") - .startObject("title").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "classic") + .startObject("title").field("type", "string").field("store", true).field("index_options", "offsets").field("analyzer", "classic") .startObject("fields") - .startObject("key").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "whitespace").endObject() + .startObject("key").field("type", "string").field("store", true).field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); @@ -2258,9 +2258,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterMultiMapperFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "classic") + .startObject("title").field("type", "string").field("store", false).field("index_options", "offsets").field("analyzer", "classic") .startObject("fields") - .startObject("key").field("type", "string").field("store", "no").field("index_options", 
"offsets").field("analyzer", "whitespace").endObject() + .startObject("key").field("type", "string").field("store", false).field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); @@ -2287,7 +2287,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "string").field("store", "yes").field("index_options", "docs").endObject() + .startObject("title").field("type", "string").field("store", true).field("index_options", "docs").endObject() .endObject().endObject().endObject())); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index 1e71b868236..76b7a0a0e3e 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -760,7 +760,7 @@ public class InnerHitsIT extends ESIntegTestCase { .startObject("comments") .field("type", "nested") .startObject("properties") - .startObject("message").field("type", "string").field("store", "yes").endObject() + .startObject("message").field("type", "string").field("store", true).endObject() .endObject() .endObject() .endObject() @@ -798,7 +798,7 @@ public class InnerHitsIT extends ESIntegTestCase { .startObject("comments") .field("type", "nested") .startObject("properties") - .startObject("message").field("type", "string").field("store", "yes").endObject() + .startObject("message").field("type", "string").field("store", true).endObject() .endObject() .endObject() .endObject() @@ -836,7 +836,7 @@ public class InnerHitsIT extends ESIntegTestCase { 
.startObject("comments") .field("type", "nested") .startObject("properties") - .startObject("message").field("type", "string").field("store", "yes").endObject() + .startObject("message").field("type", "string").field("store", true).endObject() .endObject() .endObject() .endObject() @@ -875,7 +875,7 @@ public class InnerHitsIT extends ESIntegTestCase { .startObject("comments") .field("type", "nested") .startObject("properties") - .startObject("message").field("type", "string").field("store", "yes").endObject() + .startObject("message").field("type", "string").field("store", true).endObject() .endObject() .endObject() .endObject() diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index fd9ee9a3f10..23eafdb0b01 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -337,7 +337,7 @@ public class SimpleNestedIT extends ESIntegTestCase { .startObject("properties") .startObject("field1") .field("type", "long") - .field("store", "yes") + .field("store", true) .endObject() .endObject() .endObject() diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json index f956b84f957..eb9b7833c68 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json @@ -11,7 +11,7 @@ "properties":{ "first":{ "type":"string", - "store":"yes", + "store":true, "include_in_all":false }, "last":{ @@ -29,7 +29,7 @@ "properties":{ "location":{ "type":"string", - "store":"yes" + "store":true } } }, diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json 
b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json index 452ef9f083b..42bba4354c6 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json @@ -11,7 +11,7 @@ "properties":{ "first":{ "type":"string", - "store":"yes", + "store":true, "include_in_all":false }, "last":{ @@ -29,7 +29,7 @@ "properties":{ "location":{ "type":"string", - "store":"yes" + "store":true } } }, diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json index f6b0699beee..388ac13e9e6 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json @@ -11,7 +11,7 @@ "properties":{ "first":{ "type":"string", - "store":"yes", + "store":true, "include_in_all":false }, "last":{ @@ -29,7 +29,7 @@ "properties":{ "location":{ "type":"string", - "store":"yes" + "store":true } } }, diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json index f8e418ce8ed..57aad9e8fab 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json @@ -11,7 +11,7 @@ "properties":{ "first":{ "type":"string", - "store":"yes", + "store":true, "include_in_all":false }, "last":{ @@ -28,7 +28,7 @@ "properties":{ "location":{ "type":"string", - "store":"yes" + "store":true } } }, diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json 
b/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json index 799a3ab460a..9b1119f1a2e 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json @@ -10,7 +10,7 @@ "properties":{ "first":{ "type":"string", - "store":"yes", + "store":true, "include_in_all":false }, "last":{ @@ -27,7 +27,7 @@ "properties":{ "location":{ "type":"string", - "store":"yes" + "store":true } } }, diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json index 8f653a34845..66fed5928ad 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json @@ -2,7 +2,7 @@ "person":{ "_all":{ "enabled":true, - "store":"yes" + "store":true }, "properties":{ "name":{ @@ -11,7 +11,7 @@ "properties":{ "first":{ "type":"string", - "store":"yes", + "store":true, "include_in_all":false }, "last":{ @@ -29,7 +29,7 @@ "properties":{ "location":{ "type":"string", - "store":"yes" + "store":true } } }, diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json index d99067c2b52..70bf6dc7b5d 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json @@ -5,7 +5,7 @@ "template_1":{ "match":"*", "mapping":{ - "store":"yes" + "store":true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json 
b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json index dce33dadfcc..3c273e6ed9e 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json @@ -5,7 +5,7 @@ "template_1":{ "path_match":"obj1.obj2.*", "mapping":{ - "store":"no" + "store":false } } }, @@ -13,7 +13,7 @@ "template_2":{ "path_match":"obj1.*", "mapping":{ - "store":"yes" + "store":true } } }, @@ -27,4 +27,4 @@ } ] } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json index 9c8f8d8e6a3..7a7e96d12a3 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json @@ -7,12 +7,12 @@ "mapping":{ "type":"{dynamic_type}", "index":"analyzed", - "store":"yes", + "store":true, "fields":{ "org":{ "type":"{dynamic_type}", "index":"not_analyzed", - "store":"yes" + "store":true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json index 61f08af57e2..0c2f9ab3f81 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json @@ -4,7 +4,7 @@ "name":{ type:"string", index:"analyzed", - store:"yes" + store:true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json 
b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json index 02ce8957a51..37064a038b1 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json @@ -4,12 +4,12 @@ "name":{ "type" :"string", "index" :"analyzed", - "store" :"yes", + "store" :true, "fields":{ "name":{ "type" :"string", "index" :"analyzed", - "store" :"yes" + "store" :true }, "indexed":{ "type" :"string", @@ -18,7 +18,7 @@ "not_indexed":{ "type" :"string", "index" :"no", - "store" :"yes" + "store" :true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json index ea076754467..564d4b5ac05 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json @@ -4,12 +4,12 @@ "name" : { "type" : "string", "index" : "analyzed", - "store" : "yes", + "store" : true, "fields": { "name" : { "type" : "string", "index" : "analyzed", - "store" : "yes" + "store" : true }, "indexed":{ type:"string", @@ -18,12 +18,12 @@ "not_indexed":{ type:"string", index:"no", - store:"yes" + store:true }, "not_indexed2":{ type:"string", index:"no", - store:"yes" + store:true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json index 384c2634cb2..7d2fea2d2f5 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json @@ -4,12 +4,12 @@ "name":{ type:"string", index:"analyzed", - store:"yes", + 
store:true, "fields":{ "not_indexed3":{ type:"string", index:"no", - store:"yes" + store:true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json index 595f62210dd..8224cd69cf8 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json @@ -4,7 +4,7 @@ "name":{ type:"string", index:"analyzed", - store:"yes", + store:true, "fields":{ "indexed":{ type:"string", @@ -13,7 +13,7 @@ "not_indexed":{ type:"string", index:"no", - store:"yes" + store:true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json index 3cfca9c313e..42315cc8e3e 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json @@ -4,7 +4,7 @@ "name":{ type:"string", index:"analyzed", - store:"yes", + store:true, "fields":{ "indexed":{ type:"string", @@ -13,12 +13,12 @@ "not_indexed":{ type:"string", index:"no", - store:"yes" + store:true }, "not_indexed2":{ type:"string", index:"no", - store:"yes" + store:true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json index 046b0c234d4..4d429475261 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json @@ -8,7 +8,7 @@ "not_indexed3":{ type:"string", index:"no", - store:"yes" + store:true } } } diff --git 
a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json index 99b74c01670..19d7e45ad48 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json @@ -11,7 +11,7 @@ "not_indexed": { "type": "string", "index": "no", - "store": "yes" + "store": true } } }, @@ -23,7 +23,7 @@ }, "stored": { "type": "long", - "store": "yes" + "store": true } } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json index b1166658296..c69f4aed0e8 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json @@ -4,22 +4,22 @@ "name": { "type": "string", "index": "analyzed", - "store": "yes", + "store": true, "fields": { "indexed": { "type": "string", "index": "analyzed", - "store": "no" + "store": false }, "not_indexed": { "type": "string", "index": "no", - "store": "yes" + "store": true }, "test1": { "type": "string", "index": "analyzed", - "store": "yes", + "store": true, "fielddata": { "loading": "eager" } @@ -27,7 +27,7 @@ "test2": { "type": "token_count", "index": "not_analyzed", - "store": "yes", + "store": true, "analyzer": "simple" } } @@ -47,4 +47,4 @@ } } } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json index e001673758a..a37946bf2e6 100644 --- 
a/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json @@ -15,7 +15,7 @@ properties:{ first:{ type:"string", - store:"yes" + store:true }, last:{ type:"string", @@ -30,7 +30,7 @@ properties:{ location:{ type:"string", - store:"yes" + store:true } } }, diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index b78c1c264c3..217aa2f1a8f 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -87,9 +87,9 @@ public class SearchFieldsTests extends ESIntegTestCase { // _timestamp is randomly enabled via templates but we don't want it here to test stored fields behaviour .startObject("_timestamp").field("enabled", false).endObject() .startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "no").endObject() - .startObject("field3").field("type", "string").field("store", "yes").endObject() + .startObject("field1").field("type", "string").field("store", true).endObject() + .startObject("field2").field("type", "string").field("store", false).endObject() + .startObject("field3").field("type", "string").field("store", true).endObject() .endObject().endObject().endObject().string(); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); @@ -171,7 +171,7 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("num1").field("type", "double").field("store", "yes").endObject() + .startObject("num1").field("type", "double").field("store", true).endObject() .endObject().endObject().endObject().string(); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); @@ -391,15 +391,15 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") - .startObject("byte_field").field("type", "byte").field("store", "yes").endObject() - .startObject("short_field").field("type", "short").field("store", "yes").endObject() - .startObject("integer_field").field("type", "integer").field("store", "yes").endObject() - .startObject("long_field").field("type", "long").field("store", "yes").endObject() - .startObject("float_field").field("type", "float").field("store", "yes").endObject() - .startObject("double_field").field("type", "double").field("store", "yes").endObject() - .startObject("date_field").field("type", "date").field("store", "yes").endObject() - .startObject("boolean_field").field("type", "boolean").field("store", "yes").endObject() - .startObject("binary_field").field("type", "binary").field("store", "yes").endObject() + .startObject("byte_field").field("type", "byte").field("store", true).endObject() + .startObject("short_field").field("type", "short").field("store", true).endObject() + .startObject("integer_field").field("type", "integer").field("store", true).endObject() + .startObject("long_field").field("type", "long").field("store", true).endObject() + .startObject("float_field").field("type", "float").field("store", true).endObject() + 
.startObject("double_field").field("type", "double").field("store", true).endObject() + .startObject("date_field").field("type", "date").field("store", true).endObject() + .startObject("boolean_field").field("type", "boolean").field("store", true).endObject() + .startObject("binary_field").field("type", "binary").field("store", true).endObject() .endObject().endObject().endObject().string(); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); @@ -487,7 +487,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .startObject("field1").field("type", "object").startObject("properties") .startObject("field2").field("type", "object").startObject("properties") .startObject("field3").field("type", "object").startObject("properties") - .startObject("field4").field("type", "string").field("store", "yes") + .startObject("field4").field("type", "string").field("store", true) .endObject().endObject() .endObject().endObject() .endObject().endObject() diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index ad8b1e68230..309da10d680 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -193,7 +193,7 @@ public class SimpleSortTests extends ESIntegTestCase { public void testIssue6639() throws ExecutionException, InterruptedException { assertAcked(prepareCreate("$index") - .addMapping("$type","{\"$type\": {\"properties\": {\"grantee\": {\"index\": \"not_analyzed\", \"term_vector\": \"with_positions_offsets\", \"type\": \"string\", \"analyzer\": \"snowball\", \"boost\": 1.0, \"store\": \"yes\"}}}}")); + .addMapping("$type","{\"$type\": {\"properties\": {\"grantee\": {\"index\": \"not_analyzed\", \"term_vector\": \"with_positions_offsets\", \"type\": 
\"string\", \"analyzer\": \"snowball\", \"boost\": 1.0, \"store\": true}}}}")); indexRandom(true, client().prepareIndex("$index", "$type", "data.activity.5").setSource("{\"django_ct\": \"data.activity\", \"grantee\": \"Grantee 1\"}"), client().prepareIndex("$index", "$type", "data.activity.6").setSource("{\"django_ct\": \"data.activity\", \"grantee\": \"Grantee 2\"}")); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index 92bf7d03484..4fd83f9a850 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -456,7 +456,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .put("index.analysis.filter.my_shingle.min_shingle_size", 2) .put("index.analysis.filter.my_shingle.max_shingle_size", 2)); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject() + .startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject() .startObject("properties") .startObject("body").field("type", "string").field("analyzer", "body").endObject() .startObject("body_reverse").field("type", "string").field("analyzer", "reverse").endObject() @@ -500,7 +500,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .put("index.analysis.filter.my_shingle.max_shingle_size", 2)); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("_all") - .field("store", "yes") + .field("store", true) .field("termVector", "with_positions_offsets") .endObject() .startObject("properties") @@ -635,7 +635,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .startObject() 
.startObject("type1") .startObject("_all") - .field("store", "yes") + .field("store", true) .field("termVector", "with_positions_offsets") .endObject() .startObject("properties") @@ -705,7 +705,7 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject().startObject("type1") - .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject() + .startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject() .startObject("properties") .startObject("body").field("type", "string").field("analyzer", "body").endObject() .startObject("bigram").field("type", "string").field("analyzer", "bigram").endObject() @@ -898,7 +898,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .startObject() .startObject("type1") .startObject("_all") - .field("store", "yes") + .field("store", true) .field("termVector", "with_positions_offsets") .endObject() .startObject("properties") diff --git a/plugins/mapper-attachments/src/test/resources/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json b/plugins/mapper-attachments/src/test/resources/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json index ea83b98ceec..feaa3a54555 100644 --- a/plugins/mapper-attachments/src/test/resources/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json +++ b/plugins/mapper-attachments/src/test/resources/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json @@ -4,14 +4,14 @@ "file":{ "type":"attachment", "fields" : { - "content" : {"store" : "yes"}, - "title" : {"store" : "yes"}, - "date" : {"store" : "yes"}, + "content" : {"store" : true}, + "title" : {"store" : true}, + "date" : {"store" : true}, "author" : {"analyzer" : "standard"}, - "keywords" : {"store" : "yes"}, - "content_type" : {"store" : "yes"}, - "content_length" 
: {"store" : "yes"}, - "language" : {"store" : "yes"} + "keywords" : {"store" : true}, + "content_type" : {"store" : true}, + "content_length" : {"store" : true}, + "language" : {"store" : true} } } } diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml index 170a8bf7382..458990cc90c 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml @@ -29,9 +29,9 @@ "type": "attachment" "fields": "content_type": - "store": "yes" + "store": true "name": - "store": "yes" + "store": true - do: cluster.health: wait_for_status: yellow diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml index 286dae8b976..a4eec42752d 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml @@ -14,7 +14,7 @@ setup: "fields": "content" : "type": "string" - "store" : "yes" + "store" : true "term_vector": "with_positions_offsets" - do: diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index baeba9f4bbd..6cd54eeaac0 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -38,7 +38,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import 
static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore; public class SizeFieldMapper extends MetadataFieldMapper { @@ -92,10 +92,10 @@ public class SizeFieldMapper extends MetadataFieldMapper { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); + builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); } else if (fieldName.equals("store") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.store(parseStore(fieldName, fieldNode.toString())); + builder.store(parseStore(fieldName, fieldNode.toString(), parserContext)); iterator.remove(); } } From 2aaa5e6448fb96dd80c37827b292592d607398bc Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 21 Jan 2016 14:53:51 +0100 Subject: [PATCH 343/347] Remove the ability to enable doc values with the `fielddata.format` setting. Doc values can now only be enabled by setting `doc_values: true` in the mappings. Removing this feature also means that we can now fail mapping updates that try to disable doc values. 
--- .../index/mapper/FieldMapper.java | 2 +- .../index/mapper/MappedFieldType.java | 6 +-- .../fielddata/BinaryDVFieldDataTests.java | 2 +- .../index/mapper/FieldTypeTestCase.java | 46 +++++++------------ .../mapper/core/CompletionFieldTypeTests.java | 6 +-- .../index/mapper/core/DateFieldTypeTests.java | 6 +-- .../mapper/geo/GeoPointFieldTypeTests.java | 4 +- .../mapper/geo/GeoShapeFieldTypeTests.java | 12 ++--- .../internal/FieldNamesFieldTypeTests.java | 2 +- .../string/SimpleStringMappingTests.java | 42 ----------------- docs/reference/migration/migrate_3_0.asciidoc | 6 +++ .../messy/tests/EquivalenceTests.java | 4 +- 12 files changed, 43 insertions(+), 95 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 3725327c235..b5277b2575f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -246,7 +246,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { boolean defaultDocValues = defaultDocValues(context.indexCreatedVersion()); defaultFieldType.setHasDocValues(defaultDocValues); if (docValuesSet == false) { - fieldType.setHasDocValues(defaultDocValues || fieldDataDocValues); + fieldType.setHasDocValues(defaultDocValues); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 09d459fc4a2..f030ebed7ac 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -199,10 +199,8 @@ public abstract class MappedFieldType extends FieldType { if (stored() != other.stored()) { conflicts.add("mapper [" + name() + "] has different [store] values"); } - if (hasDocValues() == false && other.hasDocValues()) { - // don't add 
conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set - // when the doc_values field data format is configured - conflicts.add("mapper [" + name() + "] has different [doc_values] values, cannot change from disabled to enabled"); + if (hasDocValues() != other.hasDocValues()) { + conflicts.add("mapper [" + name() + "] has different [doc_values] values"); } if (omitNorms() && !other.omitNorms()) { conflicts.add("mapper [" + name() + "] has different [omit_norms] values, cannot change from disable to enabled"); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index ca207fbdc2c..30669e83409 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -46,7 +46,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { .startObject("properties") .startObject("field") .field("type", "binary") - .startObject("fielddata").field("format", "doc_values").endObject() + .field("doc_values", true) .endObject() .endObject() .endObject().endObject().string(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index c8d7e4ac147..966edf82621 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -38,13 +38,10 @@ public abstract class FieldTypeTestCase extends ESTestCase { public final String property; /** true if this modifier only makes types incompatible in strict mode, false otherwise */ public final boolean strictOnly; - /** true if reversing the order of checkCompatibility arguments should result in the same conflicts, false otherwise **/ - 
public final boolean symmetric; - public Modifier(String property, boolean strictOnly, boolean symmetric) { + public Modifier(String property, boolean strictOnly) { this.property = property; this.strictOnly = strictOnly; - this.symmetric = symmetric; } /** Modifies the property */ @@ -57,25 +54,25 @@ public abstract class FieldTypeTestCase extends ESTestCase { } private final List modifiers = new ArrayList<>(Arrays.asList( - new Modifier("boost", true, true) { + new Modifier("boost", true) { @Override public void modify(MappedFieldType ft) { ft.setBoost(1.1f); } }, - new Modifier("doc_values", false, false) { + new Modifier("doc_values", false) { @Override public void modify(MappedFieldType ft) { ft.setHasDocValues(ft.hasDocValues() == false); } }, - new Modifier("analyzer", false, true) { + new Modifier("analyzer", false) { @Override public void modify(MappedFieldType ft) { ft.setIndexAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer())); } }, - new Modifier("analyzer", false, true) { + new Modifier("analyzer", false) { @Override public void modify(MappedFieldType ft) { ft.setIndexAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer())); @@ -85,13 +82,13 @@ public abstract class FieldTypeTestCase extends ESTestCase { other.setIndexAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer())); } }, - new Modifier("search_analyzer", true, true) { + new Modifier("search_analyzer", true) { @Override public void modify(MappedFieldType ft) { ft.setSearchAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer())); } }, - new Modifier("search_analyzer", true, true) { + new Modifier("search_analyzer", true) { @Override public void modify(MappedFieldType ft) { ft.setSearchAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer())); @@ -101,13 +98,13 @@ public abstract class FieldTypeTestCase extends ESTestCase { other.setSearchAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer())); } }, - new Modifier("search_quote_analyzer", true, true) { + new 
Modifier("search_quote_analyzer", true) { @Override public void modify(MappedFieldType ft) { ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer())); } }, - new Modifier("search_quote_analyzer", true, true) { + new Modifier("search_quote_analyzer", true) { @Override public void modify(MappedFieldType ft) { ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer())); @@ -117,13 +114,13 @@ public abstract class FieldTypeTestCase extends ESTestCase { other.setSearchQuoteAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer())); } }, - new Modifier("similarity", false, true) { + new Modifier("similarity", false) { @Override public void modify(MappedFieldType ft) { ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY)); } }, - new Modifier("similarity", false, true) { + new Modifier("similarity", false) { @Override public void modify(MappedFieldType ft) { ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY)); @@ -133,19 +130,19 @@ public abstract class FieldTypeTestCase extends ESTestCase { other.setSimilarity(new BM25SimilarityProvider("bar", Settings.EMPTY)); } }, - new Modifier("norms.loading", true, true) { + new Modifier("norms.loading", true) { @Override public void modify(MappedFieldType ft) { ft.setNormsLoading(MappedFieldType.Loading.LAZY); } }, - new Modifier("fielddata", true, true) { + new Modifier("fielddata", true) { @Override public void modify(MappedFieldType ft) { ft.setFieldDataType(new FieldDataType("foo", Settings.builder().put("loading", "eager").build())); } }, - new Modifier("null_value", true, true) { + new Modifier("null_value", true) { @Override public void modify(MappedFieldType ft) { ft.setNullValue(dummyNullValue); @@ -334,23 +331,14 @@ public abstract class FieldTypeTestCase extends ESTestCase { assertCompatible(modifier.property, ft1, ft2, false); assertNotCompatible(modifier.property, ft1, ft2, true, conflicts); assertCompatible(modifier.property, ft2, ft1, false); // always 
symmetric when not strict - if (modifier.symmetric) { - assertNotCompatible(modifier.property, ft2, ft1, true, conflicts); - } else { - assertCompatible(modifier.property, ft2, ft1, true); - } + assertNotCompatible(modifier.property, ft2, ft1, true, conflicts); } else { // not compatible whether strict or not String conflict = "different [" + modifier.property + "]"; assertNotCompatible(modifier.property, ft1, ft2, true, conflict); assertNotCompatible(modifier.property, ft1, ft2, false, conflict); - if (modifier.symmetric) { - assertNotCompatible(modifier.property, ft2, ft1, true, conflict); - assertNotCompatible(modifier.property, ft2, ft1, false, conflict); - } else { - assertCompatible(modifier.property, ft2, ft1, true); - assertCompatible(modifier.property, ft2, ft1, false); - } + assertNotCompatible(modifier.property, ft2, ft1, true, conflict); + assertNotCompatible(modifier.property, ft2, ft1, false, conflict); } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java index 7ec1814a59b..6f5225dd2c1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java @@ -34,21 +34,21 @@ public class CompletionFieldTypeTests extends FieldTypeTestCase { @Before public void setupProperties() { - addModifier(new Modifier("preserve_separators", false, true) { + addModifier(new Modifier("preserve_separators", false) { @Override public void modify(MappedFieldType ft) { CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft; cft.setPreserveSep(false); } }); - addModifier(new Modifier("preserve_position_increments", false, true) { + addModifier(new Modifier("preserve_position_increments", false) { @Override public void modify(MappedFieldType ft) { 
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft; cft.setPreservePositionIncrements(false); } }); - addModifier(new Modifier("context_mappings", false, true) { + addModifier(new Modifier("context_mappings", false) { @Override public void modify(MappedFieldType ft) { CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java index 3c37af6f49a..0e009891cf2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java @@ -35,19 +35,19 @@ public class DateFieldTypeTests extends FieldTypeTestCase { @Before public void setupProperties() { setDummyNullValue(10); - addModifier(new Modifier("format", true, true) { + addModifier(new Modifier("format", true) { @Override public void modify(MappedFieldType ft) { ((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); } }); - addModifier(new Modifier("locale", true, true) { + addModifier(new Modifier("locale", true) { @Override public void modify(MappedFieldType ft) { ((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA)); } }); - addModifier(new Modifier("numeric_resolution", true, true) { + addModifier(new Modifier("numeric_resolution", true) { @Override public void modify(MappedFieldType ft) { ((DateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java index 19eb536e32e..6934d06a509 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java 
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java @@ -32,13 +32,13 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase { @Before public void setupProperties() { - addModifier(new Modifier("geohash", false, true) { + addModifier(new Modifier("geohash", false) { @Override public void modify(MappedFieldType ft) { ((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setGeoHashEnabled(new StringFieldMapper.StringFieldType(), 1, true); } }); - addModifier(new Modifier("lat_lon", false, true) { + addModifier(new Modifier("lat_lon", false) { @Override public void modify(MappedFieldType ft) { ((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java index 7ce99aa737a..3407661b521 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java @@ -31,37 +31,37 @@ public class GeoShapeFieldTypeTests extends FieldTypeTestCase { @Before public void setupProperties() { - addModifier(new Modifier("tree", false, true) { + addModifier(new Modifier("tree", false) { @Override public void modify(MappedFieldType ft) { ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTree("quadtree"); } }); - addModifier(new Modifier("strategy", false, true) { + addModifier(new Modifier("strategy", false) { @Override public void modify(MappedFieldType ft) { ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setStrategyName("term"); } }); - addModifier(new Modifier("tree_levels", false, true) { + addModifier(new Modifier("tree_levels", false) { @Override public void modify(MappedFieldType ft) { ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTreeLevels(10); } }); - addModifier(new 
Modifier("precision", false, true) { + addModifier(new Modifier("precision", false) { @Override public void modify(MappedFieldType ft) { ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setPrecisionInMeters(20); } }); - addModifier(new Modifier("distance_error_pct", true, true) { + addModifier(new Modifier("distance_error_pct", true) { @Override public void modify(MappedFieldType ft) { ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5); } }); - addModifier(new Modifier("orientation", true, true) { + addModifier(new Modifier("orientation", true) { @Override public void modify(MappedFieldType ft) { ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java index 83aa779a61d..fd0c344c26b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java @@ -30,7 +30,7 @@ public class FieldNamesFieldTypeTests extends FieldTypeTestCase { @Before public void setupProperties() { - addModifier(new Modifier("enabled", true, true) { + addModifier(new Modifier("enabled", true) { @Override public void modify(MappedFieldType ft) { FieldNamesFieldMapper.FieldNamesFieldType fnft = (FieldNamesFieldMapper.FieldNamesFieldType)ft; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 1f3ba975d02..6114185ccf6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; 
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; @@ -64,7 +63,6 @@ import static org.hamcrest.Matchers.nullValue; /** */ public class SimpleStringMappingTests extends ESSingleNodeTestCase { - private static Settings DOC_VALUES_SETTINGS = Settings.builder().put(FieldDataType.FORMAT_KEY, FieldDataType.DOC_VALUES_FORMAT_VALUE).build(); @Override protected Collection> getPlugins() { @@ -371,46 +369,6 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true)); } - public void testDocValuesFielddata() throws Exception { - IndexService indexService = createIndex("index"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - - assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(false).fieldDataSettings(DOC_VALUES_SETTINGS).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("str1") - .field("type", "string") - .startObject("fielddata") - .field("format", "paged_bytes") - .endObject() - .endObject() - .startObject("str2") - .field("type", "string") - .field("index", "not_analyzed") - .startObject("fielddata") - .field("format", "doc_values") - .endObject() - .endObject() - .endObject() - 
.endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("str1", "1234") - .field("str2", "1234") - .endObject() - .bytes()); - final Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str1")); - assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2")); - } - public void testDocValues() throws Exception { // doc values only work on non-analyzed content final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 9b7de2aff19..54449bef7e6 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -293,6 +293,12 @@ values as follows: } ---- +==== `fielddata.format` + +Setting `fielddata.format: doc_values` in the mappings used to implicitly +enable doc values on a field. This no longer works: the only way to enable or +disable doc values is by using the `doc_values` property of mappings. 
+ [[breaking_30_plugins]] === Plugin changes diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index 584a8d2c284..3c062f871d3 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -194,9 +194,7 @@ public class EquivalenceTests extends ESIntegTestCase { .startObject("doc_values") .field("type", "string") .field("index", "no") - .startObject("fielddata") - .field("format", "doc_values") - .endObject() + .field("doc_values", true) .endObject() .endObject() .endObject() From 209860854ddc87ff71a49d9ced1186a8e2c8e3b8 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 21 Jan 2016 21:02:14 +0100 Subject: [PATCH 344/347] Make the `index` property a boolean. With the split of `string` into `text` and `keyword`, the `index` property can only have two values and should be a boolean. 
--- .../index/mapper/FieldMapper.java | 13 ++--- .../index/mapper/core/StringFieldMapper.java | 32 +++++++++++ .../index/mapper/core/TypeParsers.java | 35 ++++++++---- .../mapper/numeric/SimpleNumericTests.java | 55 +++++++++++++++++++ .../mapping/SimpleGetFieldMappingsIT.java | 2 +- .../bucket/SignificantTermsIT.java | 2 +- .../mapper/multifield/test-multi-fields.json | 2 +- docs/reference/mapping/types/boolean.asciidoc | 2 +- docs/reference/mapping/types/date.asciidoc | 4 +- docs/reference/mapping/types/ip.asciidoc | 4 +- docs/reference/mapping/types/numeric.asciidoc | 4 +- docs/reference/migration/migrate_3_0.asciidoc | 6 ++ 12 files changed, 130 insertions(+), 31 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index b5277b2575f..a9838503566 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -398,7 +398,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE; if (includeDefaults || indexed != defaultIndexed || fieldType().tokenized() != defaultFieldType.tokenized()) { - builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized())); + builder.field("index", indexTokenizeOption(indexed, fieldType().tokenized())); } if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) { builder.field("store", fieldType().stored()); @@ -495,14 +495,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable { } } - protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) { - if (!indexed) { - return "no"; - } else if (tokenized) { - return "analyzed"; - } else { - return "not_analyzed"; - } + /* Only protected so that string can override it */ + protected Object 
indexTokenizeOption(boolean indexed, boolean tokenized) { + return indexed; } protected boolean hasCustomFieldDataSettings() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 4471db8954d..918731d0244 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -147,6 +147,27 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { StringFieldMapper.Builder builder = stringField(name); + // hack for the fact that string can't just accept true/false for + // the index property and still accepts no/not_analyzed/analyzed + final Object index = node.remove("index"); + if (index != null) { + final String normalizedIndex = Strings.toUnderscoreCase(index.toString()); + switch (normalizedIndex) { + case "analyzed": + builder.tokenized(true); + node.put("index", true); + break; + case "not_analyzed": + builder.tokenized(false); + node.put("index", true); + break; + case "no": + node.put("index", false); + break; + default: + throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true], [false], [no], [not_analyzed] or [analyzed]"); + } + } parseTextField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -369,6 +390,17 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove; } + @Override + protected String indexTokenizeOption(boolean indexed, boolean tokenized) { + if (!indexed) { + return "no"; + } else if (tokenized) { + return "analyzed"; + } else { + return 
"not_analyzed"; + } + } + @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index e3df1f7dbf1..e2f59b3ef4f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -172,7 +172,7 @@ public class TypeParsers { builder.store(parseStore(name, propNode.toString(), parserContext)); iterator.remove(); } else if (propName.equals("index")) { - parseIndex(name, propNode.toString(), builder); + builder.index(parseIndex(name, propNode.toString(), parserContext)); iterator.remove(); } else if (propName.equals(DOC_VALUES)) { builder.docValues(nodeBooleanValue(propNode, parserContext)); @@ -328,18 +328,29 @@ public class TypeParsers { } } - public static void parseIndex(String fieldName, String index, FieldMapper.Builder builder) throws MapperParsingException { - index = Strings.toUnderscoreCase(index); - if ("no".equals(index)) { - builder.index(false); - } else if ("not_analyzed".equals(index)) { - builder.index(true); - builder.tokenized(false); - } else if ("analyzed".equals(index)) { - builder.index(true); - builder.tokenized(true); + public static boolean parseIndex(String fieldName, String index, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException { + if (parserContext.indexVersionCreated().onOrAfter(Version.V_3_0_0)) { + switch (index) { + case "true": + return true; + case "false": + return false; + default: + throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true] or [false]"); + } } else { - throw new MapperParsingException("wrong value for index [" + index + "] for field [" + fieldName + "]"); + final String 
normalizedIndex = Strings.toUnderscoreCase(index); + switch (normalizedIndex) { + case "true": + case "not_analyzed": + case "analyzed": + return true; + case "false": + case "no": + return false; + default: + throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true], [false], [no], [not_analyzed] or [analyzed]"); + } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index 1c024a0d549..e0c55a85e09 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.NumericTokenStream; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -354,6 +355,60 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { assertEquals(DocValuesType.NONE, SimpleStringMappingTests.docValuesType(doc, "double2")); } + public void testUnIndex() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("int") + .field("type", "integer") + .field("index", false) + .endObject() + .startObject("double") + .field("type", "double") + .field("index", false) + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + + 
assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\",\"index\":false},\"int\":{\"type\":\"integer\",\"index\":false}}}}", + defaultMapper.mapping().toString()); + + ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("int", "1234") + .field("double", "1234") + .endObject() + .bytes()); + final Document doc = parsedDoc.rootDoc(); + for (IndexableField field : doc.getFields("int")) { + assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); + } + for (IndexableField field : doc.getFields("double")) { + assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); + } + } + + public void testBwCompatIndex() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("int") + .field("type", "integer") + .field("index", "no") + .endObject() + .startObject("double") + .field("type", "double") + .field("index", "not_analyzed") + .endObject() + .endObject() + .endObject().endObject().string(); + + Settings oldSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); + DocumentMapper defaultMapper = createIndex("test", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\"},\"int\":{\"type\":\"integer\",\"index\":false}}}}", + defaultMapper.mapping().toString()); + } + public void testDocValuesOnNested() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index b96e9bff80c..a993130a91a 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -143,7 +143,7 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("num", "field1", "obj.subfield").includeDefaults(true).get(); - assertThat((Map) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("index", (Object) "not_analyzed")); + assertThat((Map) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("index", Boolean.TRUE)); assertThat((Map) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", (Object) "long")); assertThat((Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", (Object) "analyzed")); assertThat((Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "string")); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java index 7582d75ca0b..97a3cfa3ba2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java @@ -76,7 +76,7 @@ public class SignificantTermsIT extends ESIntegTestCase { public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 5, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("fact", "_routing", "required=true", "routing_id", "type=string,index=not_analyzed", "fact_category", - "type=integer,index=not_analyzed", "description", "type=string,index=analyzed")); + "type=integer,index=true", "description", 
"type=string,index=analyzed")); createIndex("idx_unmapped"); ensureGreen(); diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json index c69f4aed0e8..3be34a98398 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json @@ -26,7 +26,7 @@ }, "test2": { "type": "token_count", - "index": "not_analyzed", + "index": true, "store": true, "analyzer": "simple" } diff --git a/docs/reference/mapping/types/boolean.asciidoc b/docs/reference/mapping/types/boolean.asciidoc index 9ff1aa13dde..200b1c7b8d8 100644 --- a/docs/reference/mapping/types/boolean.asciidoc +++ b/docs/reference/mapping/types/boolean.asciidoc @@ -104,7 +104,7 @@ The following parameters are accepted by `boolean` fields: <>:: - Should the field be searchable? Accepts `not_analyzed` (default) and `no`. + Should the field be searchable? Accepts `true` (default) and `false`. <>:: diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index 118c1a85d4f..3ee123678b8 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -115,13 +115,13 @@ The following parameters are accepted by `date` fields: Whether or not the field value should be included in the <> field? Accepts `true` or `false`. Defaults - to `false` if <> is set to `no`, or if a parent + to `false` if <> is set to `false`, or if a parent <> field sets `include_in_all` to `false`. Otherwise defaults to `true`. <>:: - Should the field be searchable? Accepts `not_analyzed` (default) and `no`. + Should the field be searchable? Accepts `true` (default) and `false`. 
<>:: diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc index 9b7443ef60a..49656f99013 100644 --- a/docs/reference/mapping/types/ip.asciidoc +++ b/docs/reference/mapping/types/ip.asciidoc @@ -62,13 +62,13 @@ The following parameters are accepted by `ip` fields: Whether or not the field value should be included in the <> field? Accepts `true` or `false`. Defaults - to `false` if <> is set to `no`, or if a parent + to `false` if <> is set to `false`, or if a parent <> field sets `include_in_all` to `false`. Otherwise defaults to `true`. <>:: - Should the field be searchable? Accepts `not_analyzed` (default) and `no`. + Should the field be searchable? Accepts `true` (default) and `false`. <>:: diff --git a/docs/reference/mapping/types/numeric.asciidoc b/docs/reference/mapping/types/numeric.asciidoc index 77f5808e6b0..4a4fd800152 100644 --- a/docs/reference/mapping/types/numeric.asciidoc +++ b/docs/reference/mapping/types/numeric.asciidoc @@ -65,13 +65,13 @@ The following parameters are accepted by numeric types: Whether or not the field value should be included in the <> field? Accepts `true` or `false`. Defaults - to `false` if <> is set to `no`, or if a parent + to `false` if <> is set to `false`, or if a parent <> field sets `include_in_all` to `false`. Otherwise defaults to `true`. <>:: - Should the field be searchable? Accepts `not_analyzed` (default) and `no`. + Should the field be searchable? Accepts `true` (default) and `false`. <>:: diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 54449bef7e6..db3ef9c98f6 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -273,6 +273,12 @@ float by default instead of a double. The reasoning is that floats should be more than enough for most cases but would decrease storage requirements significantly. 
+==== `index` property + +On all types but `string`, the `index` property now only accepts `true`/`false` +instead of `not_analyzed`/`no`. The `string` field still accepts +`analyzed`/`not_analyzed`/`no`. + ==== `_source`'s `format` option The `_source` mapping does not support the `format` option anymore. This option From aea7660e37d686375955d4093bbbdc344a0c1240 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 12 Jan 2016 17:40:34 +0100 Subject: [PATCH 345/347] Add search_after parameter in the Search API. The search_after parameter provides a way to efficiently paginate from one page to the next. This parameter accepts an array of sort values, those values are then used by the searcher to sort the top hits from the first document that is greater than the sort values. This parameter must be used in conjunction with the sort parameter, and it must contain exactly the same number of values as the number of fields to sort on. NOTE: A field with one unique value per document should be used as the last element of the sort specification. Otherwise the sort order for documents that have the same sort values would be undefined. The recommended way is to use the field `_uuid` which is certain to contain one unique value for each document.
Fixes #8192 --- .../action/search/SearchRequestBuilder.java | 10 + .../percolator/PercolateContext.java | 14 +- .../elasticsearch/search/SearchService.java | 12 + .../search/builder/SearchSourceBuilder.java | 43 ++- .../search/internal/DefaultSearchContext.java | 14 + .../internal/FilteredSearchContext.java | 11 + .../search/internal/SearchContext.java | 5 + .../search/query/QueryPhase.java | 12 +- .../searchafter/SearchAfterBuilder.java | 303 +++++++++++++++++ .../basic/TransportSearchFailuresIT.java | 1 - .../builder/SearchSourceBuilderTests.java | 52 +++ .../searchafter/SearchAfterBuilderTests.java | 257 ++++++++++++++ .../search/searchafter/SearchAfterIT.java | 314 ++++++++++++++++++ docs/reference/index-modules.asciidoc | 2 +- docs/reference/search/request-body.asciidoc | 2 + .../search/request/from-size.asciidoc | 2 +- .../search/request/search-after.asciidoc | 62 ++++ .../resources/rest-api-spec/api/search.json | 4 + .../test/search/90_search_after.yaml | 102 ++++++ .../elasticsearch/test/TestSearchContext.java | 11 + 20 files changed, 1220 insertions(+), 13 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java create mode 100644 core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java create mode 100644 docs/reference/search/request/search-after.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yaml diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 4c538e59e71..9d6f61ed580 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -28,6 +28,7 @@ import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; @@ -343,6 +344,15 @@ public class SearchRequestBuilder extends ActionRequestBuilderfalse. diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index 7d4e18c3d37..bece4fd4418 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; -import org.apache.lucene.util.BytesRef; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.util.Counter; import org.elasticsearch.action.percolate.PercolateShardRequest; import org.elasticsearch.action.search.SearchType; @@ -48,7 +48,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.similarity.SimilarityService; @@ -82,7 +81,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.ConcurrentMap; /** */ @@ -518,6 +516,16 @@ public class PercolateContext extends SearchContext { 
return trackScores; } + @Override + public SearchContext searchAfter(FieldDoc searchAfter) { + throw new UnsupportedOperationException(); + } + + @Override + public FieldDoc searchAfter() { + return null; + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { throw new UnsupportedOperationException(); diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 8c9803c277a..5c74ccca2ba 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -27,6 +27,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -101,6 +102,7 @@ import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.query.ScrollQuerySearchResult; import org.elasticsearch.search.rescore.RescoreBuilder; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -864,6 +866,16 @@ public class SearchService extends AbstractLifecycleComponent imp if (source.stats() != null) { context.groupStats(source.stats()); } + if (source.searchAfter() != null && source.searchAfter().length > 0) { + if (context.scrollContext() != null) { + throw new SearchContextException(context, "`search_after` cannot be used in a scroll context."); + } + if (context.from() > 0) { + throw new SearchContextException(context, "`from` parameter must be set to 0 when `search_after` is used."); + } + FieldDoc fieldDoc = 
SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter()); + context.searchAfter(fieldDoc); + } } private static final int[] EMPTY_DOC_IDS = new int[0]; diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index a5e133466e0..16c00a0cca3 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; @@ -94,6 +95,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public static final ParseField STATS_FIELD = new ParseField("stats"); public static final ParseField EXT_FIELD = new ParseField("ext"); public static final ParseField PROFILE_FIELD = new ParseField("profile"); + public static final ParseField SEARCH_AFTER = new ParseField("search_after"); private static final SearchSourceBuilder PROTOTYPE = new SearchSourceBuilder(); @@ -135,6 +137,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private boolean trackScores = false; + private SearchAfterBuilder searchAfterBuilder; + private Float minScore; private long timeoutInMillis = -1; @@ -381,6 +385,28 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return trackScores; } + + /** + * The sort values that indicates which docs this request should "search after". 
+ * The sort values of the search_after must be equal to the number of sort fields in the query and they should be + * of the same type (or parsable as such). + * Defaults to null. + */ + public Object[] searchAfter() { + if (searchAfterBuilder == null) { + return null; + } + return searchAfterBuilder.getSortValues(); + } + + /** + * Set the sort values that indicates which docs this request should "search after". + */ + public SearchSourceBuilder searchAfter(Object[] values) { + this.searchAfterBuilder = new SearchAfterBuilder().setSortValues(values); + return this; + } + /** * Add an aggregation to perform as part of the search. */ @@ -890,6 +916,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.stats = stats; } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { builder.fetchSourceContext = FetchSourceContext.parse(parser, context); + } else if (context.parseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) { + builder.searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, context.parseFieldMatcher()); } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); @@ -996,6 +1024,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.field(TRACK_SCORES_FIELD.getPreferredName(), true); } + if (searchAfterBuilder != null) { + builder.field(SEARCH_AFTER.getPreferredName(), searchAfterBuilder.getSortValues()); + } + if (indexBoost != null) { builder.startObject(INDICES_BOOST_FIELD.getPreferredName()); assert !indexBoost.containsKey(null); @@ -1234,6 +1266,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } else { builder.profile = false; } + if (in.readBoolean()) { + builder.searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.readFrom(in); + } return builder; } @@ -1350,13 +1385,18 @@ public final class 
SearchSourceBuilder extends ToXContentToBytes implements Writ if (out.getVersion().onOrAfter(Version.V_2_2_0)) { out.writeBoolean(profile); } + boolean hasSearchAfter = searchAfterBuilder != null; + out.writeBoolean(hasSearchAfter); + if (hasSearchAfter) { + searchAfterBuilder.writeTo(out); + } } @Override public int hashCode() { return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from, highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, - size, sorts, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile); + size, sorts, searchAfterBuilder, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile); } @Override @@ -1384,6 +1424,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ && Objects.equals(scriptFields, other.scriptFields) && Objects.equals(size, other.size) && Objects.equals(sorts, other.sorts) + && Objects.equals(searchAfterBuilder, other.searchAfterBuilder) && Objects.equals(stats, other.stats) && Objects.equals(suggestBuilder, other.suggestBuilder) && Objects.equals(terminateAfter, other.terminateAfter) diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index e5113bbc542..7486a451a7a 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -27,7 +27,9 @@ import org.apache.lucene.search.Collector; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.util.Counter; import 
org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -115,6 +117,7 @@ public class DefaultSearchContext extends SearchContext { private Sort sort; private Float minimumScore; private boolean trackScores = false; // when sorting, track scores as well... + private FieldDoc searchAfter; /** * The original query as sent by the user without the types and aliases * applied. Putting things in here leaks them into highlighting so don't add @@ -549,6 +552,17 @@ public class DefaultSearchContext extends SearchContext { return this.trackScores; } + @Override + public SearchContext searchAfter(FieldDoc searchAfter) { + this.searchAfter = searchAfter; + return this; + } + + @Override + public FieldDoc searchAfter() { + return searchAfter; + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { this.postFilter = postFilter; diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index eaa14933b33..801b46f93b6 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; @@ -336,6 +337,16 @@ public abstract class FilteredSearchContext extends SearchContext { return in.trackScores(); } + @Override + public SearchContext searchAfter(FieldDoc searchAfter) { + return in.searchAfter(searchAfter); + } + + @Override + public FieldDoc searchAfter() { + return in.searchAfter(); + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { return in.parsedPostFilter(postFilter); diff --git 
a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 76164b5c0f8..374826a9879 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; @@ -240,6 +241,10 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple public abstract boolean trackScores(); + public abstract SearchContext searchAfter(FieldDoc searchAfter); + + public abstract FieldDoc searchAfter(); + public abstract SearchContext parsedPostFilter(ParsedQuery postFilter); public abstract ParsedQuery parsedPostFilter(); diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 5352fb02895..5a98744505a 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -192,10 +192,10 @@ public class QueryPhase implements SearchPhase { final ScrollContext scrollContext = searchContext.scrollContext(); assert (scrollContext != null) == (searchContext.request().scroll() != null); final TopDocsCollector topDocsCollector; - ScoreDoc lastEmittedDoc; + ScoreDoc after = null; if (searchContext.request().scroll() != null) { numDocs = Math.min(searchContext.size(), totalNumDocs); - lastEmittedDoc = scrollContext.lastEmittedDoc; + after = scrollContext.lastEmittedDoc; if (returnsDocsInOrder(query, searchContext.sort())) { if (scrollContext.totalHits == -1) { @@ -209,7 +209,7 @@ public class QueryPhase implements SearchPhase { if 
(scrollContext.lastEmittedDoc != null) { BooleanQuery bq = new BooleanQuery.Builder() .add(query, BooleanClause.Occur.MUST) - .add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER) + .add(new MinDocQuery(after.doc + 1), BooleanClause.Occur.FILTER) .build(); query = bq; } @@ -217,7 +217,7 @@ public class QueryPhase implements SearchPhase { } } } else { - lastEmittedDoc = null; + after = searchContext.searchAfter(); } if (totalNumDocs == 0) { // top collectors don't like a size of 0 @@ -226,13 +226,13 @@ public class QueryPhase implements SearchPhase { assert numDocs > 0; if (searchContext.sort() != null) { topDocsCollector = TopFieldCollector.create(searchContext.sort(), numDocs, - (FieldDoc) lastEmittedDoc, true, searchContext.trackScores(), searchContext.trackScores()); + (FieldDoc) after, true, searchContext.trackScores(), searchContext.trackScores()); } else { rescore = !searchContext.rescore().isEmpty(); for (RescoreSearchContext rescoreContext : searchContext.rescore()) { numDocs = Math.max(rescoreContext.window(), numDocs); } - topDocsCollector = TopScoreDocCollector.create(numDocs, lastEmittedDoc); + topDocsCollector = TopScoreDocCollector.create(numDocs, after); } collector = topDocsCollector; if (doProfile) { diff --git a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java new file mode 100644 index 00000000000..7cfcee4de59 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java @@ -0,0 +1,303 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.searchafter; + +import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.fielddata.IndexFieldData; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * + */ +public class SearchAfterBuilder implements ToXContent, FromXContentBuilder, Writeable { + public static final SearchAfterBuilder PROTOTYPE = new SearchAfterBuilder(); + public static final ParseField SEARCH_AFTER = new ParseField("search_after"); + private static final Object[] EMPTY_SORT_VALUES = new Object[0]; + + private Object[] sortValues = EMPTY_SORT_VALUES; + + public SearchAfterBuilder setSortValues(Object[] values) { + 
if (values == null) { + throw new NullPointerException("Values cannot be null."); + } + if (values.length == 0) { + throw new IllegalArgumentException("Values must contains at least one value."); + } + sortValues = new Object[values.length]; + System.arraycopy(values, 0, sortValues, 0, values.length); + return this; + } + + public Object[] getSortValues() { + return sortValues; + } + + public static FieldDoc buildFieldDoc(Sort sort, Object[] values) { + if (sort == null || sort.getSort() == null || sort.getSort().length == 0) { + throw new IllegalArgumentException("Sort must contain at least one field."); + } + + SortField[] sortFields = sort.getSort(); + if (sortFields.length != values.length) { + throw new IllegalArgumentException(SEARCH_AFTER.getPreferredName() + " has " + values.length + " value(s) but sort has " + sort.getSort().length + "."); + } + Object[] fieldValues = new Object[sortFields.length]; + for (int i = 0; i < sortFields.length; i++) { + SortField sortField = sortFields[i]; + fieldValues[i] = convertValueFromSortField(values[i], sortField); + } + // We set the doc id to Integer.MAX_VALUE in order to make sure that the search starts "after" the first document that is equal to the field values. 
+ return new FieldDoc(Integer.MAX_VALUE, 0, fieldValues); + } + + private static Object convertValueFromSortField(Object value, SortField sortField) { + if (sortField.getComparatorSource() instanceof IndexFieldData.XFieldComparatorSource) { + IndexFieldData.XFieldComparatorSource cmpSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource(); + return convertValueFromSortType(sortField.getField(), cmpSource.reducedType(), value); + } + return convertValueFromSortType(sortField.getField(), sortField.getType(), value); + } + + private static Object convertValueFromSortType(String fieldName, SortField.Type sortType, Object value) { + try { + switch (sortType) { + case DOC: + if (value instanceof Number) { + return ((Number) value).intValue(); + } + return Integer.parseInt(value.toString()); + + case SCORE: + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + return Float.parseFloat(value.toString()); + + case INT: + if (value instanceof Number) { + return ((Number) value).intValue(); + } + return Integer.parseInt(value.toString()); + + case DOUBLE: + if (value instanceof Number) { + return ((Number) value).doubleValue(); + } + return Double.parseDouble(value.toString()); + + case LONG: + if (value instanceof Number) { + return ((Number) value).longValue(); + } + return Long.parseLong(value.toString()); + + case FLOAT: + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + return Float.parseFloat(value.toString()); + + case STRING_VAL: + case STRING: + return new BytesRef(value.toString()); + + default: + throw new IllegalArgumentException("Comparator type [" + sortType.name() + "] for field [" + fieldName + "] is not supported."); + } + } catch(NumberFormatException e) { + throw new IllegalArgumentException("Failed to parse " + SEARCH_AFTER.getPreferredName() + " value for field [" + fieldName + "].", e); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params 
params) throws IOException { + builder.startObject(); + innerToXContent(builder); + builder.endObject(); + return builder; + } + + void innerToXContent(XContentBuilder builder) throws IOException { + builder.field(SEARCH_AFTER.getPreferredName(), sortValues); + } + + @Override + public SearchAfterBuilder fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { + SearchAfterBuilder builder = new SearchAfterBuilder(); + XContentParser.Token token = parser.currentToken(); + List values = new ArrayList<> (); + if (token == XContentParser.Token.START_ARRAY) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.VALUE_NUMBER) { + switch (parser.numberType()) { + case INT: + values.add(parser.intValue()); + break; + + case LONG: + values.add(parser.longValue()); + break; + + case DOUBLE: + values.add(parser.doubleValue()); + break; + + case FLOAT: + values.add(parser.floatValue()); + break; + + default: + throw new AssertionError("Unknown number type []" + parser.numberType()); + } + } else if (token == XContentParser.Token.VALUE_STRING) { + values.add(parser.text()); + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + values.add(parser.booleanValue()); + } else { + throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] or [" + XContentParser.Token.VALUE_NUMBER + "] or [" + XContentParser.Token.VALUE_BOOLEAN + "] but found [" + token + "] inside search_after.", parser.getTokenLocation()); + } + } + } else { + throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_ARRAY + "] in [" + SEARCH_AFTER.getPreferredName() + "] but found [" + token + "] inside search_after", parser.getTokenLocation()); + } + builder.setSortValues(values.toArray()); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(sortValues.length); + for 
(Object fieldValue : sortValues) { + if (fieldValue == null) { + throw new IOException("Can't handle " + SEARCH_AFTER.getPreferredName() + " field value of type [null]"); + } + Class type = fieldValue.getClass(); + if (type == String.class) { + out.writeByte((byte) 1); + out.writeString((String) fieldValue); + } else if (type == Integer.class) { + out.writeByte((byte) 2); + out.writeInt((Integer) fieldValue); + } else if (type == Long.class) { + out.writeByte((byte) 3); + out.writeLong((Long) fieldValue); + } else if (type == Float.class) { + out.writeByte((byte) 4); + out.writeFloat((Float) fieldValue); + } else if (type == Double.class) { + out.writeByte((byte) 5); + out.writeDouble((Double) fieldValue); + } else if (type == Byte.class) { + out.writeByte((byte) 6); + out.writeByte((Byte) fieldValue); + } else if (type == Short.class) { + out.writeByte((byte) 7); + out.writeShort((Short) fieldValue); + } else if (type == Boolean.class) { + out.writeByte((byte) 8); + out.writeBoolean((Boolean) fieldValue); + } else if (fieldValue instanceof Text) { + out.writeByte((byte) 9); + out.writeText((Text) fieldValue); + } else { + throw new IOException("Can't handle " + SEARCH_AFTER.getPreferredName() + " field value of type [" + type + "]"); + } + } + } + + @Override + public SearchAfterBuilder readFrom(StreamInput in) throws IOException { + SearchAfterBuilder builder = new SearchAfterBuilder(); + int size = in.readVInt(); + Object[] values = new Object[size]; + for (int i = 0; i < size; i++) { + byte type = in.readByte(); + if (type == 1) { + values[i] = in.readString(); + } else if (type == 2) { + values[i] = in.readInt(); + } else if (type == 3) { + values[i] = in.readLong(); + } else if (type == 4) { + values[i] = in.readFloat(); + } else if (type == 5) { + values[i] = in.readDouble(); + } else if (type == 6) { + values[i] = in.readByte(); + } else if (type == 7) { + values[i] = in.readShort(); + } else if (type == 8) { + values[i] = in.readBoolean(); + } else if 
(type == 9) { + values[i] = in.readText(); + } else { + throw new IOException("Can't match type [" + type + "]"); + } + } + builder.setSortValues(values); + return builder; + } + + @Override + public boolean equals(Object other) { + if (! (other instanceof SearchAfterBuilder)) { + return false; + } + return Arrays.equals(sortValues, ((SearchAfterBuilder) other).sortValues); + } + + @Override + public int hashCode() { + return Objects.hash(this.sortValues); + } + + @Override + public String toString() { + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.prettyPrint(); + toXContent(builder, EMPTY_PARAMS); + return builder.string(); + } catch (Exception e) { + throw new ElasticsearchException("Failed to build xcontent.", e); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index 3d3388b87b4..3cd1d269275 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -39,7 +39,6 @@ import java.io.IOException; import static org.elasticsearch.client.Requests.clusterHealthRequest; import static org.elasticsearch.client.Requests.refreshRequest; import static org.elasticsearch.client.Requests.searchRequest; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.anyOf; diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 46fbae95367..bb969b90de6 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -54,6 +55,7 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder.InnerHit; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilderTests; import org.elasticsearch.search.rescore.QueryRescoreBuilderTests; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; @@ -262,6 +264,56 @@ public class SearchSourceBuilderTests extends ESTestCase { } } } + + if (randomBoolean()) { + int numSearchFrom = randomIntBetween(1, 5); + // We build a json version of the search_from first in order to + // ensure that every number type remain the same before/after xcontent (de)serialization. + // This is not a problem because the final type of each field value is extracted from associated sort field. + // This little trick ensure that equals and hashcode are the same when using the xcontent serialization. 
+ XContentBuilder jsonBuilder = XContentFactory.jsonBuilder(); + jsonBuilder.startObject(); + jsonBuilder.startArray("search_from"); + for (int i = 0; i < numSearchFrom; i++) { + int branch = randomInt(8); + switch (branch) { + case 0: + jsonBuilder.value(randomInt()); + break; + case 1: + jsonBuilder.value(randomFloat()); + break; + case 2: + jsonBuilder.value(randomLong()); + break; + case 3: + jsonBuilder.value(randomDouble()); + break; + case 4: + jsonBuilder.value(randomAsciiOfLengthBetween(5, 20)); + break; + case 5: + jsonBuilder.value(randomBoolean()); + break; + case 6: + jsonBuilder.value(randomByte()); + break; + case 7: + jsonBuilder.value(randomShort()); + break; + case 8: + jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20))); + break; + } + } + jsonBuilder.endArray(); + jsonBuilder.endObject(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(jsonBuilder.bytes()); + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + builder.searchAfter(SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null).getSortValues()); + } if (randomBoolean()) { builder.highlighter(HighlightBuilderTests.randomHighlighterBuilder()); } diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java new file mode 100644 index 00000000000..b85c0ff5c36 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -0,0 +1,257 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.searchafter; + +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.query.MatchAllQueryParser; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +public class SearchAfterBuilderTests extends ESTestCase { + private static final int NUMBER_OF_TESTBUILDERS = 20; + private static NamedWriteableRegistry namedWriteableRegistry; + private static IndicesQueriesRegistry indicesQueriesRegistry; + + /** + * setup for the whole base test class + */ + @BeforeClass + public static void init() { + namedWriteableRegistry = new 
NamedWriteableRegistry(); + indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), + Collections.singletonMap("match_all", new MatchAllQueryParser())); + } + + @AfterClass + public static void afterClass() throws Exception { + namedWriteableRegistry = null; + indicesQueriesRegistry = null; + } + + private final SearchAfterBuilder randomSearchFromBuilder() throws IOException { + int numSearchFrom = randomIntBetween(1, 10); + SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder(); + Object[] values = new Object[numSearchFrom]; + for (int i = 0; i < numSearchFrom; i++) { + int branch = randomInt(8); + switch (branch) { + case 0: + values[i] = randomInt(); + break; + case 1: + values[i] = randomFloat(); + break; + case 2: + values[i] = randomLong(); + break; + case 3: + values[i] = randomDouble(); + break; + case 4: + values[i] = randomAsciiOfLengthBetween(5, 20); + break; + case 5: + values[i] = randomBoolean(); + break; + case 6: + values[i] = randomByte(); + break; + case 7: + values[i] = randomShort(); + break; + case 8: + values[i] = new Text(randomAsciiOfLengthBetween(5, 20)); + break; + } + } + searchAfterBuilder.setSortValues(values); + return searchAfterBuilder; + } + + // We build a json version of the search_after first in order to + // ensure that every number type remain the same before/after xcontent (de)serialization. + // This is not a problem because the final type of each field value is extracted from associated sort field. + // This little trick ensure that equals and hashcode are the same when using the xcontent serialization. 
+ private final SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { + int numSearchAfter = randomIntBetween(1, 10); + XContentBuilder jsonBuilder = XContentFactory.jsonBuilder(); + jsonBuilder.startObject(); + jsonBuilder.startArray("search_after"); + for (int i = 0; i < numSearchAfter; i++) { + int branch = randomInt(8); + switch (branch) { + case 0: + jsonBuilder.value(randomInt()); + break; + case 1: + jsonBuilder.value(randomFloat()); + break; + case 2: + jsonBuilder.value(randomLong()); + break; + case 3: + jsonBuilder.value(randomDouble()); + break; + case 4: + jsonBuilder.value(randomAsciiOfLengthBetween(5, 20)); + break; + case 5: + jsonBuilder.value(randomBoolean()); + break; + case 6: + jsonBuilder.value(randomByte()); + break; + case 7: + jsonBuilder.value(randomShort()); + break; + case 8: + jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20))); + break; + } + } + jsonBuilder.endArray(); + jsonBuilder.endObject(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(jsonBuilder.bytes()); + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + return SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null); + } + + private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + original.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + return SearchAfterBuilder.PROTOTYPE.readFrom(in); + } + } + } + + public void testSerialization() throws Exception { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SearchAfterBuilder original = randomSearchFromBuilder(); + SearchAfterBuilder deserialized = serializedCopy(original); + assertEquals(deserialized, original); + assertEquals(deserialized.hashCode(), original.hashCode()); + assertNotSame(deserialized, original); + } + } + + public void 
testEqualsAndHashcode() throws Exception { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SearchAfterBuilder firstBuilder = randomSearchFromBuilder(); + assertFalse("searchFrom is equal to null", firstBuilder.equals(null)); + assertFalse("searchFrom is equal to incompatible type", firstBuilder.equals("")); + assertTrue("searchFrom is not equal to self", firstBuilder.equals(firstBuilder)); + assertThat("same searchFrom's hashcode returns different values if called multiple times", firstBuilder.hashCode(), + equalTo(firstBuilder.hashCode())); + + SearchAfterBuilder secondBuilder = serializedCopy(firstBuilder); + assertTrue("searchFrom is not equal to self", secondBuilder.equals(secondBuilder)); + assertTrue("searchFrom is not equal to its copy", firstBuilder.equals(secondBuilder)); + assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder)); + assertThat("searchFrom copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode())); + + SearchAfterBuilder thirdBuilder = serializedCopy(secondBuilder); + assertTrue("searchFrom is not equal to self", thirdBuilder.equals(thirdBuilder)); + assertTrue("searchFrom is not equal to its copy", secondBuilder.equals(thirdBuilder)); + assertThat("searchFrom copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder)); + assertThat("searchFrom copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertTrue("searchFrom is not symmetric", thirdBuilder.equals(secondBuilder)); + assertTrue("searchFrom is not symmetric", thirdBuilder.equals(firstBuilder)); + } + } + + public void testFromXContent() throws Exception { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); + for 
(int runs = 0; runs < 20; runs++) { + SearchAfterBuilder searchAfterBuilder = randomJsonSearchFromBuilder(); + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + builder.prettyPrint(); + } + builder.startObject(); + searchAfterBuilder.innerToXContent(builder); + builder.endObject(); + XContentParser parser = XContentHelper.createParser(builder.bytes()); + context.reset(parser); + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null); + assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); + } + } + + public void testWithNullValue() throws Exception { + SearchAfterBuilder builder = new SearchAfterBuilder(); + builder.setSortValues(new Object[] {1, "1", null}); + try { + serializedCopy(builder); + fail("Should fail on null values"); + } catch (IOException e) { + assertThat(e.getMessage(), Matchers.equalTo("Can't handle search_after field value of type [null]")); + } + } + + public void testWithNullArray() throws Exception { + SearchAfterBuilder builder = new SearchAfterBuilder(); + try { + builder.setSortValues(null); + fail("Should fail on null array."); + } catch (NullPointerException e) { + assertThat(e.getMessage(), Matchers.equalTo("Values cannot be null.")); + } + } + + public void testWithEmptyArray() throws Exception { + SearchAfterBuilder builder = new SearchAfterBuilder(); + try { + builder.setSortValues(new Object[0]); + fail("Should fail on empty array."); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), Matchers.equalTo("Values must contains at least one value.")); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java 
b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java new file mode 100644 index 00000000000..fdbed8b755e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -0,0 +1,314 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.searchafter; + +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.SearchContextException; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.RemoteTransportException; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.List; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.Collections; +import java.util.Arrays; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.hamcrest.Matchers.equalTo; + +public class SearchAfterIT extends ESIntegTestCase { + private static final String INDEX_NAME = "test"; + private static final String TYPE_NAME = "type1"; + private static final int NUM_DOCS = 100; + + public void testsShouldFail() throws Exception { + client().admin().indices().prepareCreate("test").execute().actionGet(); + client().prepareIndex("test", "type1", "0").setSource("field1", 0, "field2", "toto").execute().actionGet(); + refresh(); + + try { + client().prepareSearch("test") + .addSort("field1", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[]{0}) + .setScroll("1m") + .execute().actionGet(); + + fail("Should fail on search_after cannot be used with scroll."); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getCause().getClass(), 
Matchers.equalTo(RemoteTransportException.class)); + assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(SearchContextException.class)); + assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("`search_after` cannot be used in a scroll context.")); + } + try { + client().prepareSearch("test") + .addSort("field1", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[]{0}) + .setFrom(10) + .execute().actionGet(); + + fail("Should fail on search_after cannot be used with from > 0."); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class)); + assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(SearchContextException.class)); + assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("`from` parameter must be set to 0 when `search_after` is used.")); + } + + try { + client().prepareSearch("test") + .setQuery(matchAllQuery()) + .searchAfter(new Object[]{0.75f}) + .execute().actionGet(); + + fail("Should fail on search_after on score only is disabled"); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class)); + assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class)); + assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("Sort must contain at least one field.")); + } + + try { + client().prepareSearch("test") + .addSort("field2", SortOrder.DESC) + .addSort("field1", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[]{1}) + .get(); + fail("Should fail on search_after size differs from sort field size"); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class)); + assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class)); + assertThat(e.getCause().getCause().getMessage(), 
Matchers.equalTo("search_after has 1 value(s) but sort has 2.")); + } + + try { + client().prepareSearch("test") + .setQuery(matchAllQuery()) + .addSort("field1", SortOrder.ASC) + .searchAfter(new Object[]{1, 2}) + .execute().actionGet(); + fail("Should fail on search_after size differs from sort field size"); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class)); + assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class)); + assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("search_after has 2 value(s) but sort has 1.")); + } + + try { + client().prepareSearch("test") + .setQuery(matchAllQuery()) + .addSort("field1", SortOrder.ASC) + .searchAfter(new Object[]{"toto"}) + .execute().actionGet(); + + fail("Should fail on search_after on score only is disabled"); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class)); + assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class)); + assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("Failed to parse search_after value for field [field1].")); + } + } + + public void testWithSimpleTypes() throws Exception { + int numFields = randomInt(20) + 1; + int[] types = new int[numFields-1]; + for (int i = 0; i < numFields-1; i++) { + types[i] = randomInt(6); + } + List documents = new ArrayList<> (); + for (int i = 0; i < NUM_DOCS; i++) { + List values = new ArrayList<>(); + for (int type : types) { + switch (type) { + case 0: + values.add(randomBoolean()); + break; + case 1: + values.add(randomByte()); + break; + case 2: + values.add(randomShort()); + break; + case 3: + values.add(randomInt()); + break; + case 4: + values.add(randomFloat()); + break; + case 5: + values.add(randomDouble()); + break; + case 6: + values.add(new Text(randomAsciiOfLengthBetween(5, 20))); + 
break; + } + } + values.add(new Text(Strings.randomBase64UUID())); + documents.add(values); + } + int reqSize = randomInt(NUM_DOCS-1); + if (reqSize == 0) { + reqSize = 1; + } + assertSearchFromWithSortValues(INDEX_NAME, TYPE_NAME, documents, reqSize); + } + + private static class ListComparator implements Comparator { + @Override + public int compare(List o1, List o2) { + if (o1.size() > o2.size()) { + return 1; + } + + if (o2.size() > o1.size()) { + return -1; + } + + for (int i = 0; i < o1.size(); i++) { + if (!(o1.get(i) instanceof Comparable)) { + throw new RuntimeException(o1.get(i).getClass() + " is not comparable"); + } + Object cmp1 = o1.get(i); + Object cmp2 = o2.get(i); + int cmp = ((Comparable)cmp1).compareTo(cmp2); + if (cmp != 0) { + return cmp; + } + } + return 0; + } + } + private ListComparator LST_COMPARATOR = new ListComparator(); + + private void assertSearchFromWithSortValues(String indexName, String typeName, List documents, int reqSize) throws Exception { + int numFields = documents.get(0).size(); + { + createIndexMappingsFromObjectType(indexName, typeName, documents.get(0)); + List requests = new ArrayList<>(); + for (int i = 0; i < documents.size(); i++) { + XContentBuilder builder = jsonBuilder(); + assertThat(documents.get(i).size(), Matchers.equalTo(numFields)); + builder.startObject(); + for (int j = 0; j < numFields; j++) { + builder.field("field" + Integer.toString(j), documents.get(i).get(j)); + } + builder.endObject(); + requests.add(client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)).setSource(builder)); + } + indexRandom(true, requests); + } + + Collections.sort(documents, LST_COMPARATOR); + int offset = 0; + Object[] sortValues = null; + while (offset < documents.size()) { + SearchRequestBuilder req = client().prepareSearch(indexName); + for (int i = 0; i < documents.get(0).size(); i++) { + req.addSort("field" + Integer.toString(i), SortOrder.ASC); + } + req.setQuery(matchAllQuery()).setSize(reqSize); + if 
(sortValues != null) { + req.searchAfter(sortValues); + } + SearchResponse searchResponse = req.execute().actionGet(); + for (SearchHit hit : searchResponse.getHits()) { + List toCompare = convertSortValues(documents.get(offset++)); + assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.sortValues())), equalTo(0)); + } + sortValues = searchResponse.getHits().hits()[searchResponse.getHits().hits().length-1].getSortValues(); + } + } + + private void createIndexMappingsFromObjectType(String indexName, String typeName, List types) { + CreateIndexRequestBuilder indexRequestBuilder = client().admin().indices().prepareCreate(indexName); + List mappings = new ArrayList<> (); + int numFields = types.size(); + for (int i = 0; i < numFields; i++) { + Class type = types.get(i).getClass(); + if (type == Integer.class) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=integer"); + } else if (type == Long.class) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=long"); + } else if (type == Float.class) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=float"); + } else if (type == Double.class) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=double"); + } else if (type == Byte.class) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=byte"); + } else if (type == Short.class) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=short"); + } else if (type == Boolean.class) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=boolean"); + } else if (types.get(i) instanceof Text) { + mappings.add("field" + Integer.toString(i)); + mappings.add("type=string,index=not_analyzed"); + } else { + fail("Can't match type [" + type + "]"); + } + } + indexRequestBuilder.addMapping(typeName, mappings.toArray()).execute().actionGet(); + } + + // Convert Integer, Short, Byte and Boolean to Long in order to match the conversion done + // by the internal 
hits when populating the sort values. + private List convertSortValues(List sortValues) { + List converted = new ArrayList<> (); + for (int i = 0; i < sortValues.size(); i++) { + Object from = sortValues.get(i); + if (from instanceof Integer) { + converted.add(((Integer) from).longValue()); + } else if (from instanceof Short) { + converted.add(((Short) from).longValue()); + } else if (from instanceof Byte) { + converted.add(((Byte) from).longValue()); + } else if (from instanceof Boolean) { + boolean b = (boolean) from; + if (b) { + converted.add(1L); + } else { + converted.add(0L); + } + } else { + converted.add(from); + } + } + return converted; + } +} diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 56e9d4ddb91..f887fa3fb67 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -104,7 +104,7 @@ specific index module: The maximum value of `from + size` for searches to this index. Defaults to `10000`. Search requests take heap memory and time proportional to `from + size` and this limits that memory. See - {ref}/search-request-scroll.html[Scroll] for a more efficient alternative + <> or <> for a more efficient alternative to raising this. 
`index.blocks.read_only`:: diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index 325ae0d94a6..8207c6577fe 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -172,3 +172,5 @@ include::request/min-score.asciidoc[] include::request/named-queries-and-filters.asciidoc[] include::request/inner-hits.asciidoc[] + +include::request/search-after.asciidoc[] diff --git a/docs/reference/search/request/from-size.asciidoc b/docs/reference/search/request/from-size.asciidoc index 0804ff27d58..2e170dc2604 100644 --- a/docs/reference/search/request/from-size.asciidoc +++ b/docs/reference/search/request/from-size.asciidoc @@ -21,5 +21,5 @@ defaults to `10`. -------------------------------------------------- Note that `from` + `size` can not be more than the `index.max_result_window` -index setting which defaults to 10,000. See the <> +index setting which defaults to 10,000. See the <> or <> API for more efficient ways to do deep scrolling. diff --git a/docs/reference/search/request/search-after.asciidoc b/docs/reference/search/request/search-after.asciidoc new file mode 100644 index 00000000000..bbed3eb097d --- /dev/null +++ b/docs/reference/search/request/search-after.asciidoc @@ -0,0 +1,62 @@ +[[search-request-search-after]] +=== Search After + +Pagination of results can be done by using the `from` and `size` but the cost becomes prohibitive when the deep pagination is reached. +The `index.max_result_window` which defaults to 10,000 is a safeguard, search requests take heap memory and time proportional to `from + size`. +The <> api is recommended for efficient deep scrolling but scroll contexts are costly and it is not +recommended to use it for real time user requests. +The `search_after` parameter circumvents this problem by providing a live cursor. +The idea is to use the results from the previous page to help the retrieval of the next page. 
+ +Suppose that the query to retrieve the first page looks like this: +[source,js] +-------------------------------------------------- +curl -XGET 'localhost:9200/twitter/tweet/_search' +{ + size: "10" + "query": { + "match" : { + "title" : "elasticsearch" + } + }, + "sort": [ + {"age": "asc"}, + {"_uid": "desc"} + ] +} +' +-------------------------------------------------- + +NOTE: A field with one unique value per document should be used as the tiebreaker of the sort specification. +Otherwise the sort order for documents that have the same sort values would be undefined. The recommended way is to use +the field `_uid` which is certain to contain one unique value for each document. + +The result from the above request includes an array of `sort values` for each document. +These `sort values` can be used in conjunction with the `search_after` parameter to start returning results "after" any +document in the result list. +For instance we can use the `sort values` of the last document and pass it to `search_after` to retrieve the next page of results: + +[source,js] +-------------------------------------------------- +curl -XGET 'localhost:9200/twitter/tweet/_search' +{ + "size": 10 + "query": { + "match" : { + "title" : "elasticsearch" + } + }, + "search_after": [18, "tweet#654323"], + "sort": [ + {"age": "asc"}, + {"_uid": "desc"} + ] +} +' +-------------------------------------------------- + +NOTE: The parameter `from` must be set to 0 (or -1) when `search_after` is used. + +`search_after` is not a solution to jump freely to a random page but rather to scroll many queries in parallel. +It is very similar to the `scroll` API but unlike it, the `search_after` parameter is stateless, it is always resolved against the latest + version of the searcher. For this reason the sort order may change during a walk depending on the updates and deletes of your index. 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index d2b9b8cf9b4..d1c19f3ef21 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -154,6 +154,10 @@ "request_cache": { "type" : "boolean", "description" : "Specify if request cache should be used for this request or not, defaults to index level setting" + }, + "search_after": { + "type" : "list", + "description" : "An array of sort values that indicates where the sort of the top hits should start" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yaml new file mode 100644 index 00000000000..8135d25399c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yaml @@ -0,0 +1,102 @@ +setup: + - do: + indices.create: + index: test + - do: + index: + index: test + type: test + id: 1 + body: { foo: bar, age: 18 } + + - do: + index: + index: test + type: test + id: 42 + body: { foo: bar, age: 18 } + + - do: + index: + index: test + type: test + id: 172 + body: { foo: bar, age: 24 } + + - do: + indices.refresh: + index: test + +--- +"search with search_after parameter": + + - do: + search: + index: test + type: test + body: + size: 1 + query: + match: + foo: bar + sort: [{ age: desc }, { _uid: desc }] + + - match: {hits.total: 3 } + - length: {hits.hits: 1 } + - match: {hits.hits.0._index: test } + - match: {hits.hits.0._type: test } + - match: {hits.hits.0._id: "172" } + - match: {hits.hits.0.sort: [24, "test#172"] } + + - do: + search: + index: test + type: test + body: + size: 1 + query: + match: + foo: bar + sort: [{ age: desc }, { _uid: desc }] + search_after: [24, "test#172"] + + - match: {hits.total: 3 } + - length: {hits.hits: 1 } + - match: {hits.hits.0._index: test } 
+ - match: {hits.hits.0._type: test } + - match: {hits.hits.0._id: "42" } + - match: {hits.hits.0.sort: [18, "test#42"] } + + - do: + search: + index: test + type: test + body: + size: 1 + query: + match: + foo: bar + sort: [ { age: desc }, { _uid: desc } ] + search_after: [18, "test#42"] + + - match: {hits.total: 3} + - length: {hits.hits: 1 } + - match: {hits.hits.0._index: test } + - match: {hits.hits.0._type: test } + - match: {hits.hits.0._id: "1" } + - match: {hits.hits.0.sort: [18, "test#1"] } + + - do: + search: + index: test + type: test + body: + size: 1 + query: + match: + foo: bar + sort: [{ age: desc }, { _uid: desc } ] + search_after: [18, "test#1"] + + - match: {hits.total: 3} + - length: {hits.hits: 0 } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 796872bd350..e244c861ffa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -20,6 +20,7 @@ package org.elasticsearch.test; import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; @@ -391,6 +392,16 @@ public class TestSearchContext extends SearchContext { return false; } + @Override + public SearchContext searchAfter(FieldDoc searchAfter) { + return null; + } + + @Override + public FieldDoc searchAfter() { + return null; + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { this.postFilter = postFilter; From 63bc108e7e77eadeb64fca933e3936cc7c922df6 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 27 Jan 2016 10:29:49 +0100 Subject: [PATCH 346/347] docs: s/processor_id/tag --- docs/reference/ingest/ingest.asciidoc | 8 
++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/reference/ingest/ingest.asciidoc b/docs/reference/ingest/ingest.asciidoc index ee724d4ad07..e1ce35eb23c 100644 --- a/docs/reference/ingest/ingest.asciidoc +++ b/docs/reference/ingest/ingest.asciidoc @@ -1014,7 +1014,7 @@ response: { "processor_results": [ { - "processor_id": "processor[set]-0", + "tag": "processor[set]-0", "doc": { "_id": "id", "_ttl": null, @@ -1033,7 +1033,7 @@ response: } }, { - "processor_id": "processor[set]-1", + "tag": "processor[set]-1", "doc": { "_id": "id", "_ttl": null, @@ -1057,7 +1057,7 @@ response: { "processor_results": [ { - "processor_id": "processor[set]-0", + "tag": "processor[set]-0", "doc": { "_id": "id", "_ttl": null, @@ -1076,7 +1076,7 @@ response: } }, { - "processor_id": "processor[set]-1", + "tag": "processor[set]-1", "doc": { "_id": "id", "_ttl": null, From d3db3c690471aad0bf218ca1ef8f27acd706f22c Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 27 Jan 2016 11:36:20 +0100 Subject: [PATCH 347/347] Ignore non-grouped tribe settings --- .../java/org/elasticsearch/common/settings/SettingsModule.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 24fe7be56c6..f95cc1f6f6f 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -87,7 +87,7 @@ public class SettingsModule extends AbstractModule { } public void validateTribeSettings(Settings settings, ClusterSettings clusterSettings) { - Map groups = settings.getGroups("tribe."); + Map groups = settings.getGroups("tribe.", true); for (Map.Entry tribeSettings : groups.entrySet()) { for (Map.Entry entry : tribeSettings.getValue().getAsMap().entrySet()) { validateClusterSetting(clusterSettings, entry.getKey(), 
tribeSettings.getValue());