From 79375d35bbfdfcd380049cbc59d259fc31102d00 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 9 Aug 2018 14:32:36 +0200 Subject: [PATCH 01/11] Scripting: Replace Update Context (#32096) * SCRIPTING: Move Update Scripts to their own context * Added system property for backwards compatibility of change to `ctx.params` --- .../elasticsearch/gradle/BuildPlugin.groovy | 5 +- docs/build.gradle | 1 + modules/lang-painless/build.gradle | 1 + .../rest-api-spec/test/painless/15_update.yml | 2 +- .../AbstractAsyncBulkByScrollAction.java | 9 ++-- ...AsyncBulkByScrollActionScriptTestCase.java | 16 ++++-- .../action/update/UpdateHelper.java | 34 ++++++++---- .../script/ExecutableScript.java | 3 -- .../elasticsearch/script/ScriptModule.java | 2 +- .../elasticsearch/script/ScriptService.java | 2 +- .../elasticsearch/script/UpdateScript.java | 52 +++++++++++++++++++ .../script/ScriptServiceTests.java | 4 +- .../org/elasticsearch/update/UpdateIT.java | 1 + .../script/MockScriptEngine.java | 12 +++++ 14 files changed, 116 insertions(+), 28 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/script/UpdateScript.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index f6c6d5d7fd7..05fd4784863 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -782,9 +782,12 @@ class BuildPlugin implements Plugin { } } - // TODO: remove this once joda time is removed from scriptin in 7.0 + // TODO: remove this once joda time is removed from scripting in 7.0 systemProperty 'es.scripting.use_java_time', 'true' + // TODO: remove this once ctx isn't added to update script params in 7.0 + systemProperty 'es.scripting.update.ctx_in_params', 'false' + // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM if (project.inFipsJvm) { systemProperty 'javax.net.ssl.trustStorePassword', 'password' diff --git a/docs/build.gradle b/docs/build.gradle index 4c0502a0e06..029147bba2f 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -40,6 +40,7 @@ integTestCluster { // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults systemProperty 'es.scripting.use_java_time', 'false' + systemProperty 'es.scripting.update.ctx_in_params', 'false' } // remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index e3a7ccecae2..ed4b1d631e0 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -25,6 +25,7 @@ esplugin { integTestCluster { module project.project(':modules:mapper-extras') systemProperty 'es.scripting.use_java_time', 'true' + systemProperty 'es.scripting.update.ctx_in_params', 'false' } dependencies { diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml index 20047e7d482..f2e1cb616b9 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml @@ -132,7 +132,7 @@ body: script: lang: painless - source: "for (def key : params.keySet()) { ctx._source[key] = params[key]}" + source: "ctx._source.ctx = ctx" params: { bar: 'xxx' } - match: { 
error.root_cause.0.type: "remote_transport_exception" } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 5bd83b6c19a..731a27aa72c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -48,9 +48,9 @@ import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.UpdateScript; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -746,7 +746,7 @@ public abstract class AbstractAsyncBulkByScrollAction params; - private ExecutableScript executable; + private UpdateScript executable; private Map context; public ScriptApplier(WorkerBulkByScrollTaskState taskWorker, @@ -766,7 +766,7 @@ public abstract class AbstractAsyncBulkByScrollAction T applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); - ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); - ExecutableScript.Factory factory = params -> executableScript; - when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(factory); - when(scriptService.compile(any(), eq(ExecutableScript.UPDATE_CONTEXT))).thenReturn(factory); + UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) { + @Override + public void execute(Map ctx) { + scriptBody.accept(ctx); + } + }; + UpdateScript.Factory factory = params -> updateScript; + ExecutableScript simpleExecutableScript = new SimpleExecutableScript(scriptBody); + when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript); + when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory); AbstractAsyncBulkByScrollAction action = action(scriptService, request().setScript(mockScript(""))); RequestWrapper result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc); return (result != null) ? 
(T) result.self() : null; diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 5212b1f3521..77485f81e58 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -19,6 +19,11 @@ package org.elasticsearch.action.update; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.LongSupplier; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; @@ -42,21 +47,22 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.UpdateScript; import org.elasticsearch.search.lookup.SourceLookup; -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.function.LongSupplier; +import static org.elasticsearch.common.Booleans.parseBoolean; /** * Helper for translating an update request to an index, delete request or update response. */ public class UpdateHelper extends AbstractComponent { + + /** Whether scripts should add the ctx variable to the params map. */ + private static final boolean CTX_IN_PARAMS = + parseBoolean(System.getProperty("es.scripting.update.ctx_in_params"), true); + private final ScriptService scriptService; public UpdateHelper(Settings settings, ScriptService scriptService) { @@ -279,10 +285,18 @@ public class UpdateHelper extends AbstractComponent { private Map executeScript(Script script, Map ctx) { try { if (scriptService != null) { - ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT); - ExecutableScript executableScript = factory.newInstance(script.getParams()); - executableScript.setNextVar(ContextFields.CTX, ctx); - executableScript.run(); + UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT); + final Map params; + if (CTX_IN_PARAMS) { + params = new HashMap<>(script.getParams()); + params.put(ContextFields.CTX, ctx); + deprecationLogger.deprecated("Using `ctx` via `params.ctx` is deprecated. 
" + + "Use -Des.scripting.update.ctx_in_params=false to enforce non-deprecated usage."); + } else { + params = script.getParams(); + } + UpdateScript executableScript = factory.newInstance(params); + executableScript.execute(ctx); } } catch (Exception e) { throw new IllegalArgumentException("failed to execute script", e); diff --git a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java index 1bd4c31ebf3..d0d8020371b 100644 --- a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java +++ b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java @@ -46,7 +46,4 @@ public interface ExecutableScript { } ScriptContext CONTEXT = new ScriptContext<>("executable", Factory.class); - - // TODO: remove these once each has its own script interface - ScriptContext UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 3eeb26317f9..f04e690fa42 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -46,10 +46,10 @@ public class ScriptModule { SearchScript.SCRIPT_SORT_CONTEXT, SearchScript.TERMS_SET_QUERY_CONTEXT, ExecutableScript.CONTEXT, + UpdateScript.CONTEXT, BucketAggregationScript.CONTEXT, BucketAggregationSelectorScript.CONTEXT, SignificantTermsHeuristicScoreScript.CONTEXT, - ExecutableScript.UPDATE_CONTEXT, IngestScript.CONTEXT, FilterScript.CONTEXT, SimilarityScript.CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/script/ScriptService.java b/server/src/main/java/org/elasticsearch/script/ScriptService.java index ca79e3b80fc..9768547b898 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptService.java @@ -285,7 +285,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust // TODO: fix this through some API or something, that's wrong // special exception to prevent expressions from compiling as update or mapping scripts boolean expression = "expression".equals(lang); - boolean notSupported = context.name.equals(ExecutableScript.UPDATE_CONTEXT.name); + boolean notSupported = context.name.equals(UpdateScript.CONTEXT.name); if (expression && notSupported) { throw new UnsupportedOperationException("scripts of type [" + script.getType() + "]," + " operation [" + context.name + "] and lang [" + lang + "] are not supported"); diff --git a/server/src/main/java/org/elasticsearch/script/UpdateScript.java b/server/src/main/java/org/elasticsearch/script/UpdateScript.java new file mode 100644 index 00000000000..c6a1d5dd9ea --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/UpdateScript.java @@ -0,0 +1,52 @@ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import java.util.Map; + +/** + * An update script. + */ +public abstract class UpdateScript { + + public static final String[] PARAMETERS = { "ctx" }; + + /** The context used to compile {@link UpdateScript} factories. */ + public static final ScriptContext CONTEXT = new ScriptContext<>("update", Factory.class); + + /** The generic runtime parameters for the script. */ + private final Map params; + + public UpdateScript(Map params) { + this.params = params; + } + + /** Return the parameters for this script. */ + public Map getParams() { + return params; + } + + public abstract void execute(Map ctx); + + public interface Factory { + UpdateScript newInstance(Map params); + } +} diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 585f8601651..ea8b6a92234 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -167,7 +167,7 @@ public class ScriptServiceTests extends ESTestCase { assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT); assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT); - assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT); + assertCompileAccepted("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT); assertCompileAccepted("painless", "script", ScriptType.INLINE, IngestScript.CONTEXT); } @@ -187,7 +187,7 @@ public class ScriptServiceTests extends ESTestCase { assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT); assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT); - assertCompileRejected("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT); + assertCompileRejected("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT); } public void testAllowNoScriptTypeSettings() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java index e4ea078b8f7..85ebf01ef28 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -93,6 +93,7 @@ public class UpdateIT extends ESIntegTestCase { } Map source = (Map) ctx.get("_source"); + params.remove("ctx"); source.putAll(params); return ctx; diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 14dcac926f7..8083931e73d 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -96,6 +96,18 @@ public class MockScriptEngine implements ScriptEngine { } }; return context.factoryClazz.cast(factory); + } else if 
(context.instanceClazz.equals(UpdateScript.class)) { + UpdateScript.Factory factory = parameters -> new UpdateScript(parameters) { + @Override + public void execute(Map ctx) { + final Map vars = new HashMap<>(); + vars.put("ctx", ctx); + vars.put("params", parameters); + vars.putAll(parameters); + script.apply(vars); + } + }; + return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(BucketAggregationScript.class)) { BucketAggregationScript.Factory factory = parameters -> new BucketAggregationScript(parameters) { @Override From e162127ff3cef81f23c56176b756217b5d63dac1 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Wed, 8 Aug 2018 17:11:33 -0500 Subject: [PATCH 02/11] Upgrade to Lucene-7.5.0-snapshot-13b9e28f9d The main feature is the inclusion of bkd backed geo_shape with INTERSECT, DISJOINT, WITHIN bounding box and polygon query support. --- buildSrc/version.properties | 2 +- .../lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - ...lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + ...lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - ...lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + ...lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - ...cene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + ...cene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - 
.../licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - .../pipeline/bucketsort/BucketSortPipelineAggregator.java | 2 +- .../job/persistence/overallbuckets/OverallBucketsProvider.java | 2 +- .../licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 + .../licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 | 1 - 51 files changed, 27 insertions(+), 27 deletions(-) create mode 100644 modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 
server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index dc90fcced31..baa8590bdec 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.5.0-snapshot-608f0277b0 +lucene = 7.5.0-snapshot-13b9e28f9d # optional dependencies spatial4j = 0.7 diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..0ebdddcc5f1 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +fded6bb485b8b01bb2a9280162fd14d4d3ce4510 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 908f70131b3..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd7d8078a2d0ad11a24f54156cc015630c96858a \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..1e79e1e70ef --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +a010e852be8d56efe1906e6da5292e4541239724 \ No newline at end of file diff --git 
a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 5b6947a9c75..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a37816def72a748416c4ae8b0f6817e30efb99f \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..2d9669e4362 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +88e0ed90d433a9088528485cd4f59311735d92a4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d39638c1884..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ca7437178cdbf7b8bfe0d75c75e3c8eb93925724 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..f7b8fdd4bc1 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +0daec9ac3c4bba5f91b1bc413c651b7a98313982 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 21c25d2bb24..00000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3f5dec44f380d6d58bc1c8aec51964fcb5390b60 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..80cf627011b --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +f5af81eec04c1da0d6969cff18f360ff379b1bf7 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index f58c597eadd..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -453bf1d60df0415439095624e0b3e42492ad4716 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..14be684b96f --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ 
+9e649088ee298293aa95a05391dff9cb0582648e \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 8ccec8dbf37..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -70095a45257bca9f46629b5fb6cedf9eff5e2b07 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..ea55c790537 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +47fb370054ba7413d050f13c177edf01180c31ca \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index ec9c33119f5..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7199d6962d268b7877f7b5160e98e4ff21cce5c7 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..2d6f580c35a --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +bc0708acbac195772b67b5ad2e9c4683d27ff450 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index ba9148ef1b3..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -12aff508d39d206a1aead5013ecd11882062eb06 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..2cbf3968762 --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +c547b30525ad80d0ceeaa40c2d3a901c7e76fd46 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 8b2a098a3a2..00000000000 --- a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d27958843ca118db2ffd2c242ae3761bd5a47328 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..9e2473361f0 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +9c327295d54d5abd2684e00c3aefe58aa1caace7 \ No newline at 
end of file diff --git a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d8496a0a86a..00000000000 --- a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7ea220ba8e4accb8b04e280463042ad470e23bc0 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..fdedaf3fc57 --- /dev/null +++ b/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +73dd7703a94ec2357581f65ee7c1c4d618ff310f \ No newline at end of file diff --git a/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d38fb392c35..00000000000 --- a/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -471096d6e92338b208aa91f3a85feb2f9cfc4afd \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..4e555692b0f --- /dev/null +++ b/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +1c3802fa30990a1758f2df19d17fe2c95fc45870 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 7f83082fa0c..00000000000 --- a/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f0af947c60d24f779c22f774e81ebd7dd91cc932 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..73b6c15f332 --- /dev/null +++ b/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +8d7abdbb7900d7e6a76c391d8be07217c0d882ca \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 6b9f2cb724d..00000000000 --- a/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fbc83ac5a0139ed7e7faf6c95a2718f46f28c641 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..23414b8e8e1 --- /dev/null +++ b/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +011f78ae9d9a386fcf20ceea29ba30e75fb512e8 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index a085943140e..00000000000 --- a/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -30adfe493982b0db059dc243e269eea38d850d46 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 
00000000000..d227ebaf463 --- /dev/null +++ b/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +c3dd461a7cebdcacc77304660218513e10f89adb \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 7acc70be151..00000000000 --- a/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -656f304261d9aad05070fb68593beffafe9147e3 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..a892f3a2272 --- /dev/null +++ b/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +d63101181708d78eccc441b0d1193dd91d1a0bf1 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index e46b138ba7b..00000000000 --- a/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8bf22ad81a7480c255b55bada401eb131bfdb4df \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..5d0fead48cb --- /dev/null +++ b/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +22e56fbd44d6a47d7dddbdda3c17ce22ad0a6680 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index a7114feef62..00000000000 --- a/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -edb3de4d68a34c1e1ca08f79fe4d103b10e98ad1 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..8be3d6447b0 --- /dev/null +++ b/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +36b38a1d71045f5bee5dc40526f8d57084dbdc00 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index cf3011c9a45..00000000000 --- a/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7ece30d5f1e18d96f61644451c858c3d9960558f \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..6d968f5400c --- /dev/null +++ b/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +21eb8b111bcb94f4abb8c6402dfd10f51ecc0b38 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 30513e58bf6..00000000000 --- a/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad3bd0c2ed96556193c7215bef328e689d0b157f \ No newline at end of file diff --git 
a/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..b6aec2eae1d --- /dev/null +++ b/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +d60081c5641ed21aea82d5d0976b40e1f184c8e5 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 6146b055c13..00000000000 --- a/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8a6bd97e39ee5af60126adbe8c8375dc41b1ea8e \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..6999baccc89 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +2d42b373546aa8923d25e4e9a673dd186064f9bd \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index c812f044927..00000000000 --- a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -07e748d2d80000a7a213f3405b82b6e26b452948 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..b866b198556 --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +7f31607959e5a2ed84ab2d9a007a3f76e9a2d38c \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index b5ad83ac9fe..00000000000 --- a/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fd737bd5562f3943618ee7e73a0aaffb6319fdb2 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..55e1c5990de --- /dev/null +++ b/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +f7619348f0619867c52f4801531c70358f49873a \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 452b96420f8..00000000000 --- a/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ff3f260d1dc8c18bc67f3c33aa84a0ad290daac5 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java index de5e2638c6c..e10d5c35800 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java @@ -180,7 +180,7 @@ public 
class BucketSortPipelineAggregator extends PipelineAggregator {
     private static class TopNPriorityQueue extends PriorityQueue {
         private TopNPriorityQueue(int n) {
-            super(n, false);
+            super(n);
         }
         @Override
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
index 9bcabad9aea..d6ade87fa6e 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
@@ -71,7 +71,7 @@ public class OverallBucketsProvider {
     static class TopNScores extends PriorityQueue {
         TopNScores(int n) {
-            super(n, false);
+            super(n);
         }
         @Override
diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1
new file mode 100644
index 00000000000..fdedaf3fc57
--- /dev/null
+++ b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1
@@ -0,0 +1 @@
+73dd7703a94ec2357581f65ee7c1c4d618ff310f
\ No newline at end of file
diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1
deleted file mode 100644
index d38fb392c35..00000000000
--- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-471096d6e92338b208aa91f3a85feb2f9cfc4afd
\ No newline at end of file

From 6750e157737a1f17260622f05660793647b9f017 Mon Sep 17 00:00:00 2001
From: Andrei Stefan
Date: Thu, 9 Aug 2018 19:28:48 +0300
Subject: [PATCH 03/11] SQL: Ignore H2 comparative tests for uppercasing/lowercasing string functions (#32604)

Skip the comparative tests using lowercasing/uppercasing against H2 (which
considers the Locale). ES-SQL is, so far, ignoring the Locale. Still, the
same queries are executed against ES-SQL alone and results asserted to be
correct.
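The locale sensitivity behind this change is easy to reproduce in plain Java.
The sketch below is not part of the patch; the class name and output comments
are illustrative only. It shows why case conversion of "i"/"I" differs under
the Turkish locale, and therefore why the locale-aware H2 results cannot be
compared with the (so far) locale-insensitive ES-SQL results:

import java.util.Locale;

// Demonstrates the Turkish-locale casing rules that motivate skipping the
// H2 comparative tests when the default locale is tr or tr-TR.
public class TurkishCaseDemo {
    public static void main(String[] args) {
        Locale turkish = Locale.forLanguageTag("tr");
        System.out.println("i".toUpperCase(Locale.ROOT)); // I (U+0049)
        System.out.println("I".toLowerCase(Locale.ROOT)); // i (U+0069)
        System.out.println("i".toUpperCase(turkish));     // İ (U+0130, capital I with dot above)
        System.out.println("I".toLowerCase(turkish));     // ı (U+0131, dotless small i)
    }
}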
--- .../string/StringFunctionProcessorTests.java | 26 +++++++++++++++++++ .../xpack/qa/sql/jdbc/SqlSpecTestCase.java | 11 +++++++- .../main/resources/case-functions.sql-spec | 13 ++++++++++ .../main/resources/string-functions.sql-spec | 18 ++----------- 4 files changed, 51 insertions(+), 17 deletions(-) create mode 100644 x-pack/qa/sql/src/main/resources/case-functions.sql-spec diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java index dcfb8d278ff..a4d9d4cb57a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; import java.io.IOException; +import java.util.Locale; public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase { public static StringProcessor randomStringFunctionProcessor() { @@ -73,6 +74,19 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas stringCharInputValidation(proc); } + + public void testLCaseWithTRLocale() { + Locale.setDefault(Locale.forLanguageTag("tr")); + StringProcessor proc = new StringProcessor(StringOperation.LCASE); + + // ES-SQL is not locale sensitive (so far). The obvious test for this is the Turkish language, uppercase letter I conversion + // in non-Turkish locale the lowercasing would create i and an additional dot, while in Turkish Locale it would only create "i" + // unicode 0069 = i + assertEquals("\u0069\u0307", proc.process("\u0130")); + // unicode 0049 = I (regular capital letter i) + // in Turkish locale this would be lowercased to a "i" without dot (unicode 0131) + assertEquals("\u0069", proc.process("\u0049")); + } public void testUCase() { StringProcessor proc = new StringProcessor(StringOperation.UCASE); @@ -81,9 +95,21 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas assertEquals("SOMELOWERCASE", proc.process("SomeLoweRCasE")); assertEquals("FULLUPPERCASE", proc.process("FULLUPPERCASE")); assertEquals("A", proc.process('a')); + + // special uppercasing for small letter sharp "s" resulting "SS" + assertEquals("\u0053\u0053", proc.process("\u00df")); stringCharInputValidation(proc); } + + public void testUCaseWithTRLocale() { + Locale.setDefault(Locale.forLanguageTag("tr")); + StringProcessor proc = new StringProcessor(StringOperation.UCASE); + + // ES-SQL is not Locale sensitive (so far). 
+ // in Turkish locale, small letter "i" is uppercased to "I" with a dot above (unicode 130), otherwise in "i" (unicode 49) + assertEquals("\u0049", proc.process("\u0069")); + } public void testLength() { StringProcessor proc = new StringProcessor(StringOperation.LENGTH); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java index b782e1474ea..38b04e4ad65 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java @@ -7,12 +7,14 @@ package org.elasticsearch.xpack.qa.sql.jdbc; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.junit.Assume; import org.junit.ClassRule; import java.sql.Connection; import java.sql.ResultSet; import java.util.ArrayList; import java.util.List; +import java.util.Locale; /** * Tests comparing sql queries executed against our jdbc client @@ -25,7 +27,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase { public static LocalH2 H2 = new LocalH2((c) -> c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'")); @ParametersFactory(argumentFormatting = PARAM_FORMATTING) - public static List readScriptSpec() throws Exception { + public static List readScriptSpec() throws Exception { Parser parser = specParser(); List tests = new ArrayList<>(); tests.addAll(readScriptSpec("/select.sql-spec", parser)); @@ -35,6 +37,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase { tests.addAll(readScriptSpec("/agg.sql-spec", parser)); tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser)); tests.addAll(readScriptSpec("/string-functions.sql-spec", parser)); + tests.addAll(readScriptSpec("/case-functions.sql-spec", parser)); return tests; } @@ -56,6 +59,12 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase { @Override protected final void doTest() throws Throwable { + boolean goodLocale = !(Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr").build()) + || Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr-TR").build())); + if (fileName.startsWith("case-functions")) { + Assume.assumeTrue(goodLocale); + } + try (Connection h2 = H2.get(); Connection es = esJdbc()) { diff --git a/x-pack/qa/sql/src/main/resources/case-functions.sql-spec b/x-pack/qa/sql/src/main/resources/case-functions.sql-spec new file mode 100644 index 00000000000..899d7cb0a6c --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/case-functions.sql-spec @@ -0,0 +1,13 @@ +// Next 4 SELECTs in this file are related to https://github.com/elastic/elasticsearch/issues/32589 +// H2 is Locale sensitive, while ES-SQL is not (so far) +selectInsertWithLcaseAndLengthWithOrderBy +SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10; + +upperCasingTheSecondLetterFromTheRightFromFirstName +SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10; + +upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy +SELECT 
CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10; + +upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere +SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10; diff --git a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec index bad6a5d0432..15bb6dea935 100644 --- a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec +++ b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec @@ -22,7 +22,8 @@ SELECT LCASE(first_name) lc, CHAR(ASCII(LCASE(first_name))) chr FROM "test_emp" ltrimFilter SELECT LTRIM(first_name) lt FROM "test_emp" WHERE LTRIM(first_name) = 'Bob'; -//Unsupported yet +// Unsupported yet +// Functions combined with 'LIKE' should perform the match inside a Painless script, whereas at the moment it's handled as a regular `match` query in ES. //ltrimFilterWithLike //SELECT LTRIM("first_name") lt FROM "test_emp" WHERE LTRIM("first_name") LIKE '%a%'; @@ -93,10 +94,6 @@ SELECT "first_name" orig, REPEAT("first_name",2) reps FROM "test_emp" WHERE ASCI selectInsertWithLcase SELECT "first_name" orig, INSERT("first_name",2,1000,LCASE("first_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC LIMIT 10; -// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589 -// selectInsertWithLcaseAndLengthWithOrderBy -//SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10; - selectInsertWithUcaseWithGroupByAndOrderBy SELECT INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) modified, COUNT(*) count FROM "test_emp" WHERE ASCII("first_name")=65 GROUP BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ORDER BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ASC LIMIT 10; @@ -141,14 +138,3 @@ SELECT RIGHT("first_name",2) f FROM "test_emp" ORDER BY "first_name" LIMIT 10; selectRightWithGroupByAndOrderBy SELECT RIGHT("first_name",2) f, COUNT(*) count FROM "test_emp" GROUP BY RIGHT("first_name",2) ORDER BY RIGHT("first_name",2) LIMIT 10; - -// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589 -// upperCasingTheSecondLetterFromTheRightFromFirstName -// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10; - -// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589 -// 
upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy
-// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;
-
-upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere
-SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;

From 7b618f3db4148e2205b3d88b35adc2c38d6e694e Mon Sep 17 00:00:00 2001
From: Andrei Stefan
Date: Thu, 9 Aug 2018 19:30:02 +0300
Subject: [PATCH 04/11] SQL: Bug fix for the optional "start" parameter usage inside LOCATE function (#32576)

The incorrect NodeInfo is created when the optional parameter is not used,
leading to the incorrect constructor being used. Simplified
LocateFunctionProcessorDefinition by using one constructor instead of two.

Fixes https://github.com/elastic/elasticsearch/issues/32554
---
 .../function/scalar/string/Locate.java        | 18 ++---
 .../LocateFunctionProcessorDefinition.java    | 13 +---
 ...ocateFunctionProcessorDefinitionTests.java | 69 ++++++-------------
 3 files changed, 26 insertions(+), 74 deletions(-)

diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java
index d6e2f80b025..e33511cfbfd 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java
@@ -62,20 +62,10 @@ public class Locate extends ScalarFunction {
     @Override
     protected ProcessorDefinition makeProcessorDefinition() {
-        LocateFunctionProcessorDefinition processorDefinition;
-        if (start == null) {
-            processorDefinition = new LocateFunctionProcessorDefinition(location(), this,
-                ProcessorDefinitions.toProcessorDefinition(pattern),
-                ProcessorDefinitions.toProcessorDefinition(source));
-        }
-        else {
-            processorDefinition = new LocateFunctionProcessorDefinition(location(), this,
-                ProcessorDefinitions.toProcessorDefinition(pattern),
-                ProcessorDefinitions.toProcessorDefinition(source),
-                ProcessorDefinitions.toProcessorDefinition(start));
-        }
-
-        return processorDefinition;
+        return new LocateFunctionProcessorDefinition(location(), this,
+            ProcessorDefinitions.toProcessorDefinition(pattern),
+            ProcessorDefinitions.toProcessorDefinition(source),
+            start == null ?
null : ProcessorDefinitions.toProcessorDefinition(start)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java index 84b0c4457b1..19020511852 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java @@ -21,20 +21,12 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern, ProcessorDefinition source, ProcessorDefinition start) { - super(location, expression, Arrays.asList(pattern, source, start)); + super(location, expression, start == null ? Arrays.asList(pattern, source) : Arrays.asList(pattern, source, start)); this.pattern = pattern; this.source = source; this.start = start; } - public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern, - ProcessorDefinition source) { - super(location, expression, Arrays.asList(pattern, source)); - this.pattern = pattern; - this.source = source; - this.start = null; - } - @Override public final ProcessorDefinition replaceChildren(List newChildren) { int childrenSize = newChildren.size(); @@ -68,9 +60,6 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { protected ProcessorDefinition replaceChildren(ProcessorDefinition newPattern, ProcessorDefinition newSource, ProcessorDefinition newStart) { - if (newStart == null) { - return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource); - } return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource, newStart); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java index 6d86e807be2..4815c9c9528 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java @@ -38,50 +38,34 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase return (LocateFunctionProcessorDefinition) (new Locate(randomLocation(), randomStringLiteral(), randomStringLiteral(), - frequently() ? randomIntLiteral() : null) + randomFrom(true, false) ? 
randomIntLiteral() : null) .makeProcessorDefinition()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32554") @Override public void testTransform() { // test transforming only the properties (location, expression), // skipping the children (the two parameters of the binary function) which are tested separately LocateFunctionProcessorDefinition b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomLocateFunctionExpression()); - LocateFunctionProcessorDefinition newB; - if (b1.start() == null) { - newB = new LocateFunctionProcessorDefinition( - b1.location(), - newExpression, - b1.pattern(), - b1.source()); - } else { - newB = new LocateFunctionProcessorDefinition( - b1.location(), - newExpression, - b1.pattern(), - b1.source(), - b1.start()); - } + LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition( + b1.location(), + newExpression, + b1.pattern(), + b1.source(), + b1.start()); + assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); LocateFunctionProcessorDefinition b2 = randomInstance(); Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - if (b2.start() == null) { - newB = new LocateFunctionProcessorDefinition( - newLoc, - b2.expression(), - b2.pattern(), - b2.source()); - } else { - newB = new LocateFunctionProcessorDefinition( - newLoc, - b2.expression(), - b2.pattern(), - b2.source(), - b2.start()); - } + newB = new LocateFunctionProcessorDefinition( + newLoc, + b2.expression(), + b2.pattern(), + b2.source(), + b2.start()); + assertEquals(newB, b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); } @@ -93,15 +77,9 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral())); ProcessorDefinition newStart; - LocateFunctionProcessorDefinition newB; - if (b.start() == null) { - newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source()); - newStart = null; - } - else { - newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source(), b.start()); - newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral())); - } + LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition( + b.location(), b.expression(), b.pattern(), b.source(), b.start()); + newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral())); LocateFunctionProcessorDefinition transformed = null; // generate all the combinations of possible children modifications and test all of them @@ -132,7 +110,8 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.pattern(), () -> randomStringLiteral())) : f.pattern(), comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral())) : f.source())); + () -> randomStringLiteral())) : f.source(), + null)); } } } else { @@ -155,13 +134,7 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase @Override protected LocateFunctionProcessorDefinition copy(LocateFunctionProcessorDefinition instance) { - return instance.start() == null ? 
-                new LocateFunctionProcessorDefinition(instance.location(),
-                        instance.expression(),
-                        instance.pattern(),
-                        instance.source())
-                :
-                new LocateFunctionProcessorDefinition(instance.location(),
+        return new LocateFunctionProcessorDefinition(instance.location(),
                 instance.expression(),
                 instance.pattern(),
                 instance.source(),

From 9a16491ebf58e0c0b7dad3dcf51690367cb5202e Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Thu, 9 Aug 2018 10:11:08 -0700
Subject: [PATCH 05/11] Build: Add back setting artifact id of pom for rest
 high level client (#32731)

This commit adds back the publishing section that sets the artifact id
of the generated pom file for the high level rest client. This was
accidentally removed during a consolidation of the shadow plugin logic.
---
 client/rest-high-level/build.gradle | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index 65c5d094c71..6f5eab6e1db 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -30,6 +30,14 @@ apply plugin: 'com.github.johnrengelman.shadow'
 group = 'org.elasticsearch.client'
 archivesBaseName = 'elasticsearch-rest-high-level-client'
 
+publishing {
+    publications {
+        nebula {
+            artifactId = archivesBaseName
+        }
+    }
+}
+
 //we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions)
 Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE)
 test.dependsOn(copyRestSpec)

From abda9fdac1f8e300e73f1ba22d8e336831f35c4b Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Thu, 9 Aug 2018 14:35:30 -0400
Subject: [PATCH 06/11] Logging: Fix test on windows

Windows' `\` instead of `/` strikes again!

Closes #32546
---
 .../org/elasticsearch/common/logging/EvilLoggerTests.java | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
index ff9e1cb4ded..f92257fc91b 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
@@ -28,6 +28,7 @@ import org.apache.logging.log4j.core.appender.ConsoleAppender;
 import org.apache.logging.log4j.core.appender.CountingNoOpAppender;
 import org.apache.logging.log4j.core.config.Configurator;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.lucene.util.Constants;
 import org.elasticsearch.cli.UserException;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.common.Randomness;
@@ -360,7 +361,6 @@ public class EvilLoggerTests extends ESTestCase {
         }
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32546")
     public void testNoNodeNameWarning() throws IOException, UserException {
         setupLogging("no_node_name");
 
@@ -376,7 +376,11 @@ public class EvilLoggerTests extends ESTestCase {
                 + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users "
                 + "who customize log4j2.properties but will stop this behavior in 7.0. 
You should manually replace " + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:"); - assertThat(events.get(1), endsWith("no_node_name/log4j2.properties")); + if (Constants.WINDOWS) { + assertThat(events.get(1), endsWith("no_node_name\\log4j2.properties")); + } else { + assertThat(events.get(1), endsWith("no_node_name/log4j2.properties")); + } } private void setupLogging(final String config) throws IOException, UserException { From c5e66cb3451510c9e9e53736e14b04bb58feebd1 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 9 Aug 2018 21:54:58 +0300 Subject: [PATCH 07/11] mute test #32737 --- .../qa/full-cluster-restart/with-system-key/build.gradle | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/x-pack/qa/full-cluster-restart/with-system-key/build.gradle b/x-pack/qa/full-cluster-restart/with-system-key/build.gradle index e69de29bb2d..928280b6584 100644 --- a/x-pack/qa/full-cluster-restart/with-system-key/build.gradle +++ b/x-pack/qa/full-cluster-restart/with-system-key/build.gradle @@ -0,0 +1,8 @@ +import org.elasticsearch.gradle.test.RestIntegTestTask + +// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737 +if (project.inFipsJvm) { + tasks.withType(RestIntegTestTask) { + enabled = false + } +} From 294ab7ee96267549af8c5b15d8493c6c7fb49ea2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 9 Aug 2018 16:11:48 -0400 Subject: [PATCH 08/11] Core: Remove some logging constructors (#32513) Remove a few of the logger constructors that aren't widely used or aren't used at all and deprecate a few more logger constructors in favor of log4j2's `LogManager`. --- .../common/logging/EvilLoggerTests.java | 18 ++++++--------- .../elasticsearch/bootstrap/Bootstrap.java | 5 +---- .../common/logging/ESLoggerFactory.java | 15 +++++++++++++ .../elasticsearch/common/logging/Loggers.java | 22 ++++++++++--------- .../index/analysis/ESSolrSynonymParser.java | 5 ++--- .../analysis/ESWordnetSynonymParser.java | 5 ++--- 6 files changed, 39 insertions(+), 31 deletions(-) diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index f92257fc91b..ede61da1369 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -27,6 +27,7 @@ import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.ConsoleAppender; import org.apache.logging.log4j.core.appender.CountingNoOpAppender; import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.spi.ExtendedLogger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Constants; import org.elasticsearch.cli.UserException; @@ -299,8 +300,8 @@ public class EvilLoggerTests extends ESTestCase { public void testPrefixLogger() throws IOException, IllegalAccessException, UserException { setupLogging("prefix"); - final String prefix = randomBoolean() ? 
null : randomAlphaOfLength(16); - final Logger logger = Loggers.getLogger("prefix", prefix); + final String prefix = randomAlphaOfLength(16); + final Logger logger = new PrefixLogger((ExtendedLogger) LogManager.getLogger("prefix_test"), "prefix_test", prefix); logger.info("test"); logger.info("{}", "test"); final Exception e = new Exception("exception"); @@ -320,13 +321,8 @@ public class EvilLoggerTests extends ESTestCase { final int expectedLogLines = 3; assertThat(events.size(), equalTo(expectedLogLines + stackTraceLength)); for (int i = 0; i < expectedLogLines; i++) { - if (prefix == null) { - assertThat("Contents of [" + path + "] are wrong", - events.get(i), startsWith("[" + getTestName() + "] test")); - } else { - assertThat("Contents of [" + path + "] are wrong", - events.get(i), startsWith("[" + getTestName() + "][" + prefix + "] test")); - } + assertThat("Contents of [" + path + "] are wrong", + events.get(i), startsWith("[" + getTestName() + "]" + prefix + " test")); } } @@ -335,8 +331,8 @@ public class EvilLoggerTests extends ESTestCase { final int prefixes = 1 << 19; // to ensure enough markers that the GC should collect some when we force a GC below for (int i = 0; i < prefixes; i++) { - Loggers.getLogger("prefix" + i, "prefix" + i); // this has the side effect of caching a marker with this prefix - + // this has the side effect of caching a marker with this prefix + new PrefixLogger((ExtendedLogger) LogManager.getLogger("prefix" + i), "prefix" + i, "prefix" + i); } System.gc(); // this will free the weakly referenced keys in the marker cache diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 76db6db7674..bc2fe747c03 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -345,10 +345,7 @@ final class Bootstrap { if (foreground && maybeConsoleAppender != null) { Loggers.removeAppender(rootLogger, maybeConsoleAppender); } - Logger logger = Loggers.getLogger(Bootstrap.class); - if (INSTANCE.node != null) { - logger = Loggers.getLogger(Bootstrap.class, Node.NODE_NAME_SETTING.get(INSTANCE.node.settings())); - } + Logger logger = LogManager.getLogger(Bootstrap.class); // HACK, it sucks to do this, but we will run users out of disk space otherwise if (e instanceof CreationException) { // guice: log the shortened exc to the log file diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index b45b55609f5..2159014f825 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -62,14 +62,29 @@ public final class ESLoggerFactory { return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(Class clazz) { return getLogger(null, clazz); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(String name) { return getLogger(null, name); } + /** + * Get the root logger. 
+ * @deprecated Prefer {@link LogManager#getRootLogger} + */ + @Deprecated public static Logger getRootLogger() { return LogManager.getRootLogger(); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index 58ffe277531..b2a24faf643 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -67,11 +67,11 @@ public class Loggers { } public static Logger getLogger(Class clazz, Settings settings, String... prefixes) { - return Loggers.getLogger(clazz, prefixes); + return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); } public static Logger getLogger(String loggerName, Settings settings, String... prefixes) { - return Loggers.getLogger(loggerName, prefixes); + return ESLoggerFactory.getLogger(formatPrefix(prefixes), loggerName); } public static Logger getLogger(Logger parentLogger, String s) { @@ -82,22 +82,24 @@ public class Loggers { return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(String s) { return ESLoggerFactory.getLogger(s); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(Class clazz) { return ESLoggerFactory.getLogger(clazz); } - public static Logger getLogger(Class clazz, String... prefixes) { - return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); - } - - public static Logger getLogger(String name, String... prefixes) { - return ESLoggerFactory.getLogger(formatPrefix(prefixes), name); - } - private static String formatPrefix(String... 
prefixes) { String prefix = null; if (prefixes != null && prefixes.length > 0) { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java index bcc249f8a8a..006973dd9b6 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java @@ -20,19 +20,18 @@ package org.elasticsearch.index.analysis; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.synonym.SolrSynonymParser; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; public class ESSolrSynonymParser extends SolrSynonymParser { + private static final Logger logger = LogManager.getLogger(ESSolrSynonymParser.class); private final boolean lenient; - private static final Logger logger = - Loggers.getLogger(ESSolrSynonymParser.class, "ESSolrSynonymParser"); public ESSolrSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { super(dedup, expand, analyzer); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java index 3764820c434..ebcd84e39d7 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java @@ -20,19 +20,18 @@ package org.elasticsearch.index.analysis; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; public class ESWordnetSynonymParser extends WordnetSynonymParser { + private static final Logger logger = LogManager.getLogger(ESWordnetSynonymParser.class); private final boolean lenient; - private static final Logger logger = - Loggers.getLogger(ESSolrSynonymParser.class, "ESWordnetSynonymParser"); public ESWordnetSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { super(dedup, expand, analyzer); From be54ba39c4042419dbbbfa149d92aaf3396475cd Mon Sep 17 00:00:00 2001 From: lipsill <39668292+lipsill@users.noreply.github.com> Date: Thu, 9 Aug 2018 23:10:51 +0200 Subject: [PATCH 09/11] Add expected mapping type to `MapperException` (#31564) Currently if a document cannot be indexed because it violates the defined mapping for the index, a MapperException is thrown. In some cases it is useful to expose the expected field type in the exception itself, so that the user can react based on the error message. This change adds the expected data type to the MapperException. 
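
For example (reusing the field names from the updated tests), indexing the
document {"foo": true} into a mapping where "foo" is defined as a "long" field
now fails with:

    failed to parse field [foo] of type [long]

instead of the previous, less specific:

    failed to parse [foo]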
Closes #31502
---
 .../index/mapper/FieldMapper.java             |  3 +-
 .../index/mapper/GeoShapeFieldMapper.java     |  3 +-
 .../index/mapper/BooleanFieldMapperTests.java |  2 +-
 .../index/mapper/DocumentParserTests.java     | 29 +++++++++++++++++++
 4 files changed, 34 insertions(+), 3 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
index cbb008c9d00..2e949f027d1 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
@@ -276,7 +276,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
                 context.doc().add(field);
             }
         } catch (Exception e) {
-            throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
+            throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(),
+                    fieldType().typeName());
         }
         multiFields.parse(this, context);
         return null;
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java
index 318d9cfc6fa..fb9e16cbe13 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java
@@ -510,7 +510,8 @@ public class GeoShapeFieldMapper extends FieldMapper {
             indexShape(context, shape);
         } catch (Exception e) {
             if (ignoreMalformed.value() == false) {
-                throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
+                throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(),
+                        fieldType().typeName());
             }
             context.addIgnoredField(fieldType.name());
         }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
index 8638e16e29a..fd98a8199a2 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
@@ -142,7 +142,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject());
         MapperParsingException ex = expectThrows(MapperParsingException.class,
                 () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)));
-        assertEquals("failed to parse [field]", ex.getMessage());
+        assertEquals("failed to parse field [field] of type [boolean]", ex.getMessage());
     }
 
     public void testMultiFields() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
index 098aacc799f..76ca6aa7ea8 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
@@ -125,6 +125,35 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             e.getMessage());
     }
 
+    public void testUnexpectedFieldMappingType() throws Exception {
+        DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+            .startObject("foo").field("type", "long").endObject()
+            .startObject("bar").field("type", "boolean").endObject()
+            .startObject("geo").field("type", "geo_shape").endObject()
+            .endObject().endObject().endObject());
+        DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
+        {
+            BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo", true).endObject());
+            MapperException exception = expectThrows(MapperException.class,
+                    () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
+            assertThat(exception.getMessage(), containsString("failed to parse field [foo] of type [long]"));
+        }
+        {
+            BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("bar", "bar").endObject());
+            MapperException exception = expectThrows(MapperException.class,
+                    () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON)));
+            assertThat(exception.getMessage(), containsString("failed to parse field [bar] of type [boolean]"));
+        }
+        {
+            BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("geo", 123).endObject());
+            MapperException exception = expectThrows(MapperException.class,
+                    () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON)));
+            assertThat(exception.getMessage(), containsString("failed to parse field [geo] of type [geo_shape]"));
+        }
+
+    }
+
     public void testDotsWithDynamicNestedMapper() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")

From 293c8a2b24a611e1f8de77d1b46fd041bf1e386c Mon Sep 17 00:00:00 2001
From: Jack Conradson
Date: Thu, 9 Aug 2018 14:24:55 -0700
Subject: [PATCH 10/11] Painless: Add an Ingest Script Processor Example (#32302)

This commit adds two pieces. The first is a small set of documentation
providing instructions on how to get set up to run context examples. This
will require a download similar to how Kibana works for some of the examples.
The second is an ingest processor example using the downloaded data. More
examples will follow, ideally one per PR. This also adds a set of tests to
exercise each script individually as a unit test.
---
 .../resources/checkstyle_suppressions.xml     |   1 +
 docs/painless/painless-contexts.asciidoc      |   6 +-
 .../painless/painless-contexts/index.asciidoc |   4 +-
 .../painless-context-examples.asciidoc        |  80 +++++
 ...painless-ingest-processor-context.asciidoc | 158 ++++++++-
 docs/painless/painless-keywords.asciidoc      |   2 +-
 .../painless/ContextExampleTests.java         | 311 ++++++++++++++++++
 7 files changed, 555 insertions(+), 7 deletions(-)
 create mode 100644 docs/painless/painless-contexts/painless-context-examples.asciidoc
 create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java

diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 8ab68b40c0a..420ed3b10b4 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -686,6 +686,7 @@
+  <suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
diff --git a/docs/painless/painless-contexts.asciidoc b/docs/painless/painless-contexts.asciidoc
index 8b8a3b0eec6..cc7bc752ec6 100644
--- a/docs/painless/painless-contexts.asciidoc
+++ b/docs/painless/painless-contexts.asciidoc
@@ -14,6 +14,8 @@ specialized code may define new ways to use a Painless script. 
|==== | Name | Painless Documentation | Elasticsearch Documentation +| Ingest processor | <> + | {ref}/script-processor.html[Elasticsearch Documentation] | Update | <> | {ref}/docs-update.html[Elasticsearch Documentation] | Update by query | <> @@ -44,12 +46,12 @@ specialized code may define new ways to use a Painless script. | {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation] | Bucket aggregation | <> | {ref}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation] -| Ingest processor | <> - | {ref}/script-processor.html[Elasticsearch Documentation] | Watcher condition | <> | {xpack-ref}/condition-script.html[Elasticsearch Documentation] | Watcher transform | <> | {xpack-ref}/transform-script.html[Elasticsearch Documentation] |==== +include::painless-contexts/painless-context-examples.asciidoc[] + include::painless-contexts/index.asciidoc[] diff --git a/docs/painless/painless-contexts/index.asciidoc b/docs/painless/painless-contexts/index.asciidoc index 64e4326e052..a9d3982133e 100644 --- a/docs/painless/painless-contexts/index.asciidoc +++ b/docs/painless/painless-contexts/index.asciidoc @@ -1,3 +1,5 @@ +include::painless-ingest-processor-context.asciidoc[] + include::painless-update-context.asciidoc[] include::painless-update-by-query-context.asciidoc[] @@ -28,8 +30,6 @@ include::painless-metric-agg-reduce-context.asciidoc[] include::painless-bucket-agg-context.asciidoc[] -include::painless-ingest-processor-context.asciidoc[] - include::painless-watcher-condition-context.asciidoc[] include::painless-watcher-transform-context.asciidoc[] diff --git a/docs/painless/painless-contexts/painless-context-examples.asciidoc b/docs/painless/painless-contexts/painless-context-examples.asciidoc new file mode 100644 index 00000000000..469f425d1d8 --- /dev/null +++ b/docs/painless/painless-contexts/painless-context-examples.asciidoc @@ -0,0 +1,80 @@ +[[painless-context-examples]] +=== Context examples + +To run the examples, index the sample seat data into Elasticsearch. The examples +must be run sequentially to work correctly. + +. Download the +https://download.elastic.co/demos/painless/contexts/seats.json[seat data]. This +data set contains booking information for a collection of plays. Each document +represents a single seat for a play at a particular theater on a specific date +and time. ++ +Each document contains the following fields: ++ +`theatre` ({ref}/keyword.html[`keyword`]):: + The name of the theater the play is in. +`play` ({ref}/text.html[`text`]):: + The name of the play. +`actors` ({ref}/text.html[`text`]):: + A list of actors in the play. +`row` ({ref}/number.html[`integer`]):: + The row of the seat. +`number` ({ref}/number.html[`integer`]):: + The number of the seat within a row. +`cost` ({ref}/number.html[`double`]):: + The cost of the ticket for the seat. +`sold` ({ref}/boolean.html[`boolean`]):: + Whether or not the seat is sold. +`datetime` ({ref}/date.html[`date`]):: + The date and time of the play as a date object. +`date` ({ref}/keyword.html[`keyword`]):: + The date of the play as a keyword. +`time` ({ref}/keyword.html[`keyword`]):: + The time of the play as a keyword. + +. {defguide}/running-elasticsearch.html[Start] Elasticsearch. Note these +examples assume Elasticsearch and Kibana are running locally. To use the Console +editor with a remote Kibana instance, click the settings icon and enter the +Console URL. To submit a cURL request to a remote Elasticsearch instance, edit +the request URL. 
+ +. Create {ref}/mapping.html[mappings] for the sample data: ++ +[source,js] +---- +PUT /seats +{ + "mappings": { + "seat": { + "properties": { + "theatre": { "type": "keyword" }, + "play": { "type": "text" }, + "actors": { "type": "text" }, + "row": { "type": "integer" }, + "number": { "type": "integer" }, + "cost": { "type": "double" }, + "sold": { "type": "boolean" }, + "datetime": { "type": "date" }, + "date": { "type": "keyword" }, + "time": { "type": "keyword" } + } + } + } +} +---- ++ +// CONSOLE + +. Run the <> +example. This sets up a script ingest processor used on each document as the +seat data is indexed. + +. Index the seat data: ++ +[source,js] +---- +curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@//seats.json" +---- +// NOTCONSOLE + diff --git a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc index 5d451268ded..546057ab1a0 100644 --- a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc +++ b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc @@ -27,7 +27,7 @@ to modify documents upon insertion. {ref}/mapping-type-field.html[`ctx['_type']`]:: Modify this to change the type for the current document. -`ctx` (`Map`, read-only):: +`ctx` (`Map`):: Modify the values in the `Map/List` structure to add, modify, or delete the fields of a document. @@ -38,4 +38,158 @@ void:: *API* -The standard <> is available. \ No newline at end of file +The standard <> is available. + +*Example* + +To run this example, first follow the steps in +<>. + +The seat data contains: + +* A date in the format `YYYY-MM-DD` where the second digit of both month and day + is optional. +* A time in the format HH:MM* where the second digit of both hours and minutes + is optional. The star (*) represents either the `String` `AM` or `PM`. + +The following ingest script processes the date and time `Strings` and stores the +result in a `datetime` field. + +[source,Painless] +---- +String[] split(String s, char d) { <1> + int count = 0; + + for (char c : s.toCharArray()) { <2> + if (c == d) { + ++count; + } + } + + if (count == 0) { + return new String[] {s}; <3> + } + + String[] r = new String[count + 1]; <4> + int i0 = 0, i1 = 0; + count = 0; + + for (char c : s.toCharArray()) { <5> + if (c == d) { + r[count++] = s.substring(i0, i1); + i0 = i1 + 1; + } + + ++i1; + } + + r[count] = s.substring(i0, i1); <6> + + return r; +} + +String[] dateSplit = split(ctx.date, (char)"-"); <7> +String year = dateSplit[0].trim(); +String month = dateSplit[1].trim(); + +if (month.length() == 1) { <8> + month = "0" + month; +} + +String day = dateSplit[2].trim(); + +if (day.length() == 1) { <9> + day = "0" + day; +} + +boolean pm = ctx.time.substring(ctx.time.length() - 2).equals("PM"); <10> +String[] timeSplit = split( + ctx.time.substring(0, ctx.time.length() - 2), (char)":"); <11> +int hours = Integer.parseInt(timeSplit[0].trim()); +int minutes = Integer.parseInt(timeSplit[1].trim()); + +if (pm) { <12> + hours += 12; +} + +String dts = year + "-" + month + "-" + day + "T" + + (hours < 10 ? "0" + hours : "" + hours) + ":" + + (minutes < 10 ? 
"0" + minutes : "" + minutes) + + ":00+08:00"; <13> + +ZonedDateTime dt = ZonedDateTime.parse( + dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); <14> +ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L; <15> +---- +<1> Creates a `split` <> to split a + <> type value using a <> + type value as the delimiter. This is useful for handling the necessity of + pulling out the individual pieces of the date and time `Strings` from the + original seat data. +<2> The first pass through each `char` in the `String` collects how many new + `Strings` the original is split into. +<3> Returns the original `String` if there are no instances of the delimiting + `char`. +<4> Creates an <> value to collect the split `Strings` + into based on the number of `char` delimiters found in the first pass. +<5> The second pass through each `char` in the `String` collects each split + substring into an array type value of `Strings`. +<6> Collects the last substring into the array type value of `Strings`. +<7> Uses the `split` function to separate the date `String` from the seat data + into year, month, and day `Strings`. + Note:: + * The use of a `String` type value to `char` type value + <> as part of the second argument since + character literals do not exist. + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `date` field. +<8> Appends the <> `"0"` value to a single + digit month since the format of the seat data allows for this case. +<9> Appends the <> `"0"` value to a single + digit day since the format of the seat data allows for this case. +<10> Sets the <> + <> to `true` if the time `String` is a time + in the afternoon or evening. + Note:: + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `time` field. +<11> Uses the `split` function to separate the time `String` from the seat data + into hours and minutes `Strings`. + Note:: + * The use of the `substring` method to remove the `AM` or `PM` portion of + the time `String`. + * The use of a `String` type value to `char` type value + <> as part of the second argument since + character literals do not exist. + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `date` field. +<12> If the time `String` is an afternoon or evening value adds the + <> `12` to the existing hours to move to + a 24-hour based time. +<13> Builds a new time `String` that is parsable using existing API methods. +<14> Creates a `ZonedDateTime` <> value by using + the API method `parse` to parse the new time `String`. +<15> Sets the datetime field `datetime` to the number of milliseconds retrieved + from the API method `getLong`. + Note:: + * The use of the `ctx` ingest processor context variable to set the field + `datetime`. Manipulate each document's fields with the `ctx` variable as + each document is indexed. 
+ +Submit the following request: + +[source,js] +---- +PUT /_ingest/pipeline/seats +{ + "description": "update datetime for seats", + "processors": [ + { + "script": { + "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;" + } + } + ] +} +---- +// CONSOLE \ No newline at end of file diff --git a/docs/painless/painless-keywords.asciidoc b/docs/painless/painless-keywords.asciidoc index 9463902c8d3..24371d3713c 100644 --- a/docs/painless/painless-keywords.asciidoc +++ b/docs/painless/painless-keywords.asciidoc @@ -5,7 +5,7 @@ Keywords are reserved tokens for built-in language features. *Errors* -If a keyword is used as an <>. +* If a keyword is used as an <>. *Keywords* diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java new file mode 100644 index 00000000000..15eed75bcb8 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java @@ -0,0 +1,311 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +/** + * These tests run the Painless scripts used in the context docs against + * slightly modified data designed around unit tests rather than a fully- + * running Elasticsearch server. 
+ */ +public class ContextExampleTests extends ScriptTestCase { + + // **** Docs Generator Code **** + + /* + + import java.io.FileWriter; + import java.io.IOException; + + public class Generator { + + public final static String[] theatres = new String[] {"Down Port", "Graye", "Skyline", "Courtyard"}; + public final static String[] plays = new String[] {"Driving", "Pick It Up", "Sway and Pull", "Harriot", + "The Busline", "Ants Underground", "Exploria", "Line and Single", "Shafted", "Sunnyside Down", + "Test Run", "Auntie Jo"}; + public final static String[] actors = new String[] {"James Holland", "Krissy Smith", "Joe Muir", "Ryan Earns", + "Joel Madigan", "Jessica Brown", "Baz Knight", "Jo Hangum", "Rachel Grass", "Phoebe Miller", "Sarah Notch", + "Brayden Green", "Joshua Iller", "Jon Hittle", "Rob Kettleman", "Laura Conrad", "Simon Hower", "Nora Blue", + "Mike Candlestick", "Jacey Bell"}; + + public static void writeSeat(FileWriter writer, int id, String theatre, String play, String[] actors, + String date, String time, int row, int number, double cost, boolean sold) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append("{ \"create\" : { \"_index\" : \"seats\", \"_type\" : \"seat\", \"_id\" : \""); + builder.append(id); + builder.append("\" } }\n"); + builder.append("{ \"theatre\" : \""); + builder.append(theatre); + builder.append("\", \"play\" : \""); + builder.append(play); + builder.append("\", \"actors\": [ \""); + for (String actor : actors) { + builder.append(actor); + if (actor.equals(actors[actors.length - 1]) == false) { + builder.append("\", \""); + } + } + builder.append("\" ], \"date\": \""); + builder.append(date); + builder.append("\", \"time\": \""); + builder.append(time); + builder.append("\", \"row\": "); + builder.append(row); + builder.append(", \"number\": "); + builder.append(number); + builder.append(", \"cost\": "); + builder.append(cost); + builder.append(", \"sold\": "); + builder.append(sold ? 
"true" : "false"); + builder.append(" }\n"); + writer.write(builder.toString()); + } + + public static void main(String args[]) throws IOException { + FileWriter writer = new FileWriter("/home/jdconrad/test/seats.json"); + int id = 0; + + for (int playCount = 0; playCount < 12; ++playCount) { + String play = plays[playCount]; + String theatre; + String[] actor; + int startMonth; + int endMonth; + String time; + + if (playCount == 0) { + theatre = theatres[0]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3]}; + startMonth = 4; + endMonth = 5; + time = "3:00PM"; + } else if (playCount == 1) { + theatre = theatres[0]; + actor = new String[] {actors[4], actors[5], actors[6], actors[7], actors[8], actors[9]}; + startMonth = 4; + endMonth = 6; + time = "8:00PM"; + } else if (playCount == 2) { + theatre = theatres[0]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3], + actors[4], actors[5], actors[6], actors[7]}; + startMonth = 6; + endMonth = 8; + time = "3:00 PM"; + } else if (playCount == 3) { + theatre = theatres[0]; + actor = new String[] {actors[9], actors[10], actors[11], actors[12], actors[13], actors[14], + actors[15], actors[16], actors[17], actors[18], actors[19]}; + startMonth = 7; + endMonth = 8; + time = "8:00PM"; + } else if (playCount == 4) { + theatre = theatres[0]; + actor = new String[] {actors[13], actors[14], actors[15], actors[17], actors[18], actors[19]}; + startMonth = 8; + endMonth = 10; + time = "3:00PM"; + } else if (playCount == 5) { + theatre = theatres[0]; + actor = new String[] {actors[8], actors[9], actors[10], actors[11], actors[12]}; + startMonth = 8; + endMonth = 10; + time = "8:00PM"; + } else if (playCount == 6) { + theatre = theatres[1]; + actor = new String[] {actors[10], actors[11], actors[12], actors[13], actors[14], actors[15], actors[16]}; + startMonth = 4; + endMonth = 5; + time = "11:00AM"; + } else if (playCount == 7) { + theatre = theatres[1]; + actor = new String[] {actors[17], actors[18]}; + startMonth = 6; + endMonth = 9; + time = "2:00PM"; + } else if (playCount == 8) { + theatre = theatres[1]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3], actors[16]}; + startMonth = 10; + endMonth = 11; + time = "11:00AM"; + } else if (playCount == 9) { + theatre = theatres[2]; + actor = new String[] {actors[1], actors[2], actors[3], actors[17], actors[18], actors[19]}; + startMonth = 3; + endMonth = 6; + time = "4:00PM"; + } else if (playCount == 10) { + theatre = theatres[2]; + actor = new String[] {actors[2], actors[3], actors[4], actors[5]}; + startMonth = 7; + endMonth = 8; + time = "7:30PM"; + } else if (playCount == 11) { + theatre = theatres[2]; + actor = new String[] {actors[7], actors[13], actors[14], actors[15], actors[16], actors[17]}; + startMonth = 9; + endMonth = 12; + time = "5:40PM"; + } else { + throw new RuntimeException("too many plays"); + } + + int rows; + int number; + + if (playCount < 6) { + rows = 3; + number = 12; + } else if (playCount < 9) { + rows = 5; + number = 9; + } else if (playCount < 12) { + rows = 11; + number = 15; + } else { + throw new RuntimeException("too many seats"); + } + + for (int month = startMonth; month <= endMonth; ++month) { + for (int day = 1; day <= 14; ++day) { + for (int row = 1; row <= rows; ++row) { + for (int count = 1; count <= number; ++count) { + String date = "2018-" + month + "-" + day; + double cost = (25 - row) * 1.25; + + writeSeat(writer, ++id, theatre, play, actor, date, time, row, count, cost, false); + } + } + } + } + } + + 
writer.write("\n"); + writer.close(); + } + } + + */ + + // **** Initial Mappings **** + + /* + + curl -X PUT "localhost:9200/seats" -H 'Content-Type: application/json' -d' + { + "mappings": { + "seat": { + "properties": { + "theatre": { "type": "keyword" }, + "play": { "type": "text" }, + "actors": { "type": "text" }, + "row": { "type": "integer" }, + "number": { "type": "integer" }, + "cost": { "type": "double" }, + "sold": { "type": "boolean" }, + "datetime": { "type": "date" }, + "date": { "type": "keyword" }, + "time": { "type": "keyword" } + } + } + } + } + ' + + */ + + // Create Ingest to Modify Dates: + + /* + + curl -X PUT "localhost:9200/_ingest/pipeline/seats" -H 'Content-Type: application/json' -d' + { + "description": "update datetime for seats", + "processors": [ + { + "script": { + "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;" + } + } + ] + } + ' + + */ + + public void testIngestProcessorScript() { + assertEquals(1535785200000L, + exec("String[] split(String s, char d) {" + + " int count = 0;" + + " for (char c : s.toCharArray()) {" + + " if (c == d) {" + + " ++count;" + + " }" + + " }" + + " if (count == 0) {" + + " return new String[] {s};" + + " }" + + " String[] r = new String[count + 1];" + + " int i0 = 0, i1 = 0;" + + " count = 0;" + + " for (char c : s.toCharArray()) {" + + " if (c == d) {" + + " r[count++] = s.substring(i0, i1);" + + " i0 = i1 + 1;" + + " }" + + " ++i1;" + + " }" + + " r[count] = s.substring(i0, i1);" + + " return r;" + + "}" + + "def x = ['date': '2018-9-1', 'time': '3:00 PM'];" + + "String[] dateSplit = split(x.date, (char)'-');" + + "String year = dateSplit[0].trim();" + + "String month = dateSplit[1].trim();" + + "if (month.length() == 1) {" + + " month = '0' + month;" + + "}" + + "String day = dateSplit[2].trim();" + + "if (day.length() == 1) {" + + " day = '0' + day;" + + "}" + + "boolean pm = x.time.substring(x.time.length() - 2).equals('PM');" + + "String[] timeSplit = split(x.time.substring(0, x.time.length() - 2), (char)':');" + + "int hours = Integer.parseInt(timeSplit[0].trim());" + + "String minutes = timeSplit[1].trim();" + + "if (pm) {" + + " hours += 12;" + + "}" + + "String dts = year + '-' + month + '-' + day + " + + "'T' + (hours < 10 ? 
'0' + hours : '' + hours) + ':' + minutes + ':00+08:00';" +
+                "ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);" +
+                "return dt.getLong(ChronoField.INSTANT_SECONDS) * 1000L"
+            )
+        );
+    }
+
+    // Post Generated Data:
+
+    /*
+
+    curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@/home/jdconrad/test/seats.json"
+
+    */
+}

From 0161bbc77f2030bf95346324d2fe9fc6e41aa9e3 Mon Sep 17 00:00:00 2001
From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com>
Date: Fri, 10 Aug 2018 09:55:43 +1000
Subject: [PATCH 11/11] [Test] Handle file permissions for Windows (#32681)

This commit modifies the test to handle file permission checks in Windows/DOS
environments. The test requires access to UserPrincipal, so the
plugin-security policy has been modified to allow access to user information.

Closes #32637
---
 .../plugin-metadata/plugin-security.policy   |  4 ++
 .../authc/kerberos/KerberosRealmTests.java   | 44 +++++++++++++------
 2 files changed, 34 insertions(+), 14 deletions(-)

diff --git a/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy
index 8ce72be3ef9..df6cf33c65d 100644
--- a/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy
+++ b/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy
@@ -25,6 +25,10 @@ grant {
   permission java.util.PropertyPermission "sun.security.krb5.debug","write";
   permission java.util.PropertyPermission "java.security.debug","write";
   permission java.util.PropertyPermission "sun.security.spnego.debug","write";
+
+  // needed for kerberos file permission tests to access user information
+  permission java.lang.RuntimePermission "accessUserInformation";
+  permission java.lang.RuntimePermission "getFileStoreAttributes";
 };
 
 grant codeBase "${codebase.xmlsec-2.0.8.jar}" {
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java
index 0f44544c9c2..ecf1bff6d5e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java
@@ -23,17 +23,19 @@ import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData;
 import org.ietf.jgss.GSSException;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.SeekableByteChannel;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.StandardOpenOption;
-import java.nio.file.attribute.FileAttribute;
-import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.AclEntry;
+import java.nio.file.attribute.AclEntryPermission;
+import java.nio.file.attribute.AclEntryType;
+import java.nio.file.attribute.AclFileAttributeView;
+import java.nio.file.attribute.PosixFileAttributeView;
 import java.nio.file.attribute.PosixFilePermissions;
+import java.nio.file.attribute.UserPrincipal;
 import java.util.Arrays;
-import java.util.EnumSet;
+import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import javax.security.auth.login.LoginException;
@@ -112,7 +114,6 @@ public class KerberosRealmTests extends KerberosRealmTestCase {
         final String keytabPathCase = 
randomFrom("keytabPathAsDirectory", "keytabFileDoesNotExist", "keytabPathWithNoReadPermissions");
         final String expectedErrorMessage;
         final String keytabPath;
-        final Set<PosixFilePermission> filePerms;
         switch (keytabPathCase) {
         case "keytabPathAsDirectory":
             final String dirName = randomAlphaOfLength(5);
@@ -125,14 +126,29 @@ public class KerberosRealmTests extends KerberosRealmTestCase {
             expectedErrorMessage = "configured service key tab file [" + keytabPath + "] does not exist";
             break;
         case "keytabPathWithNoReadPermissions":
-            filePerms = PosixFilePermissions.fromString("---------");
-            final String keytabFileName = randomAlphaOfLength(5) + ".keytab";
-            final FileAttribute<Set<PosixFilePermission>> fileAttributes = PosixFilePermissions.asFileAttribute(filePerms);
-            try (SeekableByteChannel byteChannel = Files.newByteChannel(dir.resolve(keytabFileName),
-                    EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), fileAttributes)) {
-                byteChannel.write(ByteBuffer.wrap(randomByteArrayOfLength(10)));
+            final String fileName = randomAlphaOfLength(5);
+            final Path keytabFilePath = Files.createTempFile(dir, fileName, ".keytab");
+            Files.write(keytabFilePath, randomAlphaOfLength(5).getBytes(StandardCharsets.UTF_8));
+            final Set<String> supportedAttributes = keytabFilePath.getFileSystem().supportedFileAttributeViews();
+            if (supportedAttributes.contains("posix")) {
+                final PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(keytabFilePath, PosixFileAttributeView.class);
+                fileAttributeView.setPermissions(PosixFilePermissions.fromString("---------"));
+            } else if (supportedAttributes.contains("acl")) {
+                final UserPrincipal principal = Files.getOwner(keytabFilePath);
+                final AclFileAttributeView view = Files.getFileAttributeView(keytabFilePath, AclFileAttributeView.class);
+                final AclEntry entry = AclEntry.newBuilder()
+                        .setType(AclEntryType.DENY)
+                        .setPrincipal(principal)
+                        .setPermissions(AclEntryPermission.READ_DATA, AclEntryPermission.READ_ATTRIBUTES).build();
+                final List<AclEntry> acl = view.getAcl();
+                acl.add(0, entry);
+                view.setAcl(acl);
+            } else {
+                throw new UnsupportedOperationException(
+                        String.format(Locale.ROOT, "Don't know how to make file [%s] non-readable on a file system with attributes [%s]",
+                                keytabFilePath, supportedAttributes));
             }
-            keytabPath = dir.resolve(keytabFileName).toString();
+            keytabPath = keytabFilePath.toString();
             expectedErrorMessage = "configured service key tab file [" + keytabPath + "] must have read permission";
             break;
         default: